Metadata Downloading (#3525)
parent eb66763078
commit f4fd7230ea
108 changed files with 6296 additions and 484 deletions

@@ -0,0 +1,188 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.History;
using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

/// <summary>
/// v0.8.0 migration to move loose leaf chapters into their own volume and retain user progress.
/// </summary>
public static class MigrateLooseLeafChapters
{
    public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, IDirectoryService directoryService, ILogger<Program> logger)
    {
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateLooseLeafChapters"))
        {
            return;
        }

        logger.LogCritical(
            "Running MigrateLooseLeafChapters migration - Please be patient, this may take some time. This is not an error");

        var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
        var extension = settings.EncodeMediaAs.GetExtension();

        var progress = await dataContext.AppUserProgresses
            .Join(dataContext.Chapter, p => p.ChapterId, c => c.Id, (p, c) => new UserProgressCsvRecord
            {
                IsSpecial = c.IsSpecial,
                AppUserId = p.AppUserId,
                PagesRead = p.PagesRead,
                Range = c.Range,
                Number = c.Number,
                MinNumber = c.MinNumber,
                SeriesId = p.SeriesId,
                VolumeId = p.VolumeId,
                ProgressId = p.Id
            })
            .Where(d => !d.IsSpecial)
            .Join(dataContext.Volume, d => d.VolumeId, v => v.Id, (d, v) => new
            {
                ProgressRecord = d,
                Volume = v
            })
            .Where(d => d.Volume.Name == "0")
            .ToListAsync();

        // First, group all the progresses into different series
        logger.LogCritical("Migrating {Count} progress events to new Volume structure for Loose leafs - This may take over 10 minutes depending on size of DB. Please wait", progress.Count);
        var progressesGroupedBySeries = progress
            .GroupBy(p => p.ProgressRecord.SeriesId);

        foreach (var seriesGroup in progressesGroupedBySeries)
        {
            // Get each series and move the loose leafs from the old volume to the new Volume
            var seriesId = seriesGroup.Key;

            // Handle All Loose Leafs
            var looseLeafsInSeries = seriesGroup
                .Where(p => !p.ProgressRecord.IsSpecial)
                .ToList();

            // Get distinct Volumes by Id. For each one, create it then create the progress events
            var distinctVolumes = looseLeafsInSeries.DistinctBy(d => d.Volume.Id);
            foreach (var distinctVolume in distinctVolumes)
            {
                // Create a new loose leaf volume (Parser.LooseLeafVolume) for the series
                var chapters = await dataContext.Chapter
                    .Where(c => c.VolumeId == distinctVolume.Volume.Id && !c.IsSpecial).ToListAsync();

                var newVolume = new VolumeBuilder(Parser.LooseLeafVolume)
                    .WithSeriesId(seriesId)
                    .WithCreated(distinctVolume.Volume.Created)
                    .WithLastModified(distinctVolume.Volume.LastModified)
                    .Build();

                newVolume.Pages = chapters.Sum(c => c.Pages);
                newVolume.WordCount = chapters.Sum(c => c.WordCount);
                newVolume.MinHoursToRead = chapters.Sum(c => c.MinHoursToRead);
                newVolume.MaxHoursToRead = chapters.Sum(c => c.MaxHoursToRead);
                newVolume.AvgHoursToRead = chapters.Sum(c => c.AvgHoursToRead);
                dataContext.Volume.Add(newVolume);
                await dataContext.SaveChangesAsync(); // Save changes to generate the newVolumeId

                // Migrate the progress events to the new volume
                var oldVolumeProgresses = await dataContext.AppUserProgresses
                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
                foreach (var oldProgress in oldVolumeProgresses)
                {
                    oldProgress.VolumeId = newVolume.Id;
                }

                logger.LogInformation("Moving {Count} chapters from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
                    chapters.Count, distinctVolume.Volume.Id, newVolume.Id);

                // Move the loose leaf chapters from the old volume to the new Volume
                foreach (var chapter in chapters)
                {
                    // Update the VolumeId on the chapter
                    chapter.VolumeId = newVolume.Id;

                    // We need to migrate cover images as well
                    //UpdateCoverImage(directoryService, logger, chapter, extension, newVolume);
                }

                var oldVolumeBookmarks = await dataContext.AppUserBookmark
                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
                logger.LogInformation("Moving {Count} existing Bookmarks from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
                    oldVolumeBookmarks.Count, distinctVolume.Volume.Id, newVolume.Id);
                foreach (var bookmark in oldVolumeBookmarks)
                {
                    bookmark.VolumeId = newVolume.Id;
                }

                var oldVolumePersonalToC = await dataContext.AppUserTableOfContent
                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
                logger.LogInformation("Moving {Count} existing Personal ToC from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
                    oldVolumePersonalToC.Count, distinctVolume.Volume.Id, newVolume.Id);
                foreach (var pToc in oldVolumePersonalToC)
                {
                    pToc.VolumeId = newVolume.Id;
                }

                var oldVolumeReadingListItems = await dataContext.ReadingListItem
                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
                logger.LogInformation("Moving {Count} existing Reading List Items from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
                    oldVolumeReadingListItems.Count, distinctVolume.Volume.Id, newVolume.Id);
                foreach (var readingListItem in oldVolumeReadingListItems)
                {
                    readingListItem.VolumeId = newVolume.Id;
                }

                await dataContext.SaveChangesAsync();
            }
        }

        // Save changes after processing all series
        if (dataContext.ChangeTracker.HasChanges())
        {
            await dataContext.SaveChangesAsync();
        }

        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateLooseLeafChapters",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });
        await dataContext.SaveChangesAsync();

        logger.LogCritical(
            "Running MigrateLooseLeafChapters migration - Completed. This is not an error");
    }

    private static void UpdateCoverImage(IDirectoryService directoryService, ILogger<Program> logger, Chapter chapter,
        string extension, Volume newVolume)
    {
        var existingCover = ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId) + extension;
        var newCover = ImageService.GetChapterFormat(chapter.Id, newVolume.Id) + extension;
        try
        {
            if (!chapter.CoverImageLocked)
            {
                // First rename existing cover
                File.Copy(Path.Join(directoryService.CoverImageDirectory, existingCover), Path.Join(directoryService.CoverImageDirectory, newCover));
                chapter.CoverImage = newCover;
            }
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Unable to rename {OldCover} to {NewCover}, this cover will need manual refresh", existingCover, newCover);
        }
    }
}
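
Every migration in this commit follows the same run-once pattern: check ManualMigrationHistory, do the one-time data fix, then record a row so the migration never runs again. A minimal sketch of that skeleton, with a hypothetical class name MigrateExample (everything else mirrors the code above):

public static class MigrateExample
{
    public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
    {
        // Bail out if this migration has already been recorded as run
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateExample"))
        {
            return;
        }

        // ... perform the one-time data fix here ...

        // Record the run so it is skipped on every subsequent startup
        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateExample",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });
        await dataContext.SaveChangesAsync();
    }
}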

API/Data/ManualMigrations/v0.8.0/ManualMigrateMixedSpecials.cs (new file)
@@ -0,0 +1,207 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.History;
using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

public class UserProgressCsvRecord
{
    public bool IsSpecial { get; set; }
    public int AppUserId { get; set; }
    public int PagesRead { get; set; }
    public string Range { get; set; }
    public string Number { get; set; }
    public float MinNumber { get; set; }
    public int SeriesId { get; set; }
    public int VolumeId { get; set; }
    public int ProgressId { get; set; }
}

/// <summary>
/// v0.8.0 migration to move Specials into their own volume and retain user progress.
/// </summary>
public static class MigrateMixedSpecials
{
    public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, IDirectoryService directoryService, ILogger<Program> logger)
    {
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "ManualMigrateMixedSpecials"))
        {
            return;
        }

        logger.LogCritical(
            "Running ManualMigrateMixedSpecials migration - Please be patient, this may take some time. This is not an error");

        // First, group all the progresses into different series
        // Get each series and move the specials from old volume to the new Volume()
        // Create a new progress event from existing and store the Id of existing progress event to delete it
        // Save per series

        var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
        var extension = settings.EncodeMediaAs.GetExtension();

        var progress = await dataContext.AppUserProgresses
            .Join(dataContext.Chapter, p => p.ChapterId, c => c.Id, (p, c) => new UserProgressCsvRecord
            {
                IsSpecial = c.IsSpecial,
                AppUserId = p.AppUserId,
                PagesRead = p.PagesRead,
                Range = c.Range,
                Number = c.Number,
                MinNumber = c.MinNumber,
                SeriesId = p.SeriesId,
                VolumeId = p.VolumeId,
                ProgressId = p.Id
            })
            .Where(d => d.IsSpecial || d.Number == "0")
            .Join(dataContext.Volume, d => d.VolumeId, v => v.Id,
                (d, v) => new
                {
                    ProgressRecord = d,
                    Volume = v
                })
            .Where(d => d.Volume.Name == "0")
            .ToListAsync();

        // First, group all the progresses into different series
        logger.LogCritical("Migrating {Count} progress events to new Volume structure for Specials - This may take over 10 minutes depending on size of DB. Please wait", progress.Count);
        var progressesGroupedBySeries = progress.GroupBy(p => p.ProgressRecord.SeriesId);

        foreach (var seriesGroup in progressesGroupedBySeries)
        {
            // Get each series and move the specials from the old volume to the new Volume
            var seriesId = seriesGroup.Key;

            // Handle All Specials
            var specialsInSeries = seriesGroup
                .Where(p => p.ProgressRecord.IsSpecial)
                .ToList();

            // Get distinct Volumes by Id. For each one, create it then create the progress events
            var distinctVolumes = specialsInSeries.DistinctBy(d => d.Volume.Id);
            foreach (var distinctVolume in distinctVolumes)
            {
                // Create a new Special volume (Parser.SpecialVolume) for the series
                var chapters = await dataContext.Chapter
                    .Where(c => c.VolumeId == distinctVolume.Volume.Id && c.IsSpecial).ToListAsync();

                var newVolume = new VolumeBuilder(Parser.SpecialVolume)
                    .WithSeriesId(seriesId)
                    .WithCreated(distinctVolume.Volume.Created)
                    .WithLastModified(distinctVolume.Volume.LastModified)
                    .Build();

                newVolume.Pages = chapters.Sum(c => c.Pages);
                newVolume.WordCount = chapters.Sum(c => c.WordCount);
                newVolume.MinHoursToRead = chapters.Sum(c => c.MinHoursToRead);
                newVolume.MaxHoursToRead = chapters.Sum(c => c.MaxHoursToRead);
                newVolume.AvgHoursToRead = chapters.Sum(c => c.AvgHoursToRead);

                dataContext.Volume.Add(newVolume);
                await dataContext.SaveChangesAsync(); // Save changes to generate the newVolumeId

                // Migrate the progress events to the new volume
                var oldVolumeProgresses = await dataContext.AppUserProgresses
                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
                foreach (var oldProgress in oldVolumeProgresses)
                {
                    oldProgress.VolumeId = newVolume.Id;
                }

                logger.LogInformation("Moving {Count} chapters from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
                    chapters.Count, distinctVolume.Volume.Id, newVolume.Id);

                // Move the special chapters from the old volume to the new Volume
                foreach (var specialChapter in chapters)
                {
                    // Update the VolumeId on the chapter
                    specialChapter.VolumeId = newVolume.Id;

                    //UpdateCoverImage(directoryService, logger, specialChapter, extension, newVolume);
                }

                var oldVolumeBookmarks = await dataContext.AppUserBookmark
                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
                logger.LogInformation("Moving {Count} existing Bookmarks from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
                    oldVolumeBookmarks.Count, distinctVolume.Volume.Id, newVolume.Id);
                foreach (var bookmark in oldVolumeBookmarks)
                {
                    bookmark.VolumeId = newVolume.Id;
                }

                var oldVolumePersonalToC = await dataContext.AppUserTableOfContent
                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
                logger.LogInformation("Moving {Count} existing Personal ToC from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
                    oldVolumePersonalToC.Count, distinctVolume.Volume.Id, newVolume.Id);
                foreach (var pToc in oldVolumePersonalToC)
                {
                    pToc.VolumeId = newVolume.Id;
                }

                var oldVolumeReadingListItems = await dataContext.ReadingListItem
                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
                logger.LogInformation("Moving {Count} existing Reading List Items from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
                    oldVolumeReadingListItems.Count, distinctVolume.Volume.Id, newVolume.Id);
                foreach (var readingListItem in oldVolumeReadingListItems)
                {
                    readingListItem.VolumeId = newVolume.Id;
                }

                await dataContext.SaveChangesAsync();
            }
        }

        // Save changes after processing all series
        if (dataContext.ChangeTracker.HasChanges())
        {
            await dataContext.SaveChangesAsync();
        }

        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "ManualMigrateMixedSpecials",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });

        await dataContext.SaveChangesAsync();
        logger.LogCritical(
            "Running ManualMigrateMixedSpecials migration - Completed. This is not an error");
    }

    private static void UpdateCoverImage(IDirectoryService directoryService, ILogger<Program> logger, Chapter specialChapter,
        string extension, Volume newVolume)
    {
        // We need to migrate cover images as well
        var existingCover = ImageService.GetChapterFormat(specialChapter.Id, specialChapter.VolumeId) + extension;
        var newCover = ImageService.GetChapterFormat(specialChapter.Id, newVolume.Id) + extension;
        try
        {
            if (!specialChapter.CoverImageLocked)
            {
                // First rename existing cover
                File.Copy(Path.Join(directoryService.CoverImageDirectory, existingCover), Path.Join(directoryService.CoverImageDirectory, newCover));
                specialChapter.CoverImage = newCover;
            }
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Unable to rename {OldCover} to {NewCover}, this cover will need manual refresh", existingCover, newCover);
        }
    }
}
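
Both this migration and MigrateLooseLeafChapters walk the joined progress records grouped per series and then collapse the old volumes with DistinctBy, so each old volume is processed exactly once no matter how many progress rows point at it. A small self-contained illustration of that shape (the record shape and sample data are made up):

using System;
using System.Linq;

var records = new[]
{
    new { SeriesId = 1, VolumeId = 10 },
    new { SeriesId = 1, VolumeId = 10 },  // second progress row for the same volume
    new { SeriesId = 2, VolumeId = 20 },
};

foreach (var seriesGroup in records.GroupBy(r => r.SeriesId))
{
    foreach (var volume in seriesGroup.DistinctBy(r => r.VolumeId))
    {
        Console.WriteLine($"Series {seriesGroup.Key}: migrate old volume {volume.VolumeId}");
    }
}
// Prints one line for volume 10 and one for volume 20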

API/Data/ManualMigrations/v0.8.0/MigrateChapterFields.cs (new file)
@@ -0,0 +1,90 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.History;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

/// <summary>
/// Introduced in v0.8.0, this migrates existing Chapter and Volume "0" values to the Parser-defined constants and populates MangaFile.FileName
/// </summary>
public static class MigrateChapterFields
{
    public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
    {
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateChapterFields"))
        {
            return;
        }

        logger.LogCritical(
            "Running MigrateChapterFields migration - Please be patient, this may take some time. This is not an error");

        // Update all volumes that only have specials in them (rare)
        var volumesWithJustSpecials = dataContext.Volume
            .Include(v => v.Chapters)
            .Where(v => v.Name == "0" && v.Chapters.All(c => c.IsSpecial))
            .ToList();
        logger.LogCritical(
            "Running MigrateChapterFields migration - Updating {Count} volumes that only have specials in them", volumesWithJustSpecials.Count);
        foreach (var volume in volumesWithJustSpecials)
        {
            volume.Name = $"{Parser.SpecialVolumeNumber}";
            volume.MinNumber = Parser.SpecialVolumeNumber;
            volume.MaxNumber = Parser.SpecialVolumeNumber;
        }

        // Update all volumes that only have loose leafs in them
        var looseLeafVolumes = dataContext.Volume
            .Include(v => v.Chapters)
            .Where(v => v.Name == "0" && v.Chapters.All(c => !c.IsSpecial))
            .ToList();
        logger.LogCritical(
            "Running MigrateChapterFields migration - Updating {Count} volumes that only have loose leaf chapters in them", looseLeafVolumes.Count);
        foreach (var volume in looseLeafVolumes)
        {
            volume.Name = $"{Parser.DefaultChapterNumber}";
            volume.MinNumber = Parser.DefaultChapterNumber;
            volume.MaxNumber = Parser.DefaultChapterNumber;
        }

        // Update all MangaFile
        logger.LogCritical(
            "Running MigrateChapterFields migration - Updating all MangaFiles");
        foreach (var mangaFile in dataContext.MangaFile)
        {
            mangaFile.FileName = Parser.RemoveExtensionIfSupported(mangaFile.FilePath);
        }

        var looseLeafChapters = await dataContext.Chapter.Where(c => c.Number == "0").ToListAsync();
        logger.LogCritical(
            "Running MigrateChapterFields migration - Updating {Count} loose leaf chapters", looseLeafChapters.Count);
        foreach (var chapter in looseLeafChapters)
        {
            chapter.Number = Parser.DefaultChapter;
            chapter.MinNumber = Parser.DefaultChapterNumber;
            chapter.MaxNumber = Parser.DefaultChapterNumber;
        }

        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateChapterFields",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });

        await dataContext.SaveChangesAsync();

        logger.LogCritical(
            "Running MigrateChapterFields migration - Completed. This is not an error");
    }
}
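
Parser.RemoveExtensionIfSupported is Kavita's own helper; assuming it does what the name suggests (strip a known media extension and leave the rest of the path untouched), a rough stand-in looks like this. The extension list here is illustrative only:

using System;
using System.IO;
using System.Linq;

// Hypothetical stand-in for Parser.RemoveExtensionIfSupported - illustration only
static string RemoveExtensionIfSupported(string filePath)
{
    var supported = new[] { ".cbz", ".cbr", ".zip", ".rar", ".epub", ".pdf" }; // assumed list
    var ext = Path.GetExtension(filePath);
    return supported.Contains(ext, StringComparer.OrdinalIgnoreCase)
        ? filePath[..^ext.Length]
        : filePath;
}

Console.WriteLine(RemoveExtensionIfSupported("/manga/Series/Vol 1.cbz")); // /manga/Series/Vol 1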

API/Data/ManualMigrations/v0.8.0/MigrateChapterNumber.cs (new file)
@@ -0,0 +1,51 @@
using System;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.History;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

/// <summary>
/// Introduced in v0.8.0, this migrates the existing Chapter Range -> Chapter Min/Max Number
/// </summary>
public static class MigrateChapterNumber
{
    public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
    {
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateChapterNumber"))
        {
            return;
        }

        logger.LogCritical(
            "Running MigrateChapterNumber migration - Please be patient, this may take some time. This is not an error");

        // Get all chapters
        foreach (var chapter in dataContext.Chapter)
        {
            if (chapter.IsSpecial)
            {
                chapter.MinNumber = Parser.DefaultChapterNumber;
                chapter.MaxNumber = Parser.DefaultChapterNumber;
                continue;
            }
            chapter.MinNumber = Parser.MinNumberFromRange(chapter.Range);
            chapter.MaxNumber = Parser.MaxNumberFromRange(chapter.Range);
        }

        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateChapterNumber",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });

        await dataContext.SaveChangesAsync();
        logger.LogCritical(
            "Running MigrateChapterNumber migration - Completed. This is not an error");
    }
}
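
Parser.MinNumberFromRange/MaxNumberFromRange are Kavita helpers; the intent of the Range -> Min/Max split is simply to pull the smallest and largest number out of a range string. A hypothetical re-implementation for illustration (not Kavita's actual parsing rules):

using System;
using System.Globalization;
using System.Linq;

// Hypothetical illustration of the Range -> Min/Max mapping - not Kavita's Parser
static (float Min, float Max) SplitRange(string range)
{
    var numbers = range
        .Split('-', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
        .Select(p => float.TryParse(p, NumberStyles.Float, CultureInfo.InvariantCulture, out var n) ? n : 0f)
        .ToArray();
    return numbers.Length == 0 ? (0f, 0f) : (numbers.Min(), numbers.Max());
}

Console.WriteLine(SplitRange("1-3"));  // (1, 3)
Console.WriteLine(SplitRange("4.5"));  // (4.5, 4.5)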

API/Data/ManualMigrations/v0.8.0/MigrateChapterRange.cs (new file)
@@ -0,0 +1,57 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.History;
using API.Extensions;
using API.Helpers.Builders;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

/// <summary>
/// v0.8.0 changed the Range so that it no longer contains the filename by default
/// </summary>
public static class MigrateChapterRange
{
    public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
    {
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateChapterRange"))
        {
            return;
        }

        logger.LogCritical(
            "Running MigrateChapterRange migration - Please be patient, this may take some time. This is not an error");

        var chapters = await dataContext.Chapter.ToListAsync();
        foreach (var chapter in chapters)
        {
            if (Parser.MinNumberFromRange(chapter.Range).Is(0.0f))
            {
                chapter.Range = chapter.GetNumberTitle();
            }
        }

        // Save changes after processing all chapters
        if (dataContext.ChangeTracker.HasChanges())
        {
            await dataContext.SaveChangesAsync();
        }

        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateChapterRange",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });

        await dataContext.SaveChangesAsync();
        logger.LogCritical(
            "Running MigrateChapterRange migration - Completed. This is not an error");
    }
}

@@ -0,0 +1,82 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Entities.History;
using API.Extensions.QueryExtensions;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

/// <summary>
/// v0.8.0 refactored User Collections
/// </summary>
public static class MigrateCollectionTagToUserCollections
{
    public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
    {
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateCollectionTagToUserCollections") ||
            !await dataContext.AppUser.AnyAsync())
        {
            return;
        }

        logger.LogCritical(
            "Running MigrateCollectionTagToUserCollections migration - Please be patient, this may take some time. This is not an error");

        // Find the first user that is an admin
        var defaultAdmin = await unitOfWork.UserRepository.GetDefaultAdminUser(AppUserIncludes.Collections);
        if (defaultAdmin == null)
        {
            await CompleteMigration(dataContext, logger);
            return;
        }

        // For all collectionTags, move them over to said user
        var existingCollections = await dataContext.CollectionTag
            .OrderBy(c => c.NormalizedTitle)
            .Includes(CollectionTagIncludes.SeriesMetadataWithSeries)
            .ToListAsync();
        foreach (var existingCollectionTag in existingCollections)
        {
            var collection = new AppUserCollection()
            {
                Title = existingCollectionTag.Title,
                NormalizedTitle = existingCollectionTag.Title.Normalize(),
                CoverImage = existingCollectionTag.CoverImage,
                CoverImageLocked = existingCollectionTag.CoverImageLocked,
                Promoted = existingCollectionTag.Promoted,
                AgeRating = AgeRating.Unknown,
                Summary = existingCollectionTag.Summary,
                Items = existingCollectionTag.SeriesMetadatas.Select(s => s.Series).ToList()
            };

            collection.AgeRating = await unitOfWork.SeriesRepository.GetMaxAgeRatingFromSeriesAsync(collection.Items.Select(s => s.Id));
            defaultAdmin.Collections.Add(collection);
        }
        unitOfWork.UserRepository.Update(defaultAdmin);

        await unitOfWork.CommitAsync();

        await CompleteMigration(dataContext, logger);
    }

    private static async Task CompleteMigration(DataContext dataContext, ILogger<Program> logger)
    {
        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateCollectionTagToUserCollections",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });

        await dataContext.SaveChangesAsync();

        logger.LogCritical(
            "Running MigrateCollectionTagToUserCollections migration - Completed. This is not an error");
    }
}
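
GetMaxAgeRatingFromSeriesAsync presumably picks the most restrictive rating across the collection's series. Conceptually, for an enum whose values are ordered from least to most restrictive, that is just Max() - shown here with a sample enum rather than Kavita's actual AgeRating values:

using System;
using System.Linq;

var ratings = new[] { SampleAgeRating.Everyone, SampleAgeRating.Mature, SampleAgeRating.Teen };
Console.WriteLine(ratings.Max()); // Mature - the most restrictive rating wins

// Sample enum for illustration only - not Kavita's AgeRating definition
enum SampleAgeRating { Unknown = 0, Everyone = 1, Teen = 2, Mature = 3 }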

@@ -0,0 +1,68 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.History;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

/// <summary>
/// v0.8.0 - Some installs ended up with a duplicate "Dark" site theme. This removes the duplicates and repoints affected users to the correct theme.
/// </summary>
public static class MigrateDuplicateDarkTheme
{
    public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
    {
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateDuplicateDarkTheme"))
        {
            return;
        }

        logger.LogCritical(
            "Running MigrateDuplicateDarkTheme migration - Please be patient, this may take some time. This is not an error");

        var darkThemes = await dataContext.SiteTheme.Where(t => t.Name == "Dark").ToListAsync();

        if (darkThemes.Count > 1)
        {
            var correctDarkTheme = darkThemes.First(d => !string.IsNullOrEmpty(d.Description));

            // Get users
            var users = await dataContext.AppUser
                .Include(u => u.UserPreferences)
                .ThenInclude(p => p.Theme)
                .Where(u => u.UserPreferences.Theme.Name == "Dark")
                .ToListAsync();

            // Find any users that have a duplicate Dark theme as default and switch to the correct one
            foreach (var user in users)
            {
                if (string.IsNullOrEmpty(user.UserPreferences.Theme.Description))
                {
                    user.UserPreferences.Theme = correctDarkTheme;
                }
            }
            await dataContext.SaveChangesAsync();

            // Now remove the bad themes
            dataContext.SiteTheme.RemoveRange(darkThemes.Where(d => string.IsNullOrEmpty(d.Description)));

            await dataContext.SaveChangesAsync();
        }

        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateDuplicateDarkTheme",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });
        await dataContext.SaveChangesAsync();

        logger.LogCritical(
            "Running MigrateDuplicateDarkTheme migration - Completed. This is not an error");
    }
}

API/Data/ManualMigrations/v0.8.0/MigrateMangaFilePath.cs (new file)
@@ -0,0 +1,46 @@
using System;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.History;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

/// <summary>
/// v0.8.0 ensured that MangaFile Path is normalized. This will normalize existing data to avoid churn.
/// </summary>
public static class MigrateMangaFilePath
{
    public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
    {
        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateMangaFilePath"))
        {
            return;
        }

        logger.LogCritical(
            "Running MigrateMangaFilePath migration - Please be patient, this may take some time. This is not an error");

        foreach (var file in dataContext.MangaFile)
        {
            file.FilePath = Parser.NormalizePath(file.FilePath);
        }

        await dataContext.SaveChangesAsync();

        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateMangaFilePath",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });
        await dataContext.SaveChangesAsync();

        logger.LogCritical(
            "Running MigrateMangaFilePath migration - Completed. This is not an error");
    }
}
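
Parser.NormalizePath is Kavita's helper; the point of the migration is simply that stored paths use one consistent separator style so rescans don't see every file as changed. A rough stand-in for illustration (assumed behavior, not the actual implementation):

using System;

// Hypothetical stand-in for Parser.NormalizePath - illustration only
static string NormalizePath(string path) =>
    string.IsNullOrEmpty(path) ? path : path.Replace('\\', '/');

Console.WriteLine(NormalizePath(@"C:\manga\Series\Vol 1.cbz")); // C:/manga/Series/Vol 1.cbz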

API/Data/ManualMigrations/v0.8.0/MigrateProgressExport.cs (new file)
@@ -0,0 +1,124 @@
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.History;
using API.Services;
using CsvHelper;
using CsvHelper.Configuration.Attributes;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

public class ProgressExport
{
    [Name("Library Id")]
    public int LibraryId { get; set; }

    [Name("Library Name")]
    public string LibraryName { get; set; }

    [Name("Series Name")]
    public string SeriesName { get; set; }

    [Name("Volume Number")]
    public string VolumeRange { get; set; }

    [Name("Volume LookupName")]
    public string VolumeLookupName { get; set; }

    [Name("Chapter Number")]
    public string ChapterRange { get; set; }

    [Name("FileName")]
    public string MangaFileName { get; set; }

    [Name("FilePath")]
    public string MangaFilePath { get; set; }

    [Name("AppUser Name")]
    public string AppUserName { get; set; }

    [Name("AppUser Id")]
    public int AppUserId { get; set; }

    [Name("Pages Read")]
    public int PagesRead { get; set; }

    [Name("BookScrollId")]
    public string BookScrollId { get; set; }

    [Name("Progress Created")]
    public DateTime Created { get; set; }

    [Name("Progress LastModified")]
    public DateTime LastModified { get; set; }
}

/// <summary>
/// v0.8.0 - Progress is extracted and saved in a csv
/// </summary>
public static class MigrateProgressExport
{
    public static async Task Migrate(DataContext dataContext, IDirectoryService directoryService, ILogger<Program> logger)
    {
        try
        {
            if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateProgressExport"))
            {
                return;
            }

            logger.LogCritical(
                "Running MigrateProgressExport migration - Please be patient, this may take some time. This is not an error");

            var data = await dataContext.AppUserProgresses
                .Join(dataContext.Series, progress => progress.SeriesId, series => series.Id, (progress, series) => new { progress, series })
                .Join(dataContext.Volume, ps => ps.progress.VolumeId, volume => volume.Id, (ps, volume) => new { ps.progress, ps.series, volume })
                .Join(dataContext.Chapter, psv => psv.progress.ChapterId, chapter => chapter.Id, (psv, chapter) => new { psv.progress, psv.series, psv.volume, chapter })
                .Join(dataContext.MangaFile, psvc => psvc.chapter.Id, mangaFile => mangaFile.ChapterId, (psvc, mangaFile) => new { psvc.progress, psvc.series, psvc.volume, psvc.chapter, mangaFile })
                .Join(dataContext.AppUser, psvcm => psvcm.progress.AppUserId, appUser => appUser.Id, (psvcm, appUser) => new
                {
                    LibraryId = psvcm.series.LibraryId,
                    LibraryName = psvcm.series.Library.Name,
                    SeriesName = psvcm.series.Name,
                    VolumeRange = psvcm.volume.MinNumber + "-" + psvcm.volume.MaxNumber,
                    VolumeLookupName = psvcm.volume.Name,
                    ChapterRange = psvcm.chapter.Range,
                    MangaFileName = psvcm.mangaFile.FileName,
                    MangaFilePath = psvcm.mangaFile.FilePath,
                    AppUserName = appUser.UserName,
                    AppUserId = appUser.Id,
                    PagesRead = psvcm.progress.PagesRead,
                    BookScrollId = psvcm.progress.BookScrollId,
                    ProgressCreated = psvcm.progress.Created,
                    ProgressLastModified = psvcm.progress.LastModified
                }).ToListAsync();

            // Write the mapped data to a CSV file
            await using var writer = new StreamWriter(Path.Join(directoryService.ConfigDirectory, "progress_export.csv"));
            await using var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
            await csv.WriteRecordsAsync(data);

            logger.LogCritical(
                "Running MigrateProgressExport migration - Completed. This is not an error");
        }
        catch (Exception ex)
        {
            // On new installs, the db isn't setup yet, so this has nothing to do
        }

        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = "MigrateProgressExport",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });
        await dataContext.SaveChangesAsync();
    }
}
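
The export above writes an anonymous projection with CsvHelper, so the CSV headers are the projection's property names (SeriesName, PagesRead, and so on). A hypothetical read-back of the exported progress_export.csv, using CsvHelper's dynamic records so no mapping class is needed (configDirectory is a placeholder):

using System;
using System.Globalization;
using System.IO;
using CsvHelper;

var configDirectory = "/kavita/config"; // placeholder path for illustration
using var reader = new StreamReader(Path.Join(configDirectory, "progress_export.csv"));
using var csv = new CsvReader(reader, CultureInfo.InvariantCulture);
foreach (var row in csv.GetRecords<dynamic>())
{
    // Each row exposes the exported columns as string properties
    Console.WriteLine($"{row.AppUserName} - {row.SeriesName}: {row.PagesRead} pages read");
}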