Missing Migration (#2790)

Joe Milazzo 2024-03-17 17:21:28 -05:00 committed by GitHub
parent 2b6fd1224f
commit f443e513d1
11 changed files with 387 additions and 29 deletions
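
All three migrations added in this commit share the same idempotency guard: check ManualMigrationHistory for their own name, bail out if a row already exists, and append a history row once finished. Below is a minimal sketch of that pattern pulled out into a helper; MigrationGuard, HasRun, and MarkRan are illustrative names, not part of this commit.

using System;
using System.Threading.Tasks;
using API.Entities;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;

namespace API.Data.ManualMigrations;

// Illustrative helper only - the committed migrations inline this logic themselves.
public static class MigrationGuard
{
    // Returns true if a migration with this name has already been recorded.
    public static Task<bool> HasRun(DataContext dataContext, string name) =>
        dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == name);

    // Records a completed migration so it is skipped on the next startup.
    public static async Task MarkRan(DataContext dataContext, string name)
    {
        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
        {
            Name = name,
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });
        await dataContext.SaveChangesAsync();
    }
}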

API/Data/ManualMigrations/MigrateLooseLeafChapters.cs

@@ -0,0 +1,159 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.8.0 migration to move loose leaf chapters into their own volume and retain user progress.
/// </summary>
public static class MigrateLooseLeafChapters
{
public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, IDirectoryService directoryService, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateLooseLeafChapters"))
{
return;
}
logger.LogCritical(
"Running MigrateLooseLeafChapters migration - Please be patient, this may take some time. This is not an error");
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
var extension = settings.EncodeMediaAs.GetExtension();
var progress = await dataContext.AppUserProgresses
.Join(dataContext.Chapter, p => p.ChapterId, c => c.Id, (p, c) => new UserProgressCsvRecord
{
IsSpecial = c.IsSpecial,
AppUserId = p.AppUserId,
PagesRead = p.PagesRead,
Range = c.Range,
Number = c.Number,
MinNumber = c.MinNumber,
SeriesId = p.SeriesId,
VolumeId = p.VolumeId,
ProgressId = p.Id
})
.Where(d => !d.IsSpecial)
.Join(dataContext.Volume, d => d.VolumeId, v => v.Id, (d, v) => new
{
ProgressRecord = d,
Volume = v
})
.Where(d => d.Volume.Name == "0")
.ToListAsync();
// First, group all the progresses into different series
logger.LogCritical("Migrating {Count} progress events to new Volume structure for Loose leafs - This may take over 10 minutes depending on size of DB. Please wait", progress.Count);
var progressesGroupedBySeries = progress
.GroupBy(p => p.ProgressRecord.SeriesId);
foreach (var seriesGroup in progressesGroupedBySeries)
{
// Get each series and move the loose leafs from the old volume to the new Volume
var seriesId = seriesGroup.Key;
// Handle All Loose Leafs
var looseLeafsInSeries = seriesGroup
.Where(p => !p.ProgressRecord.IsSpecial)
.ToList();
// Get distinct Volumes by Id. For each one, create it then create the progress events
var distinctVolumes = looseLeafsInSeries.DistinctBy(d => d.Volume.Id);
foreach (var distinctVolume in distinctVolumes)
{
// Create a new volume for each series with the appropriate number (-100000)
var chapters = await dataContext.Chapter
.Where(c => c.VolumeId == distinctVolume.Volume.Id && !c.IsSpecial).ToListAsync();
var newVolume = new VolumeBuilder(Parser.LooseLeafVolume)
.WithSeriesId(seriesId)
.WithCreated(distinctVolume.Volume.Created)
.WithLastModified(distinctVolume.Volume.LastModified)
.Build();
newVolume.Pages = chapters.Sum(c => c.Pages);
newVolume.WordCount = chapters.Sum(c => c.WordCount);
newVolume.MinHoursToRead = chapters.Sum(c => c.MinHoursToRead);
newVolume.MaxHoursToRead = chapters.Sum(c => c.MaxHoursToRead);
newVolume.AvgHoursToRead = chapters.Sum(c => c.AvgHoursToRead);
dataContext.Volume.Add(newVolume);
await dataContext.SaveChangesAsync(); // Save changes to generate the newVolumeId
// Migrate the progress event to the new volume
var oldVolumeProgresses = await dataContext.AppUserProgresses
.Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
foreach (var oldProgress in oldVolumeProgresses)
{
oldProgress.VolumeId = newVolume.Id;
}
logger.LogInformation("Moving {Count} chapters from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
chapters.Count, distinctVolume.Volume.Id, newVolume.Id);
// Move the loose leaf chapters from the old volume to the new Volume
foreach (var chapter in chapters)
{
// Update the VolumeId on the existing progress event
chapter.VolumeId = newVolume.Id;
// We need to migrate cover images as well
//UpdateCoverImage(directoryService, logger, chapter, extension, newVolume);
}
// Update the progress table with the new VolumeId
await dataContext.SaveChangesAsync();
}
}
// Save changes after processing all series
if (dataContext.ChangeTracker.HasChanges())
{
await dataContext.SaveChangesAsync();
}
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "MigrateLooseLeafChapters",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
logger.LogCritical(
"Running MigrateLooseLeafChapters migration - Completed. This is not an error");
}
private static void UpdateCoverImage(IDirectoryService directoryService, ILogger<Program> logger, Chapter chapter,
string extension, Volume newVolume)
{
var existingCover = ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId) + extension;
var newCover = ImageService.GetChapterFormat(chapter.Id, newVolume.Id) + extension;
try
{
if (!chapter.CoverImageLocked)
{
// First rename existing cover
File.Copy(Path.Join(directoryService.CoverImageDirectory, existingCover), Path.Join(directoryService.CoverImageDirectory, newCover));
chapter.CoverImage = newCover;
}
} catch (Exception ex)
{
logger.LogError(ex, "Unable to rename {OldCover} to {NewCover}, this cover will need manual refresh", existingCover, newCover);
}
}
}
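
For context, manual migrations like the one above are typically invoked once during server startup. The following is a minimal sketch of how the three migrations added in this commit could be wired together, exporting progress before any volumes are rewritten; ManualMigrationRunner, RunAsync, and the ordering are assumptions for illustration, not Kavita's actual startup code.

using System.Threading.Tasks;
using API.Services;
using Microsoft.Extensions.Logging;

namespace API.Data.ManualMigrations;

// Illustrative wiring only - names and ordering are assumptions, not part of this commit.
public static class ManualMigrationRunner
{
    public static async Task RunAsync(DataContext dataContext, IUnitOfWork unitOfWork,
        IDirectoryService directoryService, ILogger<Program> logger)
    {
        // Export progress to CSV first so there is a restore point before volumes are rewritten.
        await MigrateProgressExport.Migrate(dataContext, directoryService, logger);

        // Each Migrate() checks ManualMigrationHistory and no-ops if it already ran,
        // so calling the full list on every boot is safe.
        await MigrateMixedSpecials.Migrate(dataContext, unitOfWork, directoryService, logger);
        await MigrateLooseLeafChapters.Migrate(dataContext, unitOfWork, directoryService, logger);
    }
}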

API/Data/ManualMigrations/MigrateMixedSpecials.cs

@@ -3,7 +3,9 @@ using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
@@ -21,6 +23,7 @@ public class UserProgressCsvRecord
public float MinNumber { get; set; }
public int SeriesId { get; set; }
public int VolumeId { get; set; }
public int ProgressId { get; set; }
}
/// <summary>
@@ -28,7 +31,7 @@ public class UserProgressCsvRecord
/// </summary>
public static class MigrateMixedSpecials
{
public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, IDirectoryService directoryService, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "ManualMigrateMixedSpecials"))
{
@@ -39,13 +42,13 @@ public static class MigrateMixedSpecials
"Running ManualMigrateMixedSpecials migration - Please be patient, this may take some time. This is not an error");
// First, group all the progresses into different series
// Get each series and move the specials from old volume to the new Volume()
// Create a new progress event from existing and store the Id of existing progress event to delete it
// Save per series
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
var extension = settings.EncodeMediaAs.GetExtension();
var progress = await dataContext.AppUserProgresses
.Join(dataContext.Chapter, p => p.ChapterId, c => c.Id, (p, c) => new UserProgressCsvRecord
{
@@ -56,10 +59,12 @@ public static class MigrateMixedSpecials
Number = c.Number,
MinNumber = c.MinNumber,
SeriesId = p.SeriesId,
VolumeId = p.VolumeId
VolumeId = p.VolumeId,
ProgressId = p.Id
})
.Where(d => d.IsSpecial || d.Number == "0")
.Join(dataContext.Volume, d => d.VolumeId, v => v.Id, (d, v) => new
.Join(dataContext.Volume, d => d.VolumeId, v => v.Id,
(d, v) => new
{
ProgressRecord = d,
Volume = v
@@ -68,18 +73,19 @@ public static class MigrateMixedSpecials
.ToListAsync();
// First, group all the progresses into different series
logger.LogCritical("Migrating {Count} progress events to new Volume structure - This may take over 10 minutes depending on size of DB. Please wait", progress.Count);
logger.LogCritical("Migrating {Count} progress events to new Volume structure for Specials - This may take over 10 minutes depending on size of DB. Please wait", progress.Count);
var progressesGroupedBySeries = progress.GroupBy(p => p.ProgressRecord.SeriesId);
foreach (var seriesGroup in progressesGroupedBySeries)
{
// Get each series and move the specials from the old volume to the new Volume
var seriesId = seriesGroup.Key;
// Handle All Specials
var specialsInSeries = seriesGroup
.Where(p => p.ProgressRecord.IsSpecial)
.ToList();
// Get distinct Volumes by Id. For each one, create it then create the progress events
var distinctVolumes = specialsInSeries.DistinctBy(d => d.Volume.Id);
foreach (var distinctVolume in distinctVolumes)
@@ -90,29 +96,43 @@ public static class MigrateMixedSpecials
var newVolume = new VolumeBuilder(Parser.SpecialVolume)
.WithSeriesId(seriesId)
.WithChapters(chapters)
.WithCreated(distinctVolume.Volume.Created)
.WithLastModified(distinctVolume.Volume.LastModified)
.Build();
newVolume.Pages = chapters.Sum(c => c.Pages);
newVolume.WordCount = chapters.Sum(c => c.WordCount);
newVolume.MinHoursToRead = chapters.Sum(c => c.MinHoursToRead);
newVolume.MaxHoursToRead = chapters.Sum(c => c.MaxHoursToRead);
newVolume.AvgHoursToRead = chapters.Sum(c => c.AvgHoursToRead);
dataContext.Volume.Add(newVolume);
await dataContext.SaveChangesAsync(); // Save changes to generate the newVolumeId
// Migrate the progress event to the new volume
distinctVolume.ProgressRecord.VolumeId = newVolume.Id;
var oldVolumeProgresses = await dataContext.AppUserProgresses
.Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
foreach (var oldProgress in oldVolumeProgresses)
{
oldProgress.VolumeId = newVolume.Id;
}
logger.LogInformation("Moving {Count} chapters from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
chapters.Count, distinctVolume.Volume.Id, newVolume.Id);
// Move the special chapters from the old volume to the new Volume
var specialChapters = await dataContext.Chapter
.Where(c => c.VolumeId == distinctVolume.ProgressRecord.VolumeId && c.IsSpecial)
.ToListAsync();
foreach (var specialChapter in specialChapters)
// Move the special chapters from the old volume to the new Volume
foreach (var specialChapter in chapters)
{
// Update the VolumeId on the existing progress event
specialChapter.VolumeId = newVolume.Id;
//UpdateCoverImage(directoryService, logger, specialChapter, extension, newVolume);
}
await dataContext.SaveChangesAsync();
}
}
// Save changes after processing all series
@@ -121,10 +141,6 @@ public static class MigrateMixedSpecials
await dataContext.SaveChangesAsync();
}
// Update all Volumes with Name as "0" -> Special
logger.LogCritical("Updating all Volumes with Name 0 to SpecialNumber");
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
@@ -137,4 +153,25 @@ public static class MigrateMixedSpecials
logger.LogCritical(
"Running ManualMigrateMixedSpecials migration - Completed. This is not an error");
}
private static void UpdateCoverImage(IDirectoryService directoryService, ILogger<Program> logger, Chapter specialChapter,
string extension, Volume newVolume)
{
// We need to migrate cover images as well
var existingCover = ImageService.GetChapterFormat(specialChapter.Id, specialChapter.VolumeId) + extension;
var newCover = ImageService.GetChapterFormat(specialChapter.Id, newVolume.Id) + extension;
try
{
if (!specialChapter.CoverImageLocked)
{
// First rename existing cover
File.Copy(Path.Join(directoryService.CoverImageDirectory, existingCover), Path.Join(directoryService.CoverImageDirectory, newCover));
specialChapter.CoverImage = newCover;
}
} catch (Exception ex)
{
logger.LogError(ex, "Unable to rename {OldCover} to {NewCover}, this cover will need manual refresh", existingCover, newCover);
}
}
}
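
Both MigrateMixedSpecials and MigrateLooseLeafChapters move chapters by reassigning their VolumeId on tracked entities and letting EF Core's change tracker emit the UPDATEs on SaveChangesAsync. Below is a minimal sketch of that reparenting step as a standalone fragment; the method and parameter names are illustrative, and it assumes the same using directives and namespace as the file above.

// Illustrative fragment - assumes the usings and namespace of MigrateMixedSpecials above.
private static async Task MoveSpecialChapters(DataContext dataContext, int oldVolumeId, Volume newVolume)
{
    // Entities loaded through the DbContext are change-tracked, so setting the new
    // foreign key is enough; no explicit dataContext.Update() call is required.
    var chapters = await dataContext.Chapter
        .Where(c => c.VolumeId == oldVolumeId && c.IsSpecial)
        .ToListAsync();

    foreach (var chapter in chapters)
    {
        chapter.VolumeId = newVolume.Id;
    }

    // One SaveChangesAsync() flushes the batched UPDATE statements.
    await dataContext.SaveChangesAsync();
}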

API/Data/ManualMigrations/MigrateProgressExport.cs

@@ -0,0 +1,123 @@
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Services;
using CsvHelper;
using CsvHelper.Configuration.Attributes;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
public class ProgressExport
{
[Name("Library Id")]
public int LibraryId { get; set; }
[Name("Library Name")]
public string LibraryName { get; set; }
[Name("Series Name")]
public string SeriesName { get; set; }
[Name("Volume Number")]
public string VolumeRange { get; set; }
[Name("Volume LookupName")]
public string VolumeLookupName { get; set; }
[Name("Chapter Number")]
public string ChapterRange { get; set; }
[Name("FileName")]
public string MangaFileName { get; set; }
[Name("FilePath")]
public string MangaFilePath { get; set; }
[Name("AppUser Name")]
public string AppUserName { get; set; }
[Name("AppUser Id")]
public int AppUserId { get; set; }
[Name("Pages Read")]
public int PagesRead { get; set; }
[Name("BookScrollId")]
public string BookScrollId { get; set; }
[Name("Progress Created")]
public DateTime Created { get; set; }
[Name("Progress LastModified")]
public DateTime LastModified { get; set; }
}
/// <summary>
/// v0.8.0 - Progress is extracted and saved in a csv
/// </summary>
public static class MigrateProgressExport
{
public static async Task Migrate(DataContext dataContext, IDirectoryService directoryService, ILogger<Program> logger)
{
try
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateProgressExport"))
{
return;
}
logger.LogCritical(
"Running MigrateProgressExport migration - Please be patient, this may take some time. This is not an error");
var data = await dataContext.AppUserProgresses
.Join(dataContext.Series, progress => progress.SeriesId, series => series.Id, (progress, series) => new { progress, series })
.Join(dataContext.Volume, ps => ps.progress.VolumeId, volume => volume.Id, (ps, volume) => new { ps.progress, ps.series, volume })
.Join(dataContext.Chapter, psv => psv.progress.ChapterId, chapter => chapter.Id, (psv, chapter) => new { psv.progress, psv.series, psv.volume, chapter })
.Join(dataContext.MangaFile, psvc => psvc.chapter.Id, mangaFile => mangaFile.ChapterId, (psvc, mangaFile) => new { psvc.progress, psvc.series, psvc.volume, psvc.chapter, mangaFile })
.Join(dataContext.AppUser, psvcm => psvcm.progress.AppUserId, appUser => appUser.Id, (psvcm, appUser) => new
{
LibraryId = psvcm.series.LibraryId,
LibraryName = psvcm.series.Library.Name,
SeriesName = psvcm.series.Name,
VolumeRange = psvcm.volume.MinNumber + "-" + psvcm.volume.MaxNumber,
VolumeLookupName = psvcm.volume.Name,
ChapterRange = psvcm.chapter.Range,
MangaFileName = psvcm.mangaFile.FileName,
MangaFilePath = psvcm.mangaFile.FilePath,
AppUserName = appUser.UserName,
AppUserId = appUser.Id,
PagesRead = psvcm.progress.PagesRead,
BookScrollId = psvcm.progress.BookScrollId,
ProgressCreated = psvcm.progress.Created,
ProgressLastModified = psvcm.progress.LastModified
}).ToListAsync();
// Write the mapped data to a CSV file
await using var writer = new StreamWriter(Path.Join(directoryService.ConfigDirectory, "progress_export.csv"));
await using var csv = new CsvWriter(writer, CultureInfo.InvariantCulture);
await csv.WriteRecordsAsync(data);
logger.LogCritical(
"Running MigrateProgressExport migration - Completed. This is not an error");
}
catch (Exception ex)
{
// On new installs, the db isn't setup yet, so this has nothing to do
}
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "MigrateProgressExport",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
}
}
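
The export lands in the config directory as progress_export.csv, one row per progress record. Below is a minimal sketch of reading it back with CsvHelper for inspection; ProgressExportReader, Dump, and the configDirectory parameter are illustrative names, and in Kavita the directory would come from IDirectoryService.ConfigDirectory.

using System;
using System.Globalization;
using System.IO;
using System.Linq;
using CsvHelper;

// Illustrative reader only - not part of this commit.
public static class ProgressExportReader
{
    public static void Dump(string configDirectory)
    {
        using var reader = new StreamReader(Path.Join(configDirectory, "progress_export.csv"));
        using var csv = new CsvReader(reader, CultureInfo.InvariantCulture);

        // GetRecords<dynamic>() keys each row by whatever header names are in the file,
        // so it works without depending on the ProgressExport [Name] attributes.
        var rows = csv.GetRecords<dynamic>().ToList();
        Console.WriteLine($"progress_export.csv contains {rows.Count} progress rows");
    }
}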