Scanner Fixes (#2800)

Joe Milazzo 2024-03-19 18:48:42 -05:00 committed by GitHub
parent 123917fbec
commit 8167fc5a4f
34 changed files with 462 additions and 203 deletions

View file

@@ -166,11 +166,36 @@ public class LibraryController : BaseApiController
return Ok(_directoryService.ListDirectory(path));
}
/// <summary>
/// Return a specific library
/// </summary>
/// <returns></returns>
[Authorize(Policy = "RequireAdminRole")]
[HttpGet]
public async Task<ActionResult<LibraryDto?>> GetLibrary(int libraryId)
{
var username = User.GetUsername();
if (string.IsNullOrEmpty(username)) return Unauthorized();
var cacheKey = CacheKey + username;
var result = await _libraryCacheProvider.GetAsync<IEnumerable<LibraryDto>>(cacheKey);
if (result.HasValue)
{
return Ok(result.Value.FirstOrDefault(l => l.Id == libraryId));
}
var ret = (await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(username)).ToList();
await _libraryCacheProvider.SetAsync(cacheKey, ret, TimeSpan.FromHours(24));
_logger.LogDebug("Caching libraries for {Key}", cacheKey);
return Ok(ret.Find(l => l.Id == libraryId));
}
/// <summary>
/// Return all libraries in the Server
/// </summary>
/// <returns></returns>
[HttpGet]
[HttpGet("libraries")]
public async Task<ActionResult<IEnumerable<LibraryDto>>> GetLibraries()
{
var username = User.GetUsername();
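
The new GetLibrary endpoint is a cache-aside read: check a per-user key, fall back to the repository on a miss, and store the result with a 24-hour TTL. A minimal standalone sketch of that shape using IMemoryCache (not necessarily the caching provider Kavita uses; FetchLibrariesAsync is a hypothetical stand-in for the repository call):

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Memory;

public class LibraryLookup
{
    private const string CacheKey = "library_";
    private readonly IMemoryCache _cache = new MemoryCache(new MemoryCacheOptions());

    // Hypothetical stand-in for the repository call.
    private static Task<List<string>> FetchLibrariesAsync(string username) =>
        Task.FromResult(new List<string> { "Manga", "Comics" });

    public async Task<List<string>> GetLibrariesAsync(string username)
    {
        var cacheKey = CacheKey + username; // one cache entry per user
        return await _cache.GetOrCreateAsync(cacheKey, async entry =>
        {
            entry.AbsoluteExpirationRelativeToNow = TimeSpan.FromHours(24);
            return await FetchLibrariesAsync(username); // cache miss: hit the source of truth
        }) ?? new List<string>();
    }
}
```
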

View file

@@ -221,18 +221,18 @@ public class ServerController : BaseApiController
/// </summary>
/// <returns></returns>
[HttpGet("jobs")]
public ActionResult<IEnumerable<JobDto>> GetJobs()
public async Task<ActionResult<IEnumerable<JobDto>>> GetJobs()
{
var recurringJobs = JobStorage.Current.GetConnection().GetRecurringJobs().Select(
dto =>
new JobDto() {
Id = dto.Id,
Title = dto.Id.Replace('-', ' '),
Cron = dto.Cron,
LastExecutionUtc = dto.LastExecution.HasValue ? new DateTime(dto.LastExecution.Value.Ticks, DateTimeKind.Utc) : null
});
var jobDtoTasks = JobStorage.Current.GetConnection().GetRecurringJobs().Select(async dto =>
new JobDto()
{
Id = dto.Id,
Title = await _localizationService.Translate(User.GetUserId(), dto.Id),
Cron = dto.Cron,
LastExecutionUtc = dto.LastExecution.HasValue ? new DateTime(dto.LastExecution.Value.Ticks, DateTimeKind.Utc) : null
});
return Ok(recurringJobs);
return Ok(await Task.WhenAll(jobDtoTasks));
}
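
GetJobs becomes async because each job title now goes through the localization service. Select with an async lambda yields IEnumerable<Task<JobDto>> rather than materialized DTOs, and Task.WhenAll awaits them all. A self-contained sketch of the same shape (JobDto and TranslateAsync are simplified stand-ins):

```csharp
using System.Linq;
using System.Threading.Tasks;

public record JobDto(string Id, string Title);

public static class JobMapping
{
    // Hypothetical stand-in for _localizationService.Translate.
    private static Task<string> TranslateAsync(string key) => Task.FromResult(key.Replace('-', ' '));

    public static async Task<JobDto[]> MapAsync(string[] jobIds)
    {
        // Select with an async lambda produces IEnumerable<Task<JobDto>>...
        var tasks = jobIds.Select(async id => new JobDto(id, await TranslateAsync(id)));
        // ...which Task.WhenAll turns into a single awaitable JobDto[].
        return await Task.WhenAll(tasks);
    }
}
```
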
/// <summary>

View file

@@ -0,0 +1,45 @@
using System;
using System.Threading.Tasks;
using API.Entities;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.8.0 ensured that MangaFile Path is normalized. This will normalize existing data to avoid churn.
/// </summary>
public static class MigrateMangaFilePath
{
public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateMangaFilePath"))
{
return;
}
logger.LogCritical(
"Running MigrateMangaFilePath migration - Please be patient, this may take some time. This is not an error");
foreach(var file in dataContext.MangaFile)
{
file.FilePath = Parser.NormalizePath(file.FilePath);
}
await dataContext.SaveChangesAsync();
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "MigrateMangaFilePath",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
logger.LogCritical(
"Running MigrateMangaFilePath migration - Completed. This is not an error");
}
}
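
The migration assumes Parser.NormalizePath maps every stored path to one canonical form so that later string comparisons on FilePath succeed regardless of which separators the OS produced. A plausible sketch of such a normalizer (not Kavita's actual implementation):

```csharp
using System.Text.RegularExpressions;

public static class PathNormalizer
{
    private static readonly Regex Separators = new(@"[\\/]+", RegexOptions.Compiled);

    // Collapse runs of forward/back slashes into single forward slashes so
    // @"C:\Manga\\One Piece" and "C:/Manga/One Piece" compare equal.
    public static string Normalize(string path) =>
        string.IsNullOrEmpty(path) ? path : Separators.Replace(path, "/");
}
```
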

View file

@@ -20,6 +20,7 @@ public static class MigrateWantToReadExport
{
try
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateWantToReadExport"))
{
return;

View file

@@ -1,4 +1,5 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using API.Entities;
namespace API.Helpers;
@@ -46,6 +47,7 @@ public static class OrderableHelper
public static void ReorderItems(List<ReadingListItem> items, int readingListItemId, int toPosition)
{
if (toPosition < 0) throw new ArgumentException("toPosition cannot be less than 0");
var item = items.Find(r => r.Id == readingListItemId);
if (item != null)
{

View file

@@ -200,8 +200,19 @@
"volume-num": "Volume {0}",
"book-num": "Book {0}",
"issue-num": "Issue {0}{1}",
"chapter-num": "Chapter {0}"
"chapter-num": "Chapter {0}",
"check-updates": "Check Updates",
"license-check": "License Check",
"process-scrobbling-events": "Process Scrobbling Events",
"report-stats": "Report Stats",
"check-scrobbling-tokens": "Check Scrobbling Tokens",
"cleanup": "Cleanup",
"process-processed-scrobbling-events": "Process Processed Scrobbling Events",
"remove-from-want-to-read": "Want to Read Cleanup",
"scan-libraries": "Scan Libraries",
"kavita+-data-refresh": "Kavita+ Data Refresh",
"backup": "Backup",
"update-yearly-stats": "Update Yearly Stats"
}

View file

@@ -88,7 +88,7 @@ public class Program
}
// Run any manual migrations that need to happen before the actual EF Core migrations
try
if (isDbCreated)
{
Task.Run(async () =>
{
@@ -96,17 +96,22 @@ public class Program
logger.LogInformation("Running Migrations");
// v0.7.14
await MigrateWantToReadExport.Migrate(context, directoryService, logger);
try
{
await MigrateWantToReadExport.Migrate(context, directoryService, logger);
}
catch (Exception ex)
{
/* Swallow */
}
await unitOfWork.CommitAsync();
logger.LogInformation("Running Migrations - complete");
}).GetAwaiter()
.GetResult();
}
catch (Exception ex)
{
logger.LogCritical(ex, "An error occurred during migration");
}
await context.Database.MigrateAsync();
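
The startup path bridges sync and async with Task.Run(...).GetAwaiter().GetResult(), since this code runs before any async context exists, and the new per-migration try/catch lets one failing pre-migration be skipped instead of aborting boot. A compact sketch of that pattern, assuming a hypothetical list of migration delegates:

```csharp
using System;
using System.Threading.Tasks;

public static class StartupMigrations
{
    public static void RunBlocking(Func<Task>[] migrations)
    {
        Task.Run(async () =>
        {
            foreach (var migrate in migrations)
            {
                try
                {
                    await migrate();
                }
                catch (Exception)
                {
                    // Swallow, as in the diff: a failed pre-migration should not block boot.
                }
            }
        }).GetAwaiter().GetResult(); // block until all pre-migrations finish
    }
}
```
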

View file

@@ -353,7 +353,15 @@ public class ArchiveService : IArchiveService
{
var tempPath = Path.Join(tempLocation, _directoryService.FileSystem.Path.GetFileNameWithoutExtension(_directoryService.FileSystem.FileInfo.New(path).Name));
progressCallback(Tuple.Create(_directoryService.FileSystem.FileInfo.New(path).Name, (1.0f * totalFiles) / count));
ExtractArchive(path, tempPath);
if (Tasks.Scanner.Parser.Parser.IsArchive(path))
{
ExtractArchive(path, tempPath);
}
else
{
_directoryService.CopyFileToDirectory(path, tempPath);
}
count++;
}
}
@@ -392,7 +400,7 @@ public class ArchiveService : IArchiveService
return false;
}
if (Tasks.Scanner.Parser.Parser.IsArchive(archivePath) || Tasks.Scanner.Parser.Parser.IsEpub(archivePath)) return true;
if (Tasks.Scanner.Parser.Parser.IsArchive(archivePath)) return true;
_logger.LogWarning("Archive {ArchivePath} is not a valid archive", archivePath);
return false;
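
The extraction loop now copies non-archive files (for example PDFs or loose images) into the temp directory instead of attempting to unzip them. IsArchive is presumably an extension check along these lines; a sketch, not the project's actual parser:

```csharp
using System;
using System.Linq;

public static class ArchiveCheck
{
    // Assumed set of archive extensions; the real parser may accept more or fewer.
    private static readonly string[] ArchiveExtensions = { ".zip", ".cbz", ".rar", ".cbr", ".7z", ".cb7" };

    public static bool IsArchive(string path) =>
        ArchiveExtensions.Any(ext => path.EndsWith(ext, StringComparison.OrdinalIgnoreCase));
}
```
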

View file

@@ -781,7 +781,7 @@ public class BookService : IBookService
/// <returns></returns>
public ParserInfo? ParseInfo(string filePath)
{
if (!Parser.IsEpub(filePath)) return null;
if (!Parser.IsEpub(filePath) || !_directoryService.FileSystem.File.Exists(filePath)) return null;
try
{
@@ -848,7 +848,7 @@ public class BookService : IBookService
Format = MangaFormat.Epub,
Filename = Path.GetFileName(filePath),
Title = specialName?.Trim() ?? string.Empty,
FullFilePath = filePath,
FullFilePath = Parser.NormalizePath(filePath),
IsSpecial = false,
Series = series.Trim(),
SeriesSort = series.Trim(),
@@ -870,7 +870,7 @@ public class BookService : IBookService
Format = MangaFormat.Epub,
Filename = Path.GetFileName(filePath),
Title = epubBook.Title.Trim(),
FullFilePath = filePath,
FullFilePath = Parser.NormalizePath(filePath),
IsSpecial = false,
Series = epubBook.Title.Trim(),
Volumes = Parser.LooseLeafVolume,

View file

@@ -440,22 +440,25 @@ public class ScrobblingService : IScrobblingService
// Might want to log this under ScrobbleError
if (response.ErrorMessage != null && response.ErrorMessage.Contains("Too Many Requests"))
{
_logger.LogInformation("Hit Too many requests, sleeping to regain requests");
_logger.LogInformation("Hit Too many requests, sleeping to regain requests and retrying");
await Task.Delay(TimeSpan.FromMinutes(10));
} else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unauthorized"))
return await PostScrobbleUpdate(data, license, evt);
}
if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unauthorized"))
{
_logger.LogCritical("Kavita+ responded with Unauthorized. Please check your subscription");
await _licenseService.HasActiveLicense(true);
evt.IsErrored = true;
evt.ErrorDetails = "Kavita+ subscription no longer active";
throw new KavitaException("Kavita+ responded with Unauthorized. Please check your subscription");
} else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Access token is invalid"))
}
if (response.ErrorMessage != null && response.ErrorMessage.Contains("Access token is invalid"))
{
evt.IsErrored = true;
evt.ErrorDetails = AccessTokenErrorMessage;
throw new KavitaException("Access token is invalid");
}
else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unknown Series"))
if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unknown Series"))
{
// Log the Series name and Id in ScrobbleErrors
_logger.LogInformation("Kavita+ was unable to match the series");
@@ -490,10 +493,6 @@ public class ScrobblingService : IScrobblingService
evt.IsErrored = true;
evt.ErrorDetails = "Review was unable to be saved due to upstream requirements";
}
evt.IsErrored = true;
_logger.LogError("Scrobbling failed due to {ErrorMessage}: {SeriesName}", response.ErrorMessage, data.SeriesName);
throw new KavitaException($"Scrobbling failed due to {response.ErrorMessage}: {data.SeriesName}");
}
return response.RateLeft;
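
On "Too Many Requests" the handler now sleeps ten minutes and retries by calling PostScrobbleUpdate recursively, which places no cap on attempts if the upstream stays rate-limited. A bounded variant of the same idea (all names hypothetical):

```csharp
using System;
using System.Threading.Tasks;

public static class RetryHelper
{
    // Retry an operation that signals rate limiting, waiting between attempts
    // and giving up after maxAttempts rather than recursing indefinitely.
    public static async Task<T> WithRateLimitRetry<T>(
        Func<Task<T>> action,
        Func<T, bool> isRateLimited,
        int maxAttempts = 3,
        TimeSpan? delay = null)
    {
        var wait = delay ?? TimeSpan.FromMinutes(10);
        for (var attempt = 1; ; attempt++)
        {
            var result = await action();
            if (!isRateLimited(result) || attempt >= maxAttempts) return result;
            await Task.Delay(wait);
        }
    }
}
```
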

View file

@@ -38,11 +38,12 @@ public class ReadingItemService : IReadingItemService
_directoryService = directoryService;
_logger = logger;
_comicVineParser = new ComicVineParser(directoryService);
_imageParser = new ImageParser(directoryService);
_bookParser = new BookParser(directoryService, bookService, _basicParser);
_pdfParser = new PdfParser(directoryService);
_basicParser = new BasicParser(directoryService, _imageParser);
_bookParser = new BookParser(directoryService, bookService, _basicParser);
_comicVineParser = new ComicVineParser(directoryService);
_pdfParser = new PdfParser(directoryService);
}
/// <summary>
@@ -73,14 +74,22 @@ public class ReadingItemService : IReadingItemService
/// <param name="type">Library type to determine parsing to perform</param>
public ParserInfo? ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
{
var info = Parse(path, rootPath, libraryRoot, type);
if (info == null)
try
{
_logger.LogError("Unable to parse any meaningful information out of file {FilePath}", path);
var info = Parse(path, rootPath, libraryRoot, type);
if (info == null)
{
_logger.LogError("Unable to parse any meaningful information out of file {FilePath}", path);
return null;
}
return info;
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when parsing file {FilePath}", path);
return null;
}
return info;
}
/// <summary>

View file

@@ -578,6 +578,13 @@ public class SeriesService : ISeriesService
return !chapter.IsSpecial && chapter.MinNumber.IsNot(Parser.DefaultChapterNumber);
}
/// <summary>
/// Determines whether the volume should be included and, if so, renames it for the given library type
/// </summary>
/// <param name="volume"></param>
/// <param name="libraryType"></param>
/// <param name="volumeLabel"></param>
/// <returns></returns>
public static bool RenameVolumeName(VolumeDto volume, LibraryType libraryType, string volumeLabel = "Volume")
{
if (libraryType is LibraryType.Book or LibraryType.LightNovel)

View file

@@ -336,7 +336,7 @@ public class TaskScheduler : ITaskScheduler
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force, true));
// When we do a scan, force cache to re-unpack in case page numbers change
BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheAndTempDirectories());
BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory());
}
public void TurnOnScrobbling(int userId = 0)

View file

@@ -104,8 +104,13 @@ public class BackupService : IBackupService
_directoryService.ExistOrCreate(tempDirectory);
_directoryService.ClearDirectory(tempDirectory);
await SendProgress(0.1F, "Copying config files");
_directoryService.CopyFilesToDirectory(
_backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)).ToList(), tempDirectory);
_backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)), tempDirectory);
// Copy any CSV files, as those are used for manual migrations
_directoryService.CopyFilesToDirectory(
_directoryService.GetFilesWithCertainExtensions(_directoryService.ConfigDirectory, @"\.csv"), tempDirectory);
await SendProgress(0.2F, "Copying logs");
CopyLogsToBackupDirectory(tempDirectory);
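
Backing up the CSV exports matters because the manual migrations use them. A sketch of selecting files by an extension pattern and copying them, assuming nothing about IDirectoryService internals:

```csharp
using System.IO;
using System.Text.RegularExpressions;

public static class BackupCopy
{
    // Copy every file in src whose extension matches the pattern (e.g. @"\.csv") into dest.
    public static void CopyMatching(string src, string dest, string extensionPattern)
    {
        Directory.CreateDirectory(dest);
        foreach (var file in Directory.EnumerateFiles(src))
        {
            if (Regex.IsMatch(Path.GetExtension(file), extensionPattern, RegexOptions.IgnoreCase))
            {
                File.Copy(file, Path.Combine(dest, Path.GetFileName(file)), overwrite: true);
            }
        }
    }
}
```
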

View file

@@ -20,6 +20,7 @@ public interface ICleanupService
Task Cleanup();
Task CleanupDbEntries();
void CleanupCacheAndTempDirectories();
void CleanupCacheDirectory();
Task DeleteSeriesCoverImages();
Task DeleteChapterCoverImages();
Task DeleteTagCoverImages();
@@ -178,6 +179,23 @@ public class CleanupService : ICleanupService
_logger.LogInformation("Cache and temp directory purged");
}
public void CleanupCacheDirectory()
{
_logger.LogInformation("Performing cleanup of Cache directories");
_directoryService.ExistOrCreate(_directoryService.CacheDirectory);
try
{
_directoryService.ClearDirectory(_directoryService.CacheDirectory);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup");
}
_logger.LogInformation("Cache directory purged");
}
/// <summary>
/// Removes database backups older than the configured number of backup days. If all backups are older than that, only the latest is kept.
/// </summary>

View file

@@ -170,6 +170,7 @@ public class ParseScannedFiles
library.Folders.FirstOrDefault(f =>
Parser.Parser.NormalizePath(folderPath).Contains(Parser.Parser.NormalizePath(f.Path)))?.Path ??
folderPath;
if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
{
result.Add(new ScanResult()
@@ -313,6 +314,7 @@ public class ParseScannedFiles
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", library.Name, ProgressEventType.Started));
var processedScannedSeries = new List<ScannedSeriesResult>();
//var processedScannedSeries = new ConcurrentBag<ScannedSeriesResult>();
foreach (var folderPath in folders)
{
try
@@ -321,45 +323,15 @@ public class ParseScannedFiles
foreach (var scanResult in scanResults)
{
// scanResult is updated with the parsed infos
await ProcessScanResult(scanResult, seriesPaths, library);
// We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
// Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
// Combine everything into scannedSeries
foreach (var info in scanResult.ParserInfos)
{
try
{
TrackSeries(scannedSeries, info);
}
catch (Exception ex)
{
_logger.LogError(ex,
"[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
info?.FullFilePath);
}
}
foreach (var series in scannedSeries.Keys)
{
if (scannedSeries[series].Count <= 0) continue;
UpdateSortOrder(scannedSeries, series);
processedScannedSeries.Add(new ScannedSeriesResult()
{
HasChanged = scanResult.HasChanged,
ParsedSeries = series,
ParsedInfos = scannedSeries[series]
});
}
await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
}
// This reduced a 1.1k series networked scan by a little more than 1 hour, but the order in which series were added to Kavita was not alphabetical
// await Task.WhenAll(scanResults.Select(async scanResult =>
// {
// await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
// }));
}
catch (ArgumentException ex)
{
@@ -369,10 +341,52 @@ public class ParseScannedFiles
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", library.Name, ProgressEventType.Ended));
return processedScannedSeries;
return processedScannedSeries.ToList();
}
private async Task ParseAndTrackSeries(Library library, IDictionary<string, IList<SeriesModified>> seriesPaths, ScanResult scanResult,
List<ScannedSeriesResult> processedScannedSeries)
{
// scanResult is updated with the parsed infos
await ProcessScanResult(scanResult, seriesPaths, library); // NOTE: This may be able to be parallelized
// We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
// Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
// Combine everything into scannedSeries
foreach (var info in scanResult.ParserInfos)
{
try
{
TrackSeries(scannedSeries, info);
}
catch (Exception ex)
{
_logger.LogError(ex,
"[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
info?.FullFilePath);
}
}
foreach (var series in scannedSeries.Keys)
{
if (scannedSeries[series].Count <= 0) continue;
UpdateSortOrder(scannedSeries, series);
processedScannedSeries.Add(new ScannedSeriesResult()
{
HasChanged = scanResult.HasChanged,
ParsedSeries = series,
ParsedInfos = scannedSeries[series]
});
}
}
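
The commented-out Task.WhenAll records the trade-off: parallelizing per-folder work cut more than an hour off a 1.1k-series networked scan, but series then completed out of alphabetical order. One way to keep both properties, sketched below with a hypothetical helper, is to await all handlers and flatten their results in input order (this helps only when ordering matters for the collected results, not for side effects inside each handler):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

public static class OrderedParallel
{
    // Run handlers concurrently but flatten results in input order:
    // Task.WhenAll preserves the order of its input sequence in the result array.
    public static async Task<List<TResult>> MapAsync<TSource, TResult>(
        IEnumerable<TSource> items,
        Func<TSource, Task<List<TResult>>> handler)
    {
        var perItem = await Task.WhenAll(items.Select(handler));
        return perItem.SelectMany(r => r).ToList();
    }
}
```
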
/// <summary>
/// For a given ScanResult, sets the ParserInfos on the result
/// </summary>

View file

@@ -27,7 +27,7 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Parser.RemoveExtensionIfSupported(fileName),
FullFilePath = filePath,
FullFilePath = Parser.NormalizePath(filePath),
Series = string.Empty,
ComicInfo = comicInfo
};

View file

@@ -3,13 +3,15 @@ using API.Entities.Enums;
namespace API.Services.Tasks.Scanner.Parser;
public class BookParser(IDirectoryService directoryService, IBookService bookService, IDefaultParser basicParser) : DefaultParser(directoryService)
public class BookParser(IDirectoryService directoryService, IBookService bookService, BasicParser basicParser) : DefaultParser(directoryService)
{
public override ParserInfo Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo comicInfo = null)
{
var info = bookService.ParseInfo(filePath);
if (info == null) return null;
info.ComicInfo = comicInfo;
// This catches when original library type is Manga/Comic and when parsing with non
if (Parser.ParseVolume(info.Series) != Parser.LooseLeafVolume) // Shouldn't this be info.Volume != DefaultVolume?
{

View file

@@ -34,7 +34,7 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Parser.RemoveExtensionIfSupported(fileName)!,
FullFilePath = filePath,
FullFilePath = Parser.NormalizePath(filePath),
Series = string.Empty,
ComicInfo = comicInfo,
Chapters = Parser.ParseComicChapter(fileName),
@@ -102,4 +102,33 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
{
return type == LibraryType.ComicVine;
}
private new static void UpdateFromComicInfo(ParserInfo info)
{
if (info.ComicInfo == null) return;
if (!string.IsNullOrEmpty(info.ComicInfo.Volume))
{
info.Volumes = info.ComicInfo.Volume;
}
if (string.IsNullOrEmpty(info.LocalizedSeries) && !string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
{
info.LocalizedSeries = info.ComicInfo.LocalizedSeries.Trim();
}
if (!string.IsNullOrEmpty(info.ComicInfo.Number))
{
info.Chapters = info.ComicInfo.Number;
if (info.IsSpecial && Parser.DefaultChapter != info.Chapters)
{
info.IsSpecial = false;
info.Volumes = $"{Parser.SpecialVolumeNumber}";
}
}
// Patch in SeriesSort from ComicInfo
if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
{
info.SeriesSort = info.ComicInfo.TitleSort.Trim();
}
}
}
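
UpdateFromComicInfo here is declared private new static: it hides, rather than overrides, the base parser's member of the same name, so only code that binds through ComicVineParser reaches the specialized version. In miniature (hypothetical types):

```csharp
public class BaseParser
{
    protected static void UpdateFromComicInfo() { /* generic handling */ }
}

public class VineParser : BaseParser
{
    // 'new' hides the base member for lookups through VineParser;
    // code that binds through BaseParser still sees the original.
    private new static void UpdateFromComicInfo() { /* ComicVine-specific handling */ }

    public static void Run() => UpdateFromComicInfo(); // resolves to the hidden (new) member
}
```
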

View file

@@ -21,7 +21,7 @@ public class ImageParser(IDirectoryService directoryService) : DefaultParser(dir
ComicInfo = comicInfo,
Format = MangaFormat.Image,
Filename = Path.GetFileName(filePath),
FullFilePath = filePath,
FullFilePath = Parser.NormalizePath(filePath),
Title = fileName,
};
ParseFromFallbackFolders(filePath, libraryRoot, LibraryType.Image, ref ret);

View file

@@ -14,7 +14,7 @@ public class PdfParser(IDirectoryService directoryService) : DefaultParser(direc
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Parser.RemoveExtensionIfSupported(fileName)!,
FullFilePath = filePath,
FullFilePath = Parser.NormalizePath(filePath),
Series = string.Empty,
ComicInfo = comicInfo,
Chapters = type == LibraryType.Comic

View file

@@ -203,15 +203,16 @@ public class ProcessSeries : IProcessSeries
// Process reading list after commit as we need to commit per list
BackgroundJob.Enqueue(() => _readingListService.CreateReadingListsFromSeries(library.Id, series.Id));
await _readingListService.CreateReadingListsFromSeries(library.Id, series.Id);
if (seriesAdded)
{
// See if any recommendations can link up to the series and pre-fetch external metadata for the series
_logger.LogInformation("Linking up External Recommendations new series (if applicable)");
BackgroundJob.Enqueue(() =>
_externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
// BackgroundJob.Enqueue(() =>
// _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
await _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type);
await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded,
MessageFactory.SeriesAddedEvent(series.Id, series.Name, series.LibraryId), false);
@@ -232,9 +233,11 @@ public class ProcessSeries : IProcessSeries
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
await _metadataService.GenerateCoversForSeries(series, settings.EncodeMediaAs, settings.CoverImageSize);
BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
// BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
await _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate);
}
private async Task ReportDuplicateSeriesLookup(Library library, ParserInfo firstInfo, Exception ex)
{
var seriesCollisions = await _unitOfWork.SeriesRepository.GetAllSeriesByAnyName(firstInfo.LocalizedSeries, string.Empty, library.Id, firstInfo.Format);
@@ -581,7 +584,7 @@ public class ProcessSeries : IProcessSeries
{
// TODO: Push this to UI in some way
if (!ex.Message.Equals("Sequence contains more than one matching element")) throw;
_logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
_logger.LogCritical(ex, "[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
throw new KavitaException(
$"Kavita found corrupted volume entries on {series.Name}. Please delete the series from Kavita via UI and rescan");
}
@@ -705,7 +708,7 @@ public class ProcessSeries : IProcessSeries
{
// Ensure we remove any files that no longer exist AND order
existingChapter.Files = existingChapter.Files
.Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
.Where(f => parsedInfos.Any(p => Parser.Parser.NormalizePath(p.FullFilePath) == Parser.Parser.NormalizePath(f.FilePath)))
.OrderByNatural(f => f.FilePath).ToList();
existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages);
}

View file

@@ -325,7 +325,7 @@ public class ScannerService : IScannerService
await _metadataService.RemoveAbandonedMetadataKeys();
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(existingChapterIdsToClean));
BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.TempDirectory));
BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
}
private void TrackFoundSeriesAndFiles(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, IList<ScannedSeriesResult> seenSeries)
@@ -485,7 +485,8 @@ public class ScannerService : IScannerService
public async Task ScanLibrary(int libraryId, bool forceUpdate = false, bool isSingleScan = true)
{
var sw = Stopwatch.StartNew();
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId,
LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
var libraryFolderPaths = library!.Folders.Select(fp => fp.Path).ToList();
if (!await CheckMounts(library.Name, libraryFolderPaths)) return;
@@ -501,48 +502,16 @@ public class ScannerService : IScannerService
}
var totalFiles = 0;
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
var (scanElapsedTime, processedSeries) = await ScanFiles(library, libraryFolderPaths,
shouldUseLibraryScan, forceUpdate);
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
TrackFoundSeriesAndFiles(parsedSeries, processedSeries);
// We need to remove any keys where there is no actual parser info
var toProcess = parsedSeries.Keys
.Where(k => parsedSeries[k].Any() && !string.IsNullOrEmpty(parsedSeries[k][0].Filename))
.ToList();
var totalFiles = await ProcessParsedSeries(forceUpdate, parsedSeries, library, scanElapsedTime);
if (toProcess.Count > 0)
{
// This grabs all the shared entities, like tags, genres, and people. How to avoid blocking access will be solved later in this refactor.
await _processSeries.Prime();
}
var tasks = new List<Task>();
foreach (var pSeries in toProcess)
{
totalFiles += parsedSeries[pSeries].Count;
//tasks.Add();
await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate);
}
//await Task.WhenAll(tasks);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
_logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
var time = DateTime.Now;
foreach (var folderPath in library.Folders)
{
folderPath.UpdateLastScanned(time);
}
library.UpdateLastScanned(time);
UpdateLastScanned(library);
_unitOfWork.LibraryRepository.Update(library);
@@ -566,28 +535,7 @@ public class ScannerService : IScannerService
totalFiles, parsedSeries.Count, sw.ElapsedMilliseconds, library.Name);
}
try
{
// Could I delete anything in a Library's Series where the LastScan date is before scanStart?
// NOTE: This implementation is expensive
_logger.LogDebug("[ScannerService] Removing Series that were not found during the scan");
var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);
_logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}",
removedSeries.Count, removedSeries.Select(s => s.Name));
_logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete");
await _unitOfWork.CommitAsync();
foreach (var s in removedSeries)
{
await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false);
}
}
catch (Exception ex)
{
_logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
}
await RemoveSeriesNotFound(parsedSeries, library);
}
else
{
@@ -598,7 +546,77 @@ public class ScannerService : IScannerService
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, string.Empty));
await _metadataService.RemoveAbandonedMetadataKeys();
BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.TempDirectory));
BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
}
private async Task RemoveSeriesNotFound(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library)
{
try
{
// Could I delete anything in a Library's Series where the LastScan date is before scanStart?
// NOTE: This implementation is expensive
_logger.LogDebug("[ScannerService] Removing Series that were not found during the scan");
var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);
_logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}",
removedSeries.Count, removedSeries.Select(s => s.Name));
_logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete");
await _unitOfWork.CommitAsync();
foreach (var s in removedSeries)
{
await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false);
}
}
catch (Exception ex)
{
_logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
}
}
private async Task<int> ProcessParsedSeries(bool forceUpdate, Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library, long scanElapsedTime)
{
var toProcess = parsedSeries.Keys
.Where(k => parsedSeries[k].Any() && !string.IsNullOrEmpty(parsedSeries[k][0].Filename))
.ToList();
if (toProcess.Count > 0)
{
// This grabs all the shared entities, like tags, genres, and people. How to avoid blocking access will be solved later in this refactor.
await _processSeries.Prime();
}
var totalFiles = 0;
//var tasks = new List<Task>();
foreach (var pSeries in toProcess)
{
totalFiles += parsedSeries[pSeries].Count;
//tasks.Add(_processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate));
// We can't do Task.WhenAll because of concurrency issues.
await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate);
}
//await Task.WhenAll(tasks);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
_logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
return totalFiles;
}
private static void UpdateLastScanned(Library library)
{
var time = DateTime.Now;
foreach (var folderPath in library.Folders)
{
folderPath.UpdateLastScanned(time);
}
library.UpdateLastScanned(time);
}
private async Task<Tuple<long, IList<ScannedSeriesResult>>> ScanFiles(Library library, IEnumerable<string> dirs,

View file

@@ -260,6 +260,7 @@ public class Startup
await MigrateLooseLeafChapters.Migrate(dataContext, unitOfWork, directoryService, logger);
await MigrateChapterFields.Migrate(dataContext, unitOfWork, logger);
await MigrateChapterRange.Migrate(dataContext, unitOfWork, logger);
await MigrateMangaFilePath.Migrate(dataContext, logger);
// Update the version in the DB after all migrations are run
var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion);