Scan Loop Fixes (#1572)

* Cleaned up some messaging in the scan loop so it carries more context

* Added response caching (1 minute) to Series Detail, due to the heavy nature of the call; see the caching sketch after this list.

* Refactored code so that series processing runs synchronously, as intended.

* Added a log to inform the user of volumes corrupted by buggy code in v0.5.6.

* Moved folder watching out of experimental; see the folder-watching sketch after this list

* Fixed an issue where empty folders could break the scan loop

* Another fix: when dates aren't valid, the scanner couldn't compute the proper minimum and would throw an exception (develop); see the date-guard sketch after this list

* Implemented the ability to edit a series' release year from the UI.

* Added a unit test for some new logic

* Code smells
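
For the Series Detail caching item above, here is a minimal sketch of one-minute response caching on an ASP.NET Core controller action. The controller and action names are illustrative assumptions, not necessarily Kavita's actual endpoint:

using Microsoft.AspNetCore.Mvc;

// Hypothetical controller shape for illustration; Kavita's real SeriesController may differ.
[ApiController]
[Route("api/[controller]")]
public class SeriesController : ControllerBase
{
    // Cache the response for 60 seconds because the underlying query is heavy.
    // VaryByQueryKeys gives each seriesId its own cache entry.
    [HttpGet("series-detail")]
    [ResponseCache(Duration = 60, VaryByQueryKeys = new[] { "seriesId" })]
    public ActionResult<object> GetSeriesDetail(int seriesId)
    {
        // Placeholder for the heavy aggregation the real endpoint performs.
        return Ok(new { seriesId });
    }
}

Note that VaryByQueryKeys only takes effect when the response caching middleware is registered (services.AddResponseCaching() and app.UseResponseCaching()).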
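
For the folder-watching item, a minimal sketch of watching a library root with .NET's FileSystemWatcher; the path and the event handling here are illustrative assumptions, not Kavita's implementation:

using System;
using System.IO;

// Illustrative only: watch a library root for file and folder changes.
var watcher = new FileSystemWatcher("/data/library")
{
    IncludeSubdirectories = true,
    NotifyFilter = NotifyFilters.FileName | NotifyFilters.DirectoryName | NotifyFilters.LastWrite,
    EnableRaisingEvents = true
};

// A real scanner would debounce these events and queue a folder scan
// instead of reacting to every individual change.
watcher.Created += (_, e) => Console.WriteLine($"Created: {e.FullPath}");
watcher.Changed += (_, e) => Console.WriteLine($"Changed: {e.FullPath}");
watcher.Deleted += (_, e) => Console.WriteLine($"Deleted: {e.FullPath}");
watcher.Renamed += (_, e) => Console.WriteLine($"Renamed: {e.OldFullPath} -> {e.FullPath}");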
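
For the invalid-date fix, the general shape of the guard is roughly the following; the Chapter record and property names are hypothetical stand-ins for the scanner's real model:

using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical chapter model for illustration.
public record Chapter(DateTime ReleaseDate);

public static class DateHelper
{
    // Min() on an empty sequence of DateTime throws InvalidOperationException, and
    // default/invalid dates (year 0001) skew the minimum, so filter first and
    // fall back to a sentinel when nothing valid remains.
    public static DateTime MinValidReleaseDate(IEnumerable<Chapter> chapters)
    {
        var validDates = chapters
            .Select(c => c.ReleaseDate)
            .Where(d => d.Year > 1)
            .ToList();

        return validDates.Count > 0 ? validDates.Min() : DateTime.MinValue;
    }
}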
Joe Milazzo 2022-10-05 21:30:37 -05:00 committed by GitHub
parent 78b043af74
commit 13226fecc1
16 changed files with 1867 additions and 77 deletions


@@ -81,6 +81,7 @@ public class ParseScannedFiles
if (scanDirectoryByDirectory)
{
// This is used in library scan, so we should check first for an ignore file and use that here as well
// TODO: We need to walk all folders up to the library root and check whether any of them have .kavitaignore files
var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(folderPath, DirectoryService.KavitaIgnoreFile);
var matcher = _directoryService.CreateMatcherFromFile(potentialIgnoreFile);
var directories = _directoryService.GetDirectories(folderPath, matcher).ToList();
@@ -228,62 +229,68 @@ public class ParseScannedFiles
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", libraryName, ProgressEventType.Started));
async Task ProcessFolder(IList<string> files, string folder)
{
var normalizedFolder = Parser.Parser.NormalizePath(folder);
if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedFolder, forceCheck))
{
var parsedInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo()
{
Series = fp.SeriesName,
Format = fp.Format,
}).ToList();
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
_logger.LogDebug("Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
return;
}
_logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent(folder, libraryName, ProgressEventType.Updated));
if (files.Count == 0)
{
_logger.LogInformation("[ScannerService] {Folder} is empty", folder);
return;
}
var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
var infos = files
.Select(file => _readingItemService.ParseFile(file, folder, libraryType))
.Where(info => info != null)
.ToList();
MergeLocalizedSeriesWithSeries(infos);
foreach (var info in infos)
{
try
{
TrackSeries(scannedSeries, info);
}
catch (Exception ex)
{
_logger.LogError(ex,
"There was an exception that occurred during tracking {FilePath}. Skipping this file",
info.FullFilePath);
}
}
foreach (var series in scannedSeries.Keys)
{
if (scannedSeries[series].Count > 0 && processSeriesInfos != null)
{
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(false, scannedSeries[series]));
}
}
}
foreach (var folderPath in folders)
{
try
{
await ProcessFiles(folderPath, isLibraryScan, seriesPaths, async (files, folder) =>
{
var normalizedFolder = Parser.Parser.NormalizePath(folder);
if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedFolder, forceCheck))
{
var parsedInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo()
{
Series = fp.SeriesName,
Format = fp.Format,
}).ToList();
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
_logger.LogDebug("Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
return;
}
_logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(folderPath, libraryName, ProgressEventType.Updated));
if (files.Count == 0)
{
_logger.LogInformation("[ScannerService] {Folder} is empty", folder);
return;
}
var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
var infos = files
.Select(file => _readingItemService.ParseFile(file, folderPath, libraryType))
.Where(info => info != null)
.ToList();
MergeLocalizedSeriesWithSeries(infos);
foreach (var info in infos)
{
try
{
TrackSeries(scannedSeries, info);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception that occurred during tracking {FilePath}. Skipping this file", info.FullFilePath);
}
}
// It would be really cool if we could emit an event when a folder hasn't changed so we don't parse everything, only the first item, to ensure we don't delete it
// Otherwise, we can do a last step in the DB where we validate that all files on disk exist and, if not, delete them (easy but slow)
foreach (var series in scannedSeries.Keys)
{
if (scannedSeries[series].Count > 0 && processSeriesInfos != null)
{
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(false, scannedSeries[series]));
}
}
}, forceCheck);
await ProcessFiles(folderPath, isLibraryScan, seriesPaths, ProcessFolder, forceCheck);
}
catch (ArgumentException ex)
{