Scanner Fixes (#2800)
This commit is contained in:
parent 123917fbec
commit 8167fc5a4f
34 changed files with 462 additions and 203 deletions
@@ -170,6 +170,7 @@ public class ParseScannedFiles
        library.Folders.FirstOrDefault(f =>
                Parser.Parser.NormalizePath(folderPath).Contains(Parser.Parser.NormalizePath(f.Path)))?.Path ??
            folderPath;
+
    if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
    {
        result.Add(new ScanResult()
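The expression above resolves which configured library folder owns folderPath; when no folder matches, FirstOrDefault returns null, ?.Path propagates the null, and ?? falls back to folderPath itself. Both sides are normalized before the Contains check so the match is separator-agnostic. The same pattern in miniature, with sample paths and a stand-in Normalize (hypothetical, not Kavita's actual Parser.NormalizePath):

    using System.Linq;

    // Stand-in for Parser.NormalizePath: unify separators before comparing.
    string Normalize(string p) => p.Replace('\\', '/');

    var folders = new[] { @"C:\Books\Manga", @"C:\Books\Comics" };
    var folderPath = "C:/Books/Manga/Nagatoro";

    // Without normalization the mixed separators would never match.
    var root = folders.FirstOrDefault(f => Normalize(folderPath).Contains(Normalize(f)))
               ?? folderPath; // no owning folder: treat the scanned folder as its own root
    // root == @"C:\Books\Manga"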
@@ -313,6 +314,7 @@ public class ParseScannedFiles
    await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", library.Name, ProgressEventType.Started));

    var processedScannedSeries = new List<ScannedSeriesResult>();
+   //var processedScannedSeries = new ConcurrentBag<ScannedSeriesResult>();
    foreach (var folderPath in folders)
    {
        try
@@ -321,45 +323,15 @@ public class ParseScannedFiles

            foreach (var scanResult in scanResults)
            {
-               // scanResult is updated with the parsed infos
-               await ProcessScanResult(scanResult, seriesPaths, library);
-
-               // We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
-               var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
-
-               // Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
-               MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
-
-               // Combine everything into scannedSeries
-               foreach (var info in scanResult.ParserInfos)
-               {
-                   try
-                   {
-                       TrackSeries(scannedSeries, info);
-                   }
-                   catch (Exception ex)
-                   {
-                       _logger.LogError(ex,
-                           "[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
-                           info?.FullFilePath);
-                   }
-               }
-
-               foreach (var series in scannedSeries.Keys)
-               {
-                   if (scannedSeries[series].Count <= 0) continue;
-
-                   UpdateSortOrder(scannedSeries, series);
-
-                   processedScannedSeries.Add(new ScannedSeriesResult()
-                   {
-                       HasChanged = scanResult.HasChanged,
-                       ParsedSeries = series,
-                       ParsedInfos = scannedSeries[series]
-                   });
-               }
+               await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
            }

+           // This reduced a 1.1k series networked scan by a little more than 1 hour, but the order series were added to Kavita was not alphabetical
+           // await Task.WhenAll(scanResults.Select(async scanResult =>
+           // {
+           //     await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
+           // }));

        }
        catch (ArgumentException ex)
        {
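The commented-out Task.WhenAll block (and the ConcurrentBag line kept above as a comment) record why the loop stays sequential: parallel completion order is nondeterministic, so series stopped arriving in Kavita alphabetically. If parallelism is ever revisited, one order-preserving shape is to have each task build its own list and flatten in input order afterwards. A rough sketch under that assumption, not what this commit does:

    // Task.WhenAll returns results in the order of the input sequence,
    // regardless of which task finishes first.
    var perResult = await Task.WhenAll(scanResults.Select(async scanResult =>
    {
        var local = new List<ScannedSeriesResult>();
        await ParseAndTrackSeries(library, seriesPaths, scanResult, local);
        return local;
    }));

    // Flattening here preserves the original scanResults order.
    var processedScannedSeries = perResult.SelectMany(x => x).ToList();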
@@ -369,10 +341,52 @@ public class ParseScannedFiles

    await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", library.Name, ProgressEventType.Ended));

-   return processedScannedSeries;
+   return processedScannedSeries.ToList();

}

+private async Task ParseAndTrackSeries(Library library, IDictionary<string, IList<SeriesModified>> seriesPaths, ScanResult scanResult,
+   List<ScannedSeriesResult> processedScannedSeries)
+{
+   // scanResult is updated with the parsed infos
+   await ProcessScanResult(scanResult, seriesPaths, library); // NOTE: This may be able to be parallelized
+
+   // We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
+   var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
+
+   // Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
+   MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
+
+   // Combine everything into scannedSeries
+   foreach (var info in scanResult.ParserInfos)
+   {
+       try
+       {
+           TrackSeries(scannedSeries, info);
+       }
+       catch (Exception ex)
+       {
+           _logger.LogError(ex,
+               "[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
+               info?.FullFilePath);
+       }
+   }
+
+   foreach (var series in scannedSeries.Keys)
+   {
+       if (scannedSeries[series].Count <= 0) continue;
+
+       UpdateSortOrder(scannedSeries, series);
+
+       processedScannedSeries.Add(new ScannedSeriesResult()
+       {
+           HasChanged = scanResult.HasChanged,
+           ParsedSeries = series,
+           ParsedInfos = scannedSeries[series]
+       });
+   }
+}
+
/// <summary>
/// For a given ScanResult, sets the ParserInfos on the result
/// </summary>
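The Nagatoro comment inside ParseAndTrackSeries is the key behavior: parsed infos whose series names are localized or romanized variants of one another must collapse into a single series before TrackSeries runs, otherwise one folder yields two series. A toy version of that kind of merge, where Info and Normalize are illustrative stand-ins rather than Kavita's real types:

    using System.Collections.Generic;
    using System.Linq;

    class Info { public string Series = ""; }

    static class LocalizedMergeSketch
    {
        // Crude normalization: lowercase and strip non-alphanumerics.
        static string Normalize(string s) =>
            new string(s.ToLowerInvariant().Where(char.IsLetterOrDigit).ToArray());

        // Rewrite minority spellings in each normalized group to the most
        // common spelling, so tracking sees one series key per group.
        public static void Merge(List<Info> infos)
        {
            foreach (var group in infos.GroupBy(i => Normalize(i.Series)))
            {
                var canonical = group.GroupBy(i => i.Series)
                    .OrderByDescending(g => g.Count())
                    .First().Key;
                foreach (var info in group) info.Series = canonical;
            }
        }
    }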
@@ -27,7 +27,7 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
        Filename = Path.GetFileName(filePath),
        Format = Parser.ParseFormat(filePath),
        Title = Parser.RemoveExtensionIfSupported(fileName),
-       FullFilePath = filePath,
+       FullFilePath = Parser.NormalizePath(filePath),
        Series = string.Empty,
        ComicInfo = comicInfo
    };
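This is the same one-line change applied to BasicParser, ComicVineParser, ImageParser, and PdfParser below: FullFilePath is normalized once at ParserInfo construction so every later equality check compares like with like. A reasonable mental model of Parser.NormalizePath (an assumption; the real implementation may do more) is separator unification, which is idempotent, so normalizing again at comparison time remains safe:

    // Assumed shape of the normalization: unify directory separators.
    // NormalizePath(NormalizePath(p)) == NormalizePath(p), so construction-time
    // and comparison-time normalization can coexist.
    static string NormalizePath(string path) =>
        string.IsNullOrEmpty(path) ? path : path.Replace('\\', '/');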
@@ -3,13 +3,15 @@ using API.Entities.Enums;

namespace API.Services.Tasks.Scanner.Parser;

-public class BookParser(IDirectoryService directoryService, IBookService bookService, IDefaultParser basicParser) : DefaultParser(directoryService)
+public class BookParser(IDirectoryService directoryService, IBookService bookService, BasicParser basicParser) : DefaultParser(directoryService)
{
    public override ParserInfo Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo comicInfo = null)
    {
        var info = bookService.ParseInfo(filePath);
        if (info == null) return null;

+       info.ComicInfo = comicInfo;
+
        // This catches when original library type is Manga/Comic and when parsing with non
        if (Parser.ParseVolume(info.Series) != Parser.LooseLeafVolume) // Shouldn't this be info.Volume != DefaultVolume?
        {
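Narrowing the constructor parameter from IDefaultParser to the concrete BasicParser removes ambiguity when several IDefaultParser implementations live in the DI container: the container now has exactly one candidate to resolve. The registration this implies would look roughly like the following (hypothetical sketch; the actual Kavita registration code is not part of this diff):

    // Register the concrete type so BookParser's (BasicParser basicParser)
    // parameter resolves without guessing among IDefaultParser implementations.
    services.AddScoped<BasicParser>();
    services.AddScoped<BookParser>();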
@@ -34,7 +34,7 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
        Filename = Path.GetFileName(filePath),
        Format = Parser.ParseFormat(filePath),
        Title = Parser.RemoveExtensionIfSupported(fileName)!,
-       FullFilePath = filePath,
+       FullFilePath = Parser.NormalizePath(filePath),
        Series = string.Empty,
        ComicInfo = comicInfo,
        Chapters = Parser.ParseComicChapter(fileName),
@@ -102,4 +102,33 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
    {
        return type == LibraryType.ComicVine;
    }
+
+   private new static void UpdateFromComicInfo(ParserInfo info)
+   {
+       if (info.ComicInfo == null) return;
+
+       if (!string.IsNullOrEmpty(info.ComicInfo.Volume))
+       {
+           info.Volumes = info.ComicInfo.Volume;
+       }
+       if (string.IsNullOrEmpty(info.LocalizedSeries) && !string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
+       {
+           info.LocalizedSeries = info.ComicInfo.LocalizedSeries.Trim();
+       }
+       if (!string.IsNullOrEmpty(info.ComicInfo.Number))
+       {
+           info.Chapters = info.ComicInfo.Number;
+           if (info.IsSpecial && Parser.DefaultChapter != info.Chapters)
+           {
+               info.IsSpecial = false;
+               info.Volumes = $"{Parser.SpecialVolumeNumber}";
+           }
+       }
+
+       // Patch in SeriesSort from ComicInfo
+       if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
+       {
+           info.SeriesSort = info.ComicInfo.TitleSort.Trim();
+       }
+   }
}
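The new modifier on UpdateFromComicInfo matters here: static members cannot be overridden, so this method hides a same-named static on DefaultParser, and the binding is chosen at compile time by the type the call is written against. A minimal demonstration with hypothetical types (not Kavita's):

    using System;

    class BaseParser
    {
        public static string Describe() => "base";
    }

    class VineParser : BaseParser
    {
        // 'new' silences warning CS0108: this hides, rather than overrides,
        // the base static method.
        public new static string Describe() => "vine";
    }

    class Demo
    {
        static void Main()
        {
            Console.WriteLine(BaseParser.Describe()); // base
            Console.WriteLine(VineParser.Describe()); // vine: resolved at compile time
        }
    }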
@@ -21,7 +21,7 @@ public class ImageParser(IDirectoryService directoryService) : DefaultParser(dir
        ComicInfo = comicInfo,
        Format = MangaFormat.Image,
        Filename = Path.GetFileName(filePath),
-       FullFilePath = filePath,
+       FullFilePath = Parser.NormalizePath(filePath),
        Title = fileName,
    };
    ParseFromFallbackFolders(filePath, libraryRoot, LibraryType.Image, ref ret);
@@ -14,7 +14,7 @@ public class PdfParser(IDirectoryService directoryService) : DefaultParser(direc
        Filename = Path.GetFileName(filePath),
        Format = Parser.ParseFormat(filePath),
        Title = Parser.RemoveExtensionIfSupported(fileName)!,
-       FullFilePath = filePath,
+       FullFilePath = Parser.NormalizePath(filePath),
        Series = string.Empty,
        ComicInfo = comicInfo,
        Chapters = type == LibraryType.Comic
@@ -203,15 +203,16 @@ public class ProcessSeries : IProcessSeries


    // Process reading list after commit as we need to commit per list
-   BackgroundJob.Enqueue(() => _readingListService.CreateReadingListsFromSeries(library.Id, series.Id));
+   await _readingListService.CreateReadingListsFromSeries(library.Id, series.Id);

    if (seriesAdded)
    {
        // See if any recommendations can link up to the series and pre-fetch external metadata for the series
        _logger.LogInformation("Linking up External Recommendations new series (if applicable)");

-       BackgroundJob.Enqueue(() =>
-           _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
+       // BackgroundJob.Enqueue(() =>
+       //     _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
+       await _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type);

        await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded,
            MessageFactory.SeriesAddedEvent(series.Id, series.Name, series.LibraryId), false);
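The recurring change in ProcessSeries is replacing Hangfire's fire-and-forget BackgroundJob.Enqueue with a direct await. Enqueue hands the lambda to a background worker and returns immediately, so the scan continues with no guarantee about when, or in what order, the job runs; awaiting runs the work inline and only proceeds once it has finished. The contrast in miniature, using the calls from this diff:

    // Fire-and-forget: returns a job id at once; the work runs later on a
    // Hangfire worker, possibly after the scan has long moved on.
    BackgroundJob.Enqueue(() => _readingListService.CreateReadingListsFromSeries(library.Id, series.Id));

    // Inline: series processing does not continue until the reading lists
    // exist, keeping side effects ordered with the rest of the scan.
    await _readingListService.CreateReadingListsFromSeries(library.Id, series.Id);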
@@ -232,9 +233,11 @@ public class ProcessSeries : IProcessSeries

    var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
    await _metadataService.GenerateCoversForSeries(series, settings.EncodeMediaAs, settings.CoverImageSize);
-   BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
+   // BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
+   await _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate);
}


private async Task ReportDuplicateSeriesLookup(Library library, ParserInfo firstInfo, Exception ex)
{
    var seriesCollisions = await _unitOfWork.SeriesRepository.GetAllSeriesByAnyName(firstInfo.LocalizedSeries, string.Empty, library.Id, firstInfo.Format);
@@ -581,7 +584,7 @@ public class ProcessSeries : IProcessSeries
{
    // TODO: Push this to UI in some way
    if (!ex.Message.Equals("Sequence contains more than one matching element")) throw;
-   _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
+   _logger.LogCritical(ex, "[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
    throw new KavitaException(
        $"Kavita found corrupted volume entries on {series.Name}. Please delete the series from Kavita via UI and rescan");
}
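Passing the caught exception as the first argument is the substance of this change: ILogger treats a leading Exception parameter specially, so the entry now records the exception type and stack trace rather than only the templated message.

    // Illustrative messages, not the ones from this diff:
    _logger.LogCritical("Corrupted volumes on {SeriesName}", series.Name);      // message only
    _logger.LogCritical(ex, "Corrupted volumes on {SeriesName}", series.Name);  // message + exception and stack trace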
@@ -705,7 +708,7 @@ public class ProcessSeries : IProcessSeries
{
    // Ensure we remove any files that no longer exist AND order
    existingChapter.Files = existingChapter.Files
-       .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
+       .Where(f => parsedInfos.Any(p => Parser.Parser.NormalizePath(p.FullFilePath) == Parser.Parser.NormalizePath(f.FilePath)))
        .OrderByNatural(f => f.FilePath).ToList();
    existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages);
}
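Normalizing inside the Any predicate re-normalizes every parsed path for every file, which is O(files x parsedInfos) string work. Correctness is what this commit is after, but if the comparison ever shows up in a profile, precomputing the normalized set is a behavior-preserving alternative (a sketch, not part of this commit):

    // Normalize each parsed path once, then do O(1) set lookups per file.
    var parsedPaths = parsedInfos
        .Select(p => Parser.Parser.NormalizePath(p.FullFilePath))
        .ToHashSet();

    existingChapter.Files = existingChapter.Files
        .Where(f => parsedPaths.Contains(Parser.Parser.NormalizePath(f.FilePath)))
        .OrderByNatural(f => f.FilePath).ToList();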