Merged develop in

Joseph Milazzo 2024-04-07 14:13:06 -05:00
commit 5423526484
260 changed files with 15553 additions and 2369 deletions


@@ -104,8 +104,13 @@ public class BackupService : IBackupService
_directoryService.ExistOrCreate(tempDirectory);
_directoryService.ClearDirectory(tempDirectory);
await SendProgress(0.1F, "Copying config files");
_directoryService.CopyFilesToDirectory(
- _backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)).ToList(), tempDirectory);
+ _backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)), tempDirectory);
+ // Copy any CSVs, as those are used for manual migrations
+ _directoryService.CopyFilesToDirectory(
+ _directoryService.GetFilesWithCertainExtensions(_directoryService.ConfigDirectory, @"\.csv"), tempDirectory);
await SendProgress(0.2F, "Copying logs");
CopyLogsToBackupDirectory(tempDirectory);
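The new CSV pass relies on GetFilesWithCertainExtensions filtering by a regex over the file extension. A minimal sketch of that kind of filter, assuming the pattern is matched against Path.GetExtension — the helper below is a hypothetical stand-in, not Kavita's implementation:

using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;

// Hypothetical stand-in for _directoryService.GetFilesWithCertainExtensions:
// returns files in `directory` whose extension matches `extensionRegex`.
static IEnumerable<string> FilesWithExtensions(string directory, string extensionRegex)
{
    var regex = new Regex(extensionRegex, RegexOptions.IgnoreCase);
    return Directory.EnumerateFiles(directory)
        .Where(f => regex.IsMatch(Path.GetExtension(f))); // ".csv" matches @"\.csv"
}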


@@ -20,6 +20,7 @@ public interface ICleanupService
Task Cleanup();
Task CleanupDbEntries();
void CleanupCacheAndTempDirectories();
+ void CleanupCacheDirectory();
Task DeleteSeriesCoverImages();
Task DeleteChapterCoverImages();
Task DeleteTagCoverImages();
@@ -106,7 +107,7 @@ public class CleanupService : ICleanupService
await _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters();
await _unitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated();
await _unitOfWork.GenreRepository.RemoveAllGenreNoLongerAssociated();
- await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
+ await _unitOfWork.CollectionTagRepository.RemoveCollectionsWithoutSeries();
await _unitOfWork.ReadingListRepository.RemoveReadingListsWithoutSeries();
}
@@ -178,6 +179,23 @@ public class CleanupService : ICleanupService
_logger.LogInformation("Cache and temp directory purged");
}
+ public void CleanupCacheDirectory()
+ {
+ _logger.LogInformation("Performing cleanup of Cache directories");
+ _directoryService.ExistOrCreate(_directoryService.CacheDirectory);
+ try
+ {
+ _directoryService.ClearDirectory(_directoryService.CacheDirectory);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup");
+ }
+ _logger.LogInformation("Cache directory purged");
+ }
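Because CleanupCacheDirectory touches only the cache (CleanupCacheAndTempDirectories also clears temp), it can be scheduled independently. A hedged sketch of registering it as a recurring Hangfire job — the job id and cadence here are illustrative, not Kavita's actual registration:

using Hangfire;

// Illustrative only: run the narrower cache cleanup once a day.
RecurringJob.AddOrUpdate<ICleanupService>(
    "cleanup-cache-directory",
    s => s.CleanupCacheDirectory(),
    Cron.Daily());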
/// <summary>
/// Removes database backups older than the configured total-backups window (in days). If all backups are older than that window, only the latest is kept.
/// </summary>


@@ -148,14 +148,14 @@ public class LibraryWatcher : ILibraryWatcher
private void OnChanged(object sender, FileSystemEventArgs e)
{
- _logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}, {ChangeType}", e.FullPath, e.Name, e.ChangeType);
+ _logger.LogTrace("[LibraryWatcher] Changed: {FullPath}, {Name}, {ChangeType}", e.FullPath, e.Name, e.ChangeType);
if (e.ChangeType != WatcherChangeTypes.Changed) return;
BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name))));
}
private void OnCreated(object sender, FileSystemEventArgs e)
{
_logger.LogDebug("[LibraryWatcher] Created: {FullPath}, {Name}", e.FullPath, e.Name);
_logger.LogTrace("[LibraryWatcher] Created: {FullPath}, {Name}", e.FullPath, e.Name);
BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, !_directoryService.FileSystem.File.Exists(e.Name)));
}
@@ -167,7 +167,7 @@ public class LibraryWatcher : ILibraryWatcher
private void OnDeleted(object sender, FileSystemEventArgs e) {
var isDirectory = string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name));
if (!isDirectory) return;
- _logger.LogDebug("[LibraryWatcher] Deleted: {FullPath}, {Name}", e.FullPath, e.Name);
+ _logger.LogTrace("[LibraryWatcher] Deleted: {FullPath}, {Name}", e.FullPath, e.Name);
BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, true));
}
@@ -285,10 +285,10 @@ public class LibraryWatcher : ILibraryWatcher
var rootFolder = _directoryService.GetFoldersTillRoot(libraryFolder, filePath).ToList();
_logger.LogTrace("[LibraryWatcher] Root Folders: {RootFolders}", rootFolder);
- if (!rootFolder.Any()) return string.Empty;
+ if (rootFolder.Count == 0) return string.Empty;
// Select the first folder and join with library folder, this should give us the folder to scan.
- return Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder[rootFolder.Count - 1]));
+ return Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder[rootFolder.Count - 1]));
}
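The watcher's directory-versus-file test throughout these handlers is "no extension means directory". That heuristic is cheap but has a known blind spot — folders whose names contain a dot:

using System;
using System.IO;

Console.WriteLine(Path.GetExtension("Series Vol. 1.cbz")); // ".cbz"    -> treated as a file
Console.WriteLine(Path.GetExtension("Some Series"));       // ""        -> treated as a directory
Console.WriteLine(Path.GetExtension("Dr. Stone"));         // ". Stone" -> a folder misread as a file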


@@ -9,6 +9,7 @@ using API.Entities.Enums;
using API.Extensions;
using API.Services.Tasks.Scanner.Parser;
using API.SignalR;
+ using ExCSS;
using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
@@ -166,13 +167,18 @@ public class ParseScannedFiles
}
normalizedPath = Parser.Parser.NormalizePath(folderPath);
+ var libraryRoot =
+ library.Folders.FirstOrDefault(f =>
+ Parser.Parser.NormalizePath(folderPath).Contains(Parser.Parser.NormalizePath(f.Path)))?.Path ??
+ folderPath;
if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
{
result.Add(new ScanResult()
{
Files = ArraySegment<string>.Empty,
Folder = folderPath,
- LibraryRoot = folderPath,
+ LibraryRoot = libraryRoot,
HasChanged = false
});
}
@@ -181,7 +187,7 @@ public class ParseScannedFiles
{
Files = _directoryService.ScanFiles(folderPath, fileExtensions),
Folder = folderPath,
- LibraryRoot = folderPath,
+ LibraryRoot = libraryRoot,
HasChanged = true
});
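The libraryRoot fix matters for the unchanged-folder short-circuit: previously the scanned folder itself was recorded as the library root, even when it was a nested series folder. A minimal sketch of the lookup, with illustrative paths:

using System.Linq;

// Illustrative: find which configured library folder contains the scanned path.
var folderPath = "/library/manga/Some Series";               // hypothetical scan target
var folders = new[] { "/library/manga", "/library/comics" }; // hypothetical library.Folders paths

var libraryRoot = folders.FirstOrDefault(f =>
        Parser.Parser.NormalizePath(folderPath).Contains(Parser.Parser.NormalizePath(f)))
    ?? folderPath; // fall back to the scanned folder when no root matches
// libraryRoot == "/library/manga"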
@@ -309,6 +315,7 @@ public class ParseScannedFiles
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", library.Name, ProgressEventType.Started));
var processedScannedSeries = new List<ScannedSeriesResult>();
+ //var processedScannedSeries = new ConcurrentBag<ScannedSeriesResult>();
foreach (var folderPath in folders)
{
try
@@ -317,45 +324,15 @@ public class ParseScannedFiles
foreach (var scanResult in scanResults)
{
- // scanResult is updated with the parsed infos
- await ProcessScanResult(scanResult, seriesPaths, library);
- // We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
- var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
- // Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
- MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
- // Combine everything into scannedSeries
- foreach (var info in scanResult.ParserInfos)
- {
- try
- {
- TrackSeries(scannedSeries, info);
- }
- catch (Exception ex)
- {
- _logger.LogError(ex,
- "[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
- info?.FullFilePath);
- }
- }
- foreach (var series in scannedSeries.Keys)
- {
- if (scannedSeries[series].Count <= 0) continue;
- UpdateSortOrder(scannedSeries, series);
- processedScannedSeries.Add(new ScannedSeriesResult()
- {
- HasChanged = scanResult.HasChanged,
- ParsedSeries = series,
- ParsedInfos = scannedSeries[series]
- });
- }
+ await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
}
+ // This reduced a 1.1k series networked scan by a little more than 1 hour, but the order series were added to Kavita was not alphabetical
+ // await Task.WhenAll(scanResults.Select(async scanResult =>
+ // {
+ //     await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
+ // }));
}
catch (ArgumentException ex)
{
@@ -365,10 +342,52 @@ public class ParseScannedFiles
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", library.Name, ProgressEventType.Ended));
- return processedScannedSeries;
+ return processedScannedSeries.ToList();
}
+ private async Task ParseAndTrackSeries(Library library, IDictionary<string, IList<SeriesModified>> seriesPaths, ScanResult scanResult,
+ List<ScannedSeriesResult> processedScannedSeries)
+ {
+ // scanResult is updated with the parsed infos
+ await ProcessScanResult(scanResult, seriesPaths, library); // NOTE: This may be able to be parallelized
+ // We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
+ var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
+ // Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
+ MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
+ // Combine everything into scannedSeries
+ foreach (var info in scanResult.ParserInfos)
+ {
+ try
+ {
+ TrackSeries(scannedSeries, info);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex,
+ "[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
+ info?.FullFilePath);
+ }
+ }
+ foreach (var series in scannedSeries.Keys)
+ {
+ if (scannedSeries[series].Count <= 0) continue;
+ UpdateSortOrder(scannedSeries, series);
+ processedScannedSeries.Add(new ScannedSeriesResult()
+ {
+ HasChanged = scanResult.HasChanged,
+ ParsedSeries = series,
+ ParsedInfos = scannedSeries[series]
+ });
+ }
+ }
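TrackSeries itself is not shown in this diff; a hedged sketch of the accumulation it performs on the scannedSeries map, assuming a simple get-or-add keyed on the parsed series (the real method also normalizes names, so this is illustrative only):

using System.Collections.Concurrent;
using System.Collections.Generic;

// Hypothetical sketch of the accumulation TrackSeries performs.
static void Track(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParserInfo info)
{
    var key = new ParsedSeries { Name = info.Series, Format = info.Format }; // assumed key fields
    var list = scannedSeries.GetOrAdd(key, _ => new List<ParserInfo>());
    lock (list) // List<T> is not thread-safe, even when held inside a ConcurrentDictionary
    {
        list.Add(info);
    }
}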
/// <summary>
/// For a given ScanResult, sets the ParserInfos on the result
/// </summary>
@@ -397,21 +416,18 @@ public class ParseScannedFiles
var folder = result.Folder;
var libraryRoot = result.LibraryRoot;
// When processing files for a folder and we do enter, we need to parse the information and combine parser infos
+ // NOTE: We might want to move the merge step later in the process, like return and combine.
_logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent($"{files.Count} files in {folder}", library.Name, ProgressEventType.Updated));
if (files.Count == 0)
{
- _logger.LogInformation("[ScannerService] {Folder} is empty, no longer in this location, or has no file types that match Library File Types", folder);
+ result.ParserInfos = ArraySegment<ParserInfo>.Empty;
+ _logger.LogInformation("[ScannerService] {Folder} is empty or is no longer in this location", folder);
return;
}
+ // Multiple Series can exist within a folder. We should instead put these infos on the result and perform merging above
IList<ParserInfo> infos = files
- .Select(file => _readingItemService.ParseFile(file, folder, libraryRoot, library))
+ .Select(file => _readingItemService.ParseFile(file, folder, libraryRoot, library.Type))
.Where(info => info != null)
.ToList()!;
@@ -430,22 +446,44 @@ public class ParseScannedFiles
var infos = scannedSeries[series].Where(info => info.Volumes == volume.Key).ToList();
IList<ParserInfo> chapters;
var specialTreatment = infos.TrueForAll(info => info.IsSpecial);
+ var hasAnySpMarker = infos.Exists(info => info.SpecialIndex > 0);
+ var counter = 0f;
- if (specialTreatment)
+ if (specialTreatment && hasAnySpMarker)
{
chapters = infos
.OrderBy(info => info.SpecialIndex)
.ToList();
+ foreach (var chapter in chapters)
+ {
+ chapter.IssueOrder = counter;
+ counter++;
+ }
+ return;
}
- else
+ // If everything is a special but we don't have any SpecialIndex, then order naturally and use 0, 1, 2
+ if (specialTreatment)
+ {
chapters = infos
- .OrderByNatural(info => info.Chapters)
+ .OrderByNatural(info => Parser.Parser.RemoveExtensionIfSupported(info.Filename)!)
.ToList();
+ foreach (var chapter in chapters)
+ {
+ chapter.IssueOrder = counter;
+ counter++;
+ }
+ return;
+ }
+ chapters = infos
+ .OrderByNatural(info => info.Chapters)
+ .ToList();
- var counter = 0f;
+ counter = 0f;
var prevIssue = string.Empty;
foreach (var chapter in chapters)
{

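Worked example of the fallback branch above: when every info is a special but none carries an SP marker, ordering falls back to OrderByNatural over the file name (minus extension), so numeric pieces sort numerically rather than lexically. OrderByNatural is Kavita's own extension method; the file names below are illustrative:

using System.Linq;

var files = new[] { "Special 10.cbz", "Special 2.cbz", "Special 1.cbz" };

var lexical = files.OrderBy(f => f).ToList();        // Special 1, Special 10, Special 2
var natural = files.OrderByNatural(f => f).ToList(); // Special 1, Special 2, Special 10
// IssueOrder is then assigned 0, 1, 2 in that natural order.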

@@ -1,5 +1,5 @@
- using System.Collections.Generic;
- using System.IO;
+ using System.IO;
+ using System.Collections.Generic;
using API.Data.Metadata;
using API.Entities.Enums;
@@ -29,7 +29,7 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Parser.RemoveExtensionIfSupported(fileName),
- FullFilePath = filePath,
+ FullFilePath = Parser.NormalizePath(filePath),
Series = string.Empty,
ComicInfo = comicInfo
};
@@ -97,6 +97,11 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
// Patch in other information from ComicInfo
UpdateFromComicInfo(ret);
+ if (ret.Volumes == Parser.LooseLeafVolume && ret.Chapters == Parser.DefaultChapter)
+ {
+ ret.IsSpecial = true;
+ }
// v0.8.x: Introducing a change where Specials will go in a separate Volume with a reserved number
if (ret.IsSpecial)
{


@@ -12,6 +12,8 @@ public class BookParser(IDirectoryService directoryService, IBookService bookSer
var info = bookService.ParseInfo(filePath);
if (info == null) return null;
+ info.ComicInfo = comicInfo;
+ // This catches when original library type is Manga/Comic and when parsing with non
if (Parser.ParseVolume(info.Series) != Parser.LooseLeafVolume) // Shouldn't this be info.Volume != DefaultVolume?
{
@@ -22,6 +24,7 @@ public class BookParser(IDirectoryService directoryService, IBookService bookSer
if (string.IsNullOrEmpty(info.ComicInfo?.Volume) && hasVolumeInTitle && (hasVolumeInSeries || string.IsNullOrEmpty(info.Series)))
{
+ // NOTE: I'm not sure the comment is true. I've never seen this triggered
// This is likely a light novel for which we can set series from parsed title
info.Series = Parser.ParseSeries(info.Title);
info.Volumes = Parser.ParseVolume(info.Title);
@@ -30,6 +33,12 @@ public class BookParser(IDirectoryService directoryService, IBookService bookSer
{
var info2 = basicParser.Parse(filePath, rootPath, libraryRoot, LibraryType.Book, comicInfo);
info.Merge(info2);
+ if (hasVolumeInSeries && info2 != null && Parser.ParseVolume(info2.Series)
+ .Equals(Parser.LooseLeafVolume))
+ {
+ // Override the Series name so it groups appropriately
+ info.Series = info2.Series;
+ }
}
}


@@ -32,7 +32,7 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Parser.RemoveExtensionIfSupported(fileName)!,
- FullFilePath = filePath,
+ FullFilePath = Parser.NormalizePath(filePath),
Series = string.Empty,
ComicInfo = comicInfo,
Chapters = Parser.ParseComicChapter(fileName),
@@ -100,4 +100,33 @@
{
return type == LibraryType.ComicVine;
}
+ private new static void UpdateFromComicInfo(ParserInfo info)
+ {
+ if (info.ComicInfo == null) return;
+ if (!string.IsNullOrEmpty(info.ComicInfo.Volume))
+ {
+ info.Volumes = info.ComicInfo.Volume;
+ }
+ if (string.IsNullOrEmpty(info.LocalizedSeries) && !string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
+ {
+ info.LocalizedSeries = info.ComicInfo.LocalizedSeries.Trim();
+ }
+ if (!string.IsNullOrEmpty(info.ComicInfo.Number))
+ {
+ info.Chapters = info.ComicInfo.Number;
+ if (info.IsSpecial && Parser.DefaultChapter != info.Chapters)
+ {
+ info.IsSpecial = false;
+ info.Volumes = $"{Parser.SpecialVolumeNumber}";
+ }
+ }
+ // Patch in SeriesSort from ComicInfo
+ if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
+ {
+ info.SeriesSort = info.ComicInfo.TitleSort.Trim();
+ }
+ }
}


@@ -27,6 +27,7 @@ public abstract class DefaultParser(IDirectoryService directoryService) : IDefau
/// <param name="filePath"></param>
/// <param name="rootPath">Root folder</param>
/// <param name="type">Allows different Regex to be used for parsing.</param>
+ /// <param name="comicInfo">ComicInfo if present (for epub it is always present)</param>
/// <param name="extraRegex">The regex for the Generic Parser</param>
/// <returns><see cref="ParserInfo"/> or null if Series was empty</returns>
@@ -111,21 +112,29 @@ public abstract class DefaultParser(IDirectoryService directoryService) : IDefau
{
info.Volumes = info.ComicInfo.Volume;
}
- if (string.IsNullOrEmpty(info.Series) && !string.IsNullOrEmpty(info.ComicInfo.Series))
+ if (!string.IsNullOrEmpty(info.ComicInfo.Series))
{
info.Series = info.ComicInfo.Series.Trim();
}
- if (string.IsNullOrEmpty(info.LocalizedSeries) && !string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
+ if (!string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
{
info.LocalizedSeries = info.ComicInfo.LocalizedSeries.Trim();
}
+ if (!string.IsNullOrEmpty(info.ComicInfo.Format) && Parser.HasComicInfoSpecial(info.ComicInfo.Format))
+ {
+ info.IsSpecial = true;
+ info.Chapters = Parser.DefaultChapter;
+ info.Volumes = Parser.SpecialVolume;
+ }
if (!string.IsNullOrEmpty(info.ComicInfo.Number))
{
info.Chapters = info.ComicInfo.Number;
if (info.IsSpecial && Parser.DefaultChapter != info.Chapters)
{
info.IsSpecial = false;
- info.Volumes = $"{Parser.SpecialVolumeNumber}";
+ info.Volumes = Parser.SpecialVolume;
}
}
@@ -134,6 +143,7 @@ public abstract class DefaultParser(IDirectoryService directoryService) : IDefau
{
info.SeriesSort = info.ComicInfo.TitleSort.Trim();
}
}
public abstract bool IsApplicable(string filePath, LibraryType type);
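Behavioral note on the two relaxed guards above: ComicInfo.Series and ComicInfo.LocalizedSeries now overwrite whatever the filename parse produced, instead of only filling a blank value. A small sketch with illustrative values:

// Filename parse produced a noisy series name; ComicInfo is now authoritative.
info.Series = "Some Series v01";                // from the filename (illustrative)
if (!string.IsNullOrEmpty(info.ComicInfo.Series))
{
    info.Series = info.ComicInfo.Series.Trim(); // "Some Series" from ComicInfo wins
}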


@@ -1,5 +1,5 @@
- using System.Collections.Generic;
- using System.IO;
+ using System.IO;
+ using System.Collections.Generic;
using API.Data.Metadata;
using API.Entities.Enums;
@@ -23,7 +23,7 @@ public class ImageParser(IDirectoryService directoryService) : DefaultParser(dir
ComicInfo = comicInfo,
Format = MangaFormat.Image,
Filename = Path.GetFileName(filePath),
- FullFilePath = filePath,
+ FullFilePath = Parser.NormalizePath(filePath),
Title = fileName,
};
ParseFromFallbackFolders(filePath, libraryRoot, LibraryType.Image, ref ret);


@@ -121,6 +121,10 @@ public static class Parser
private static readonly Regex[] MangaVolumeRegex = new[]
{
+ // Thai Volume: เล่ม n -> Volume n
+ new Regex(
+ @"(เล่ม|เล่มที่)(\s)?(\.?)(\s|_)?(?<Volume>\d+(\-\d+)?(\.\d+)?)",
+ MatchOptions, RegexTimeout),
// Dance in the Vampire Bund v16-17
new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
@@ -194,6 +198,10 @@ public static class Parser
private static readonly Regex[] MangaSeriesRegex = new[]
{
+ // Thai Volume: เล่ม n -> Volume n
+ new Regex(
+ @"(?<Series>.+?)(เล่ม|เล่มที่)(\s)?(\.?)(\s|_)?(?<Volume>\d+(\-\d+)?(\.\d+)?)",
+ MatchOptions, RegexTimeout),
// Russian Volume: Том n -> Volume n, Тома n -> Volume
new Regex(
@"(?<Series>.+?)Том(а?)(\.?)(\s|_)?(?<Volume>\d+(?:(\-)\d+)?)",
@@ -232,7 +240,7 @@ public static class Parser
RegexTimeout),
// Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto]
new Regex(
- @"(?<Series>.*)( - )(?:v|vo|c|chapters)\d",
+ @"(?<Series>.+?)( - )(?:v|vo|c|chapters)\d",
MatchOptions, RegexTimeout),
// Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip
new Regex(
@@ -368,6 +376,10 @@ public static class Parser
private static readonly Regex[] ComicSeriesRegex = new[]
{
+ // Thai Volume: เล่ม n -> Volume n
+ new Regex(
+ @"(?<Series>.+?)(เล่ม|เล่มที่)(\s)?(\.?)(\s|_)?(?<Volume>\d+(\-\d+)?(\.\d+)?)",
+ MatchOptions, RegexTimeout),
// Russian Volume: Том n -> Volume n, Тома n -> Volume
new Regex(
@"(?<Series>.+?)Том(а?)(\.?)(\s|_)?(?<Volume>\d+(?:(\-)\d+)?)",
@@ -456,6 +468,10 @@ public static class Parser
private static readonly Regex[] ComicVolumeRegex = new[]
{
+ // Thai Volume: เล่ม n -> Volume n
+ new Regex(
+ @"(เล่ม|เล่มที่)(\s)?(\.?)(\s|_)?(?<Volume>\d+(\-\d+)?(\.\d+)?)",
+ MatchOptions, RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.+?)(?: |_)(t|v)(?<Volume>" + NumberRange + @")",
@@ -492,6 +508,10 @@ public static class Parser
private static readonly Regex[] ComicChapterRegex = new[]
{
+ // Thai Chapter: บทที่ n -> Chapter n, ตอนที่ n -> Chapter n
+ new Regex(
+ @"(บทที่|ตอนที่)(\s)?(\.?)(\s|_)?(?<Chapter>\d+(\-\d+)?(\.\d+)?)",
+ MatchOptions, RegexTimeout),
// Batman & Wildcat (1 of 3)
new Regex(
@"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
@@ -557,6 +577,10 @@ public static class Parser
private static readonly Regex[] MangaChapterRegex = new[]
{
+ // Thai Chapter: บทที่ n -> Chapter n, ตอนที่ n -> Chapter n, เล่ม n -> Volume n, เล่มที่ n -> Volume n
+ new Regex(
+ @"(?<Volume>((เล่ม|เล่มที่))?(\s|_)?\.?\d+)(\s|_)(บทที่|ตอนที่)\.?(\s|_)?(?<Chapter>\d+)",
+ MatchOptions, RegexTimeout),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip, ...c90.5-100.5
new Regex(
@"(\b|_)(c|ch)(\.?\s?)(?<Chapter>(\d+(\.\d)?)(-c?\d+(\.\d)?)?)",


@@ -1,5 +1,5 @@
- using System.Collections.Generic;
- using System.IO;
+ using System.IO;
+ using System.Collections.Generic;
using API.Data.Metadata;
using API.Entities.Enums;
@@ -16,7 +16,7 @@ public class PdfParser(IDirectoryService directoryService) : DefaultParser(direc
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Parser.RemoveExtensionIfSupported(fileName)!,
- FullFilePath = filePath,
+ FullFilePath = Parser.NormalizePath(filePath),
Series = string.Empty,
ComicInfo = comicInfo,
Chapters = type == LibraryType.Comic
@@ -24,6 +24,11 @@ public class PdfParser(IDirectoryService directoryService) : DefaultParser(direc
: Parser.ParseChapter(fileName)
};
+ if (type == LibraryType.Book)
+ {
+ ret.Chapters = Parser.DefaultChapter;
+ }
ret.Series = type == LibraryType.Comic ? Parser.ParseComicSeries(fileName) : Parser.ParseSeries(fileName);
ret.Volumes = type == LibraryType.Comic ? Parser.ParseComicVolume(fileName) : Parser.ParseVolume(fileName);


@@ -6,6 +6,7 @@ using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
+ using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
@@ -80,7 +81,14 @@ public class ProcessSeries : IProcessSeries
/// </summary>
public async Task Prime()
{
- await _tagManagerService.Prime();
+ try
+ {
+ await _tagManagerService.Prime();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogCritical(ex, "Unable to prime tag manager. Scan cannot proceed. Report to Kavita dev");
+ }
}
/// <summary>
@@ -196,15 +204,16 @@ public class ProcessSeries : IProcessSeries
// Process reading list after commit as we need to commit per list
- BackgroundJob.Enqueue(() => _readingListService.CreateReadingListsFromSeries(library.Id, series.Id));
+ await _readingListService.CreateReadingListsFromSeries(library.Id, series.Id);
if (seriesAdded)
{
// See if any recommendations can link up to the series and pre-fetch external metadata for the series
_logger.LogInformation("Linking up External Recommendations new series (if applicable)");
- BackgroundJob.Enqueue(() =>
- _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
+ // BackgroundJob.Enqueue(() =>
+ //     _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
+ await _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type);
await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded,
MessageFactory.SeriesAddedEvent(series.Id, series.Name, series.LibraryId), false);
@@ -225,9 +234,11 @@ public class ProcessSeries : IProcessSeries
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
await _metadataService.GenerateCoversForSeries(series, settings.EncodeMediaAs, settings.CoverImageSize);
- BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
+ // BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
+ await _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate);
}
private async Task ReportDuplicateSeriesLookup(Library library, ParserInfo firstInfo, Exception ex)
{
var seriesCollisions = await _unitOfWork.SeriesRepository.GetAllSeriesByAnyName(firstInfo.LocalizedSeries, string.Empty, library.Id, firstInfo.Format);
@@ -361,12 +372,26 @@ public class ProcessSeries : IProcessSeries
if (!string.IsNullOrEmpty(firstChapter?.SeriesGroup) && library.ManageCollections)
{
+ // Get the default admin to associate these tags to
+ var defaultAdmin = await _unitOfWork.UserRepository.GetDefaultAdminUser(AppUserIncludes.Collections);
+ if (defaultAdmin == null) return;
_logger.LogDebug("Collection tag(s) found for {SeriesName}, updating collections", series.Name);
foreach (var collection in firstChapter.SeriesGroup.Split(',', StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries))
{
- var t = await _tagManagerService.GetCollectionTag(collection);
- if (t == null) continue;
- _collectionTagService.AddTagToSeriesMetadata(t, series.Metadata);
+ var t = await _tagManagerService.GetCollectionTag(collection, defaultAdmin);
+ if (t.Item1 == null) continue;
+ var tag = t.Item1;
+ // Check if the Series is already on the tag
+ if (tag.Items.Any(s => s.MatchesSeriesByName(series.NormalizedName, series.NormalizedLocalizedName)))
+ {
+ continue;
+ }
+ tag.Items.Add(series);
+ await _unitOfWork.CollectionTagRepository.UpdateCollectionAgeRating(tag);
}
}
@@ -574,7 +599,7 @@
{
// TODO: Push this to UI in some way
if (!ex.Message.Equals("Sequence contains more than one matching element")) throw;
- _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
+ _logger.LogCritical(ex, "[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
throw new KavitaException(
$"Kavita found corrupted volume entries on {series.Name}. Please delete the series from Kavita via UI and rescan");
}
@@ -691,14 +716,15 @@
{
if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter))
{
- _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
+ _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}",
+ existingChapter.Range, volume.Name, parsedInfos[0].Series);
volume.Chapters.Remove(existingChapter);
}
else
{
// Ensure we remove any files that no longer exist AND order
existingChapter.Files = existingChapter.Files
- .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
+ .Where(f => parsedInfos.Any(p => Parser.Parser.NormalizePath(p.FullFilePath) == Parser.Parser.NormalizePath(f.FilePath)))
.OrderByNatural(f => f.FilePath).ToList();
existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages);
}
@@ -717,6 +743,7 @@
existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format);
existingFile.Extension = fileInfo.Extension.ToLowerInvariant();
existingFile.FileName = Parser.Parser.RemoveExtensionIfSupported(existingFile.FilePath);
+ existingFile.FilePath = Parser.Parser.NormalizePath(existingFile.FilePath);
existingFile.Bytes = fileInfo.Length;
// We skip updating DB here with last modified time so that metadata refresh can do it
}
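Normalizing both sides of the FilePath comparison above is what keeps stale-file pruning from dropping valid files when the database recorded a different separator style. A minimal illustration, under the assumption that Parser.NormalizePath unifies path separators:

using System;

// Assumption: Parser.NormalizePath converts '\' separators to '/'.
var fromDb   = @"C:\manga\SeriesA\vol1.cbz"; // stored before paths were normalized
var fromScan = "C:/manga/SeriesA/vol1.cbz";  // produced by this scan

Console.WriteLine(fromDb == fromScan); // False: the raw comparison would drop a valid file
// Comparing NormalizePath(fromDb) == NormalizePath(fromScan) keeps the MangaFile entry.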


@@ -1,4 +1,5 @@
- using System.Collections.Generic;
+ using System;
+ using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
@@ -28,7 +29,7 @@ public interface ITagManagerService
Task<Genre?> GetGenre(string genre);
Task<Tag?> GetTag(string tag);
Task<Person?> GetPerson(string name, PersonRole role);
- Task<CollectionTag?> GetCollectionTag(string name);
+ Task<Tuple<AppUserCollection?, bool>> GetCollectionTag(string? tag, AppUser userWithCollections);
}
/// <summary>
@@ -41,7 +42,7 @@ public class TagManagerService : ITagManagerService
private Dictionary<string, Genre> _genres;
private Dictionary<string, Tag> _tags;
private Dictionary<string, Person> _people;
- private Dictionary<string, CollectionTag> _collectionTags;
+ private Dictionary<string, AppUserCollection> _collectionTags;
private readonly SemaphoreSlim _genreSemaphore = new SemaphoreSlim(1, 1);
private readonly SemaphoreSlim _tagSemaphore = new SemaphoreSlim(1, 1);
@@ -57,18 +58,22 @@ public class TagManagerService : ITagManagerService
public void Reset()
{
- _genres = new Dictionary<string, Genre>();
- _tags = new Dictionary<string, Tag>();
- _people = new Dictionary<string, Person>();
- _collectionTags = new Dictionary<string, CollectionTag>();
+ _genres = [];
+ _tags = [];
+ _people = [];
+ _collectionTags = [];
}
public async Task Prime()
{
_genres = (await _unitOfWork.GenreRepository.GetAllGenresAsync()).ToDictionary(t => t.NormalizedTitle);
_tags = (await _unitOfWork.TagRepository.GetAllTagsAsync()).ToDictionary(t => t.NormalizedTitle);
- _people = (await _unitOfWork.PersonRepository.GetAllPeople()).ToDictionary(GetPersonKey);
- _collectionTags = (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync(CollectionTagIncludes.SeriesMetadata))
+ _people = (await _unitOfWork.PersonRepository.GetAllPeople())
+ .GroupBy(GetPersonKey)
+ .Select(g => g.First())
+ .ToDictionary(GetPersonKey);
+ var defaultAdmin = await _unitOfWork.UserRepository.GetDefaultAdminUser()!;
+ _collectionTags = (await _unitOfWork.CollectionTagRepository.GetCollectionsForUserAsync(defaultAdmin.Id, CollectionIncludes.Series))
.ToDictionary(t => t.NormalizedTitle);
}
@@ -180,28 +185,30 @@ public class TagManagerService : ITagManagerService
/// </summary>
/// <param name="tag"></param>
/// <returns></returns>
- public async Task<CollectionTag?> GetCollectionTag(string tag)
+ public async Task<Tuple<AppUserCollection?, bool>> GetCollectionTag(string? tag, AppUser userWithCollections)
{
- if (string.IsNullOrEmpty(tag)) return null;
+ if (string.IsNullOrEmpty(tag)) return Tuple.Create<AppUserCollection?, bool>(null, false);
await _collectionTagSemaphore.WaitAsync();
+ AppUserCollection? result;
try
{
- if (_collectionTags.TryGetValue(tag.ToNormalized(), out var result))
+ if (_collectionTags.TryGetValue(tag.ToNormalized(), out result))
{
- return result;
+ return Tuple.Create<AppUserCollection?, bool>(result, false);
}
// We need to create a new Collection
- result = new CollectionTagBuilder(tag).Build();
- _unitOfWork.CollectionTagRepository.Add(result);
+ result = new AppUserCollectionBuilder(tag).Build();
+ userWithCollections.Collections.Add(result);
+ _unitOfWork.UserRepository.Update(userWithCollections);
await _unitOfWork.CommitAsync();
_collectionTags.Add(result.NormalizedTitle, result);
- return result;
}
finally
{
_collectionTagSemaphore.Release();
}
+ return Tuple.Create<AppUserCollection?, bool>(result, true);
}
}
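Call sites now get back a (collection, created) pair, where the bool reports whether a new AppUserCollection was built for the default admin. A usage sketch mirroring the ProcessSeries caller earlier in this commit (System.Tuple deconstructs via TupleExtensions):

var (collection, created) = await _tagManagerService.GetCollectionTag("Favourites", defaultAdmin);
if (collection != null && created)
{
    // Newly built this scan; it is already attached to defaultAdmin.Collections
    // and committed, so only series linking remains.
}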


@@ -325,7 +325,7 @@ public class ScannerService : IScannerService
await _metadataService.RemoveAbandonedMetadataKeys();
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(existingChapterIdsToClean));
- BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.TempDirectory));
+ BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
}
private void TrackFoundSeriesAndFiles(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, IList<ScannedSeriesResult> seenSeries)
@@ -485,7 +485,8 @@ public class ScannerService : IScannerService
public async Task ScanLibrary(int libraryId, bool forceUpdate = false, bool isSingleScan = true)
{
var sw = Stopwatch.StartNew();
- var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId,
+ LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
var libraryFolderPaths = library!.Folders.Select(fp => fp.Path).ToList();
if (!await CheckMounts(library.Name, libraryFolderPaths)) return;
@@ -501,47 +502,16 @@ public class ScannerService : IScannerService
}
- var totalFiles = 0;
- var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
var (scanElapsedTime, processedSeries) = await ScanFiles(library, libraryFolderPaths,
shouldUseLibraryScan, forceUpdate);
+ var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
TrackFoundSeriesAndFiles(parsedSeries, processedSeries);
- // We need to remove any keys where there is no actual parser info
- var toProcess = parsedSeries.Keys
- .Where(k => parsedSeries[k].Any() && !string.IsNullOrEmpty(parsedSeries[k][0].Filename))
- .ToList();
+ var totalFiles = await ProcessParsedSeries(forceUpdate, parsedSeries, library, scanElapsedTime);
- if (toProcess.Count > 0)
- {
- // This grabs all the shared entities, like tags, genre, people. To be solved later in this refactor on how to not have blocking access.
- await _processSeries.Prime();
- }
- var tasks = new List<Task>();
- foreach (var pSeries in toProcess)
- {
- totalFiles += parsedSeries[pSeries].Count;
- tasks.Add(_processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate));
- }
- await Task.WhenAll(tasks);
- await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
- MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
- _logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
- var time = DateTime.Now;
- foreach (var folderPath in library.Folders)
- {
- folderPath.UpdateLastScanned(time);
- }
- library.UpdateLastScanned(time);
+ UpdateLastScanned(library);
_unitOfWork.LibraryRepository.Update(library);
@@ -565,28 +535,7 @@ public class ScannerService : IScannerService
totalFiles, parsedSeries.Count, sw.ElapsedMilliseconds, library.Name);
}
- try
- {
- // Could I delete anything in a Library's Series where the LastScan date is before scanStart?
- // NOTE: This implementation is expensive
- _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan");
- var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);
- _logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}",
- removedSeries.Count, removedSeries.Select(s => s.Name));
- _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete");
- await _unitOfWork.CommitAsync();
- foreach (var s in removedSeries)
- {
- await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
- MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false);
- }
- }
- catch (Exception ex)
- {
- _logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
- }
+ await RemoveSeriesNotFound(parsedSeries, library);
}
else
{
@@ -597,7 +546,77 @@ public class ScannerService : IScannerService
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, string.Empty));
await _metadataService.RemoveAbandonedMetadataKeys();
- BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.TempDirectory));
+ BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
}
+ private async Task RemoveSeriesNotFound(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library)
+ {
+ try
+ {
+ // Could I delete anything in a Library's Series where the LastScan date is before scanStart?
+ // NOTE: This implementation is expensive
+ _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan");
+ var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);
+ _logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}",
+ removedSeries.Count, removedSeries.Select(s => s.Name));
+ _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete");
+ await _unitOfWork.CommitAsync();
+ foreach (var s in removedSeries)
+ {
+ await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
+ MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false);
+ }
+ }
+ catch (Exception ex)
+ {
+ _logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
+ }
+ }
+ private async Task<int> ProcessParsedSeries(bool forceUpdate, Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library, long scanElapsedTime)
+ {
+ var toProcess = parsedSeries.Keys
+ .Where(k => parsedSeries[k].Any() && !string.IsNullOrEmpty(parsedSeries[k][0].Filename))
+ .ToList();
+ if (toProcess.Count > 0)
+ {
+ // This grabs all the shared entities, like tags, genre, people. To be solved later in this refactor on how to not have blocking access.
+ await _processSeries.Prime();
+ }
+ var totalFiles = 0;
+ //var tasks = new List<Task>();
+ foreach (var pSeries in toProcess)
+ {
+ totalFiles += parsedSeries[pSeries].Count;
+ //tasks.Add(_processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate));
+ // We can't do Task.WhenAll because of concurrency issues.
+ await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate);
+ }
+ //await Task.WhenAll(tasks);
+ await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+ MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
+ _logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
+ return totalFiles;
+ }
+ private static void UpdateLastScanned(Library library)
+ {
+ var time = DateTime.Now;
+ foreach (var folderPath in library.Folders)
+ {
+ folderPath.UpdateLastScanned(time);
+ }
+ library.UpdateLastScanned(time);
+ }
private async Task<Tuple<long, IList<ScannedSeriesResult>>> ScanFiles(Library library, IEnumerable<string> dirs,


@@ -134,7 +134,7 @@ public class StatsService : IStatsService
HasBookmarks = (await _unitOfWork.UserRepository.GetAllBookmarksAsync()).Any(),
NumberOfLibraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).Count(),
- NumberOfCollections = (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count(),
+ NumberOfCollections = (await _unitOfWork.CollectionTagRepository.GetAllCollectionsAsync()).Count(),
NumberOfReadingLists = await _unitOfWork.ReadingListRepository.Count(),
OPDSEnabled = serverSettings.EnableOpds,
NumberOfUsers = (await _unitOfWork.UserRepository.GetAllUsersAsync()).Count(),