Scan Loop Fortification (#1573)

* Cleanup some messaging in the scan loop to be more context bearing

* Added Response Caching to Series Detail for 1 min, due to the heavy nature of the call.

* Refactored code to make it so that processing of series runs sync correctly.

Added a log to inform the user of corrupted volume from buggy code in v0.5.6.

* Moved folder watching out of experimental

* Fixed an issue where empty folders could break the scan loop

* Another fix for when dates aren't valid, the scanner wouldn't get the proper min and would throw an exception (develop)

* Implemented the ability to edit release year from the UI for a series.

* Added a unit test for some new logic

* Code smells

* Rewrote the handler for suspending watching to be more resilient and ensure no two threads have a race condition.

* More error handling for when a ScanFolder is invoked but multiple series belong to that folder, log it to the user and default to a library scan.

* ScanSeries now will check for kavitaignores higher than its own folder and respect library level.

* Fixed an issue where image series with a folder name containing the word "folder" could get ignored as it thought the image was a cover image.

When a series folder is moved or deleted, skip parent ignore finding.

* Removed some old files; added in ScanFolder a check for whether the series found for a folder is in a book library, and if so always do a library scan (as books are often nested into one folder with multiple series). Added some unit tests.

* Refactored some scan loop logic into ComicInfo, wrote tests and updated some documentation to make the fields more clear.

* Added a test for GetLastWriteTime based on recent bug

* Cleaned up some redundant code

* Fixed a bad merge

* Code smells

* Removed a package that's no longer used.

* Ensure we check against ScanQueue on ScanFolder enqueuing

* Documentation and more bullet proofing to ensure Hangfire checks work more as expected
This commit is contained in:
Joe Milazzo 2022-10-09 11:23:41 -05:00 committed by GitHub
parent 5a75a204db
commit 6ea9f2c73e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
20 changed files with 391 additions and 250 deletions

View file

@ -124,5 +124,23 @@ public class ComicInfo
info.CoverArtist = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.CoverArtist);
}
/// <summary>
/// Uses both Volume and Number to make an educated guess as to what Count refers to and its highest number.
/// </summary>
/// <returns>The calculated count; falls back to <see cref="Count"/> when neither Number nor Volume yields a usable value.</returns>
public int CalculatedCount()
{
    // Parse with InvariantCulture so "1.5" reads the same regardless of server locale,
    // and use TryParse so malformed metadata cannot throw and abort a scan.
    if (!string.IsNullOrEmpty(Number)
        && float.TryParse(Number, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out var parsedNumber)
        && parsedNumber > 0)
    {
        return (int) Math.Floor(parsedNumber);
    }

    if (!string.IsNullOrEmpty(Volume)
        && float.TryParse(Volume, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out var parsedVolume)
        && parsedVolume > 0)
    {
        // Volume can exceed an explicit Count (e.g. more volumes shipped than the metadata's count field)
        return Math.Max(Count, (int) Math.Floor(parsedVolume));
    }

    return Count;
}
}

View file

@ -33,8 +33,7 @@ public enum SeriesIncludes
Volumes = 2,
Metadata = 4,
Related = 8,
//Related = 16,
//UserPreferences = 32
Library = 16,
}
internal class RecentlyAddedSeries
@ -120,8 +119,7 @@ public interface ISeriesRepository
Task<SeriesDto> GetSeriesForChapter(int chapterId, int userId);
Task<PagedList<SeriesDto>> GetWantToReadForUserAsync(int userId, UserParams userParams, FilterDto filter);
Task<int> GetSeriesIdByFolder(string folder);
Task<Series> GetSeriesByFolderPath(string folder);
Task<Series> GetFullSeriesByName(string series, int libraryId);
Task<Series> GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None);
Task<Series> GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId, MangaFormat format, bool withFullIncludes = true);
Task<List<Series>> RemoveSeriesNotInList(IList<ParsedSeries> seenSeries, int libraryId);
Task<IDictionary<string, IList<SeriesModified>>> GetFolderPathMap(int libraryId);
@ -1173,52 +1171,16 @@ public class SeriesRepository : ISeriesRepository
/// <summary>
/// Return a Series by Folder path. Null if not found.
/// </summary>
/// <param name="folder">This will be normalized in the query</param>
/// <param name="includes">Additional relationships to include with the base query</param>
/// <returns>The matching Series, or null when no series owns that folder</returns>
public async Task<Series> GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None)
{
    var normalized = Services.Tasks.Scanner.Parser.Parser.NormalizePath(folder);
    var query = _context.Series.Where(s => s.FolderPath.Equals(normalized));

    query = AddIncludesToQuery(query, includes);

    // NOTE(review): SingleOrDefaultAsync throws InvalidOperationException when more than one
    // series maps to the same folder; callers (e.g. ScannerService.ScanFolder) handle that case.
    return await query.SingleOrDefaultAsync();
}
/// <summary>
@ -1240,6 +1202,7 @@ public class SeriesRepository : ISeriesRepository
.Where(s => s.Format == format && format != MangaFormat.Unknown)
.Where(s => s.NormalizedName.Equals(normalizedSeries)
|| (s.NormalizedLocalizedName.Equals(normalizedSeries) && s.NormalizedLocalizedName != string.Empty));
if (!string.IsNullOrEmpty(normalizedLocalized))
{
query = query.Where(s =>
@ -1516,7 +1479,8 @@ public class SeriesRepository : ISeriesRepository
LastScanned = s.LastFolderScanned,
SeriesName = s.Name,
FolderPath = s.FolderPath,
Format = s.Format
Format = s.Format,
LibraryRoots = s.Library.Folders.Select(f => f.Path)
}).ToListAsync();
var map = new Dictionary<string, IList<SeriesModified>>();
@ -1538,4 +1502,30 @@ public class SeriesRepository : ISeriesRepository
return map;
}
/// <summary>
/// Applies the requested <see cref="SeriesIncludes"/> flags to the query as EF Include clauses.
/// </summary>
/// <param name="query">Base Series query to augment</param>
/// <param name="includeFlags">Bitwise combination of relationships to eager-load</param>
/// <returns>The query with the requested navigation properties included</returns>
private static IQueryable<Series> AddIncludesToQuery(IQueryable<Series> query, SeriesIncludes includeFlags)
{
    if (includeFlags.HasFlag(SeriesIncludes.Library)) query = query.Include(s => s.Library);
    if (includeFlags.HasFlag(SeriesIncludes.Related)) query = query.Include(s => s.Relations);
    if (includeFlags.HasFlag(SeriesIncludes.Metadata)) query = query.Include(s => s.Metadata);
    if (includeFlags.HasFlag(SeriesIncludes.Volumes)) query = query.Include(s => s.Volumes);

    return query;
}
}

View file

@ -65,11 +65,12 @@ public class Chapter : IEntityDate, IHasReadTimeEstimate
/// </summary>
public string Language { get; set; }
/// <summary>
/// Total number of issues or volumes in the series.
/// </summary>
/// <remarks>Users may use Volume count or issue count. Kavita performs some light logic to help Count match up with TotalCount</remarks>
public int TotalCount { get; set; } = 0;
/// <summary>
/// How many of the <see cref="TotalCount"/> issues/volumes are accounted for (progress toward the series being complete).
/// </summary>
public int Count { get; set; } = 0;

View file

@ -31,4 +31,14 @@ public static class ChapterListExtensions
? chapters.FirstOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath)))
: chapters.FirstOrDefault(c => c.Range == info.Chapters);
}
/// <summary>
/// Returns the smallest Release Year across the given Chapters, considering only years that meet
/// the validity requirement (>= 1000). Returns 0 when no chapter has a valid year.
/// </summary>
/// <param name="chapters">Chapters to inspect</param>
/// <returns>Minimum valid release year, or 0</returns>
public static int MinimumReleaseYear(this IList<Chapter> chapters)
{
    var validYears = chapters
        .Where(chapter => chapter.ReleaseDate.Year >= 1000)
        .Select(chapter => chapter.ReleaseDate.Year)
        .ToList();

    // Mirror DefaultIfEmpty().Min(): an empty set yields default(int) == 0
    return validYears.Count == 0 ? 0 : validYears.Min();
}
}

View file

@ -651,7 +651,7 @@ public class DirectoryService : IDirectoryService
/// <summary>
/// Returns the most recent last-write time of any entry under the given folder (recursive).
/// </summary>
/// <param name="folderPath">Folder to inspect; must exist</param>
/// <returns>Latest write time, or DateTime.MaxValue when the folder has no entries</returns>
/// <exception cref="IOException">Thrown when the folder does not exist</exception>
public DateTime GetLastWriteTime(string folderPath)
{
    if (!FileSystem.Directory.Exists(folderPath)) throw new IOException($"{folderPath} does not exist");
    // Go through the injected IFileSystem abstraction (not System.IO.Directory directly) so this stays unit-testable
    var fileEntries = FileSystem.Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories);
    if (fileEntries.Length == 0) return DateTime.MaxValue;
    return fileEntries.Max(path => FileSystem.File.GetLastWriteTime(path));
}

View file

@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Entities.Enums;
@ -18,6 +19,7 @@ public interface ITaskScheduler
Task ScheduleTasks();
Task ScheduleStatsTasks();
void ScheduleUpdaterTasks();
void ScanFolder(string folderPath, TimeSpan delay);
void ScanFolder(string folderPath);
void ScanLibrary(int libraryId, bool force = false);
void CleanupChapters(int[] chapterIds);
@ -179,9 +181,32 @@ public class TaskScheduler : ITaskScheduler
RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Daily(Rnd.Next(12, 18)), TimeZoneInfo.Local);
}
/// <summary>
/// Schedules a ScanFolder job to run after the given delay, unless an identical job is already queued.
/// </summary>
/// <param name="folderPath">Folder to scan; normalized before checking/enqueuing</param>
/// <param name="delay">How long to wait before the scan fires</param>
public void ScanFolder(string folderPath, TimeSpan delay)
{
    var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
    var alreadyQueued = HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {normalizedFolder});
    if (alreadyQueued)
    {
        _logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued", normalizedFolder);
        return;
    }

    _logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder);
    BackgroundJob.Schedule(() => _scannerService.ScanFolder(normalizedFolder), delay);
}
/// <summary>
/// Invokes a ScanFolder job immediately, unless an identical job is already queued.
/// </summary>
/// <param name="folderPath">Folder to scan; normalized before checking/enqueuing</param>
public void ScanFolder(string folderPath)
{
    var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
    // Guard against duplicate work: the watcher and the API can both request the same folder
    if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {normalizedFolder}))
    {
        _logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued",
            normalizedFolder);
        return;
    }

    _logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder);
    _scannerService.ScanFolder(normalizedFolder);
}
#endregion
@ -298,15 +323,32 @@ public class TaskScheduler : ITaskScheduler
await _versionUpdaterService.PushUpdate(update);
}
/// <summary>
/// Whether there is an enqueued or scheduled task for the <see cref="ScannerService.ScanLibrary"/> method for this library.
/// </summary>
/// <param name="libraryId">Library to check against</param>
/// <returns>True when a matching job is already queued or scheduled</returns>
public static bool HasScanTaskRunningForLibrary(int libraryId)
{
    // ScanLibrary can be queued with force true or false; either counts as already running.
    // Use ScannerService.Name rather than a string literal so a rename can't silently break the check.
    return
        HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, true}, ScanQueue) ||
        HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, false}, ScanQueue);
}
/// <summary>
/// Whether there is an enqueued or scheduled task for the <see cref="ScannerService.ScanSeries"/> method for this series.
/// </summary>
/// <param name="seriesId">Series to check against</param>
/// <returns>True when a matching job is already queued or scheduled</returns>
public static bool HasScanTaskRunningForSeries(int seriesId)
{
    // ScanSeries can be queued with its bypass flag true or false; either counts as already running
    return
        HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, true}, ScanQueue) ||
        HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, false}, ScanQueue);
}
/// <summary>
/// Checks if this same invocation is already enqueued or scheduled
/// </summary>
/// <param name="methodName">Method name that was enqueued</param>
/// <param name="className">Class name the method resides on</param>
@ -316,16 +358,33 @@ public class TaskScheduler : ITaskScheduler
/// <summary>
/// Checks if this same invocation (class, method, and exact argument list) is already enqueued or scheduled on the given queue.
/// </summary>
/// <param name="className">Class name the method resides on</param>
/// <param name="methodName">Method name that was enqueued</param>
/// <param name="args">Exact argument list the job was created with (compared via SequenceEqual)</param>
/// <param name="queue">Hangfire queue to inspect; defaults to the default queue</param>
/// <returns>True when a matching job exists in either the enqueued or the scheduled set</returns>
public static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue)
{
    var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue);
    var ret = enqueuedJobs.Any(j => j.Value.InEnqueuedState &&
                                    j.Value.Job.Method.DeclaringType != null && j.Value.Job.Args.SequenceEqual(args) &&
                                    j.Value.Job.Method.Name.Equals(methodName) &&
                                    j.Value.Job.Method.DeclaringType.Name.Equals(className));
    if (ret) return true;

    // Delayed jobs (BackgroundJob.Schedule) live in the scheduled set, not the queue, so check both
    var scheduledJobs = JobStorage.Current.GetMonitoringApi().ScheduledJobs(0, int.MaxValue);
    return scheduledJobs.Any(j =>
        j.Value.Job.Method.DeclaringType != null && j.Value.Job.Args.SequenceEqual(args) &&
        j.Value.Job.Method.Name.Equals(methodName) &&
        j.Value.Job.Method.DeclaringType.Name.Equals(className));
}
/// <summary>
/// Checks against any jobs from the given classes that are enqueued or currently processing.
/// </summary>
/// <param name="classNames">Declaring-type names to match against</param>
/// <param name="queue">Hangfire queue to inspect; defaults to the default queue</param>
/// <returns>True when any matching job is found</returns>
public static bool RunningAnyTasksByMethod(IEnumerable<string> classNames, string queue = DefaultQueue)
{
    var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue);
    // NOTE(review): the negation on InEnqueuedState looks suspicious (it filters OUT jobs in the
    // enqueued state while iterating enqueued jobs) — preserved as-is; confirm intended semantics.
    var ret = enqueuedJobs.Any(j => !j.Value.InEnqueuedState &&
                                    classNames.Contains(j.Value.Job.Method.DeclaringType?.Name));
    if (ret) return true;

    // Also cover jobs that have already been picked up by a worker
    var runningJobs = JobStorage.Current.GetMonitoringApi().ProcessingJobs(0, int.MaxValue);
    return runningJobs.Any(j => classNames.Contains(j.Value.Job.Method.DeclaringType?.Name));
}
}

View file

@ -38,7 +38,7 @@ public class LibraryWatcher : ILibraryWatcher
private readonly IDirectoryService _directoryService;
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<LibraryWatcher> _logger;
private readonly IScannerService _scannerService;
private readonly ITaskScheduler _taskScheduler;
private static readonly Dictionary<string, IList<FileSystemWatcher>> WatcherDictionary = new ();
/// <summary>
@ -54,18 +54,19 @@ public class LibraryWatcher : ILibraryWatcher
/// <summary>
/// Counts within a time frame how many times the buffer became full. Is used to reschedule LibraryWatcher to start monitoring much later rather than instantly
/// </summary>
private int _bufferFullCounter = 0;
private int _bufferFullCounter;
/// <summary>
/// Used to lock buffer Full Counter
/// </summary>
private static readonly object Lock = new ();
private DateTime _lastBufferOverflow = DateTime.MinValue;
public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork, ILogger<LibraryWatcher> logger, IScannerService scannerService, IHostEnvironment environment)
public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork,
ILogger<LibraryWatcher> logger, IHostEnvironment environment, ITaskScheduler taskScheduler)
{
_directoryService = directoryService;
_unitOfWork = unitOfWork;
_logger = logger;
_scannerService = scannerService;
_taskScheduler = taskScheduler;
_queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(30) : TimeSpan.FromMinutes(5);
@ -91,8 +92,8 @@ public class LibraryWatcher : ILibraryWatcher
watcher.Created += OnCreated;
watcher.Deleted += OnDeleted;
watcher.Error += OnError;
watcher.Disposed += (sender, args) =>
_logger.LogError("[LibraryWatcher] watcher was disposed when it shouldn't have been");
watcher.Disposed += (_, _) =>
_logger.LogError("[LibraryWatcher] watcher was disposed when it shouldn't have been. Please report this to Kavita dev");
watcher.Filter = "*.*";
watcher.IncludeSubdirectories = true;
@ -127,16 +128,14 @@ public class LibraryWatcher : ILibraryWatcher
{
_logger.LogDebug("[LibraryWatcher] Restarting watcher");
UpdateBufferOverflow();
StopWatching();
await StartWatching();
}
/// <summary>
/// FileSystemWatcher handler for Changed events; enqueues the change for processing.
/// </summary>
/// <param name="sender">Watcher that raised the event</param>
/// <param name="e">Event details including the full path that changed</param>
private void OnChanged(object sender, FileSystemEventArgs e)
{
    // Only genuine Changed events matter here; Created/Deleted are wired to their own handlers
    if (e.ChangeType != WatcherChangeTypes.Changed) return;
    _logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", e.FullPath, e.Name);
    // An entry with no extension is treated as a directory change by ProcessChange
    BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name))));
}
@ -158,20 +157,31 @@ public class LibraryWatcher : ILibraryWatcher
BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, true));
}
/// <summary>
/// On error, we count the number of times the internal buffer overflowed recently. If it has happened
/// 3 or more times within the tracking window, suspend file watching for an hour; otherwise restart the watchers.
/// </summary>
/// <remarks>This schedules a job to decrement the buffer-full counter after 10 minutes</remarks>
/// <param name="sender">Watcher that raised the error</param>
/// <param name="e">Error details (typically an internal buffer overflow)</param>
private void OnError(object sender, ErrorEventArgs e)
{
    _logger.LogError(e.GetException(), "[LibraryWatcher] An error occured, likely too many changes occured at once or the folder being watched was deleted. Restarting Watchers");

    // Increment and read the counter inside the lock so concurrent error events can't race
    bool condition;
    lock (Lock)
    {
        _bufferFullCounter += 1;
        condition = _bufferFullCounter >= 3;
    }

    if (condition)
    {
        _logger.LogInformation("[LibraryWatcher] Internal buffer has been overflown multiple times in past 10 minutes. Suspending file watching for an hour");
        StopWatching();
        BackgroundJob.Schedule(() => RestartWatching(), TimeSpan.FromHours(1));
        return;
    }

    Task.Run(RestartWatching);
    // Decay the counter after 10 minutes so isolated overflows don't accumulate into a suspension
    BackgroundJob.Schedule(() => UpdateLastBufferOverflow(), TimeSpan.FromMinutes(10));
}
@ -185,8 +195,6 @@ public class LibraryWatcher : ILibraryWatcher
// ReSharper disable once MemberCanBePrivate.Global
public async Task ProcessChange(string filePath, bool isDirectoryChange = false)
{
UpdateBufferOverflow();
var sw = Stopwatch.StartNew();
_logger.LogDebug("[LibraryWatcher] Processing change of {FilePath}", filePath);
try
@ -214,29 +222,16 @@ public class LibraryWatcher : ILibraryWatcher
return;
}
// Check if this task has already enqueued or is being processed, before enqueing
var alreadyScheduled =
TaskScheduler.HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {fullPath});
if (!alreadyScheduled)
{
_logger.LogInformation("[LibraryWatcher] Scheduling ScanFolder for {Folder}", fullPath);
BackgroundJob.Schedule(() => _scannerService.ScanFolder(fullPath), _queueWaitTime);
}
else
{
_logger.LogInformation("[LibraryWatcher] Skipped scheduling ScanFolder for {Folder} as a job already queued",
fullPath);
}
_taskScheduler.ScanFolder(fullPath, _queueWaitTime);
}
catch (Exception ex)
{
_logger.LogError(ex, "[LibraryWatcher] An error occured when processing a watch event");
}
_logger.LogDebug("[LibraryWatcher] ProcessChange ran in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds);
_logger.LogDebug("[LibraryWatcher] ProcessChange completed in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds);
}
private string GetFolder(string filePath, IList<string> libraryFolders)
private string GetFolder(string filePath, IEnumerable<string> libraryFolders)
{
var parentDirectory = _directoryService.GetParentDirectoryName(filePath);
_logger.LogDebug("[LibraryWatcher] Parent Directory: {ParentDirectory}", parentDirectory);
@ -256,14 +251,17 @@ public class LibraryWatcher : ILibraryWatcher
return Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder.First()));
}
/// <summary>
/// This is called via Hangfire to decrement the buffer-full counter. Must work within a lock
/// because <see cref="OnError"/> increments the same counter from watcher threads.
/// </summary>
// ReSharper disable once MemberCanBePrivate.Global
public void UpdateLastBufferOverflow()
{
    lock (Lock)
    {
        if (_bufferFullCounter == 0) return;
        _bufferFullCounter -= 1;
    }
}
}

View file

@ -1,12 +1,14 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities.Enums;
using API.Extensions;
using API.Parser;
using API.SignalR;
using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks.Scanner;
@ -39,6 +41,7 @@ public class SeriesModified
public string SeriesName { get; set; }
public DateTime LastScanned { get; set; }
public MangaFormat Format { get; set; }
public IEnumerable<string> LibraryRoots { get; set; }
}
@ -109,7 +112,41 @@ public class ParseScannedFiles
await folderAction(new List<string>(), folderPath);
return;
}
await folderAction(_directoryService.ScanFiles(folderPath), folderPath);
// We need to calculate all folders till library root and see if any kavitaignores
var seriesMatcher = new GlobMatcher();
try
{
var roots = seriesPaths[folderPath][0].LibraryRoots.Select(Scanner.Parser.Parser.NormalizePath).ToList();
var libraryFolder = roots.SingleOrDefault(folderPath.Contains);
if (string.IsNullOrEmpty(libraryFolder) || !Directory.Exists(folderPath))
{
await folderAction(_directoryService.ScanFiles(folderPath, seriesMatcher), folderPath);
return;
}
var allParents = _directoryService.GetFoldersTillRoot(libraryFolder, folderPath);
var path = libraryFolder;
// Apply the library root level kavitaignore
var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(path, DirectoryService.KavitaIgnoreFile);
seriesMatcher.Merge(_directoryService.CreateMatcherFromFile(potentialIgnoreFile));
// Then apply kavitaignores for each folder down to where the series folder is
foreach (var folderPart in allParents.Reverse())
{
path = Parser.Parser.NormalizePath(Path.Join(libraryFolder, folderPart));
potentialIgnoreFile = _directoryService.FileSystem.Path.Join(path, DirectoryService.KavitaIgnoreFile);
seriesMatcher.Merge(_directoryService.CreateMatcherFromFile(potentialIgnoreFile));
}
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an error trying to find and apply .kavitaignores above the Series Folder. Scanning without them present");
}
await folderAction(_directoryService.ScanFiles(folderPath, seriesMatcher), folderPath);
}

View file

@ -62,7 +62,7 @@ public class DefaultParser : IDefaultParser
};
}
if (Parser.IsCoverImage(filePath)) return null;
if (Parser.IsCoverImage(_directoryService.FileSystem.Path.GetFileName(filePath))) return null;
if (Parser.IsImage(filePath))
{

View file

@ -238,13 +238,7 @@ public class ProcessSeries : IProcessSeries
// Update Metadata based on Chapter metadata
if (!series.Metadata.ReleaseYearLocked)
{
series.Metadata.ReleaseYear = chapters.Select(v => v.ReleaseDate.Year).Where(y => y >= 1000).DefaultIfEmpty().Min();
if (series.Metadata.ReleaseYear < 1000)
{
// Not a valid year, default to 0
series.Metadata.ReleaseYear = 0;
}
series.Metadata.ReleaseYear = chapters.MinimumReleaseYear();
}
// Set the AgeRating as highest in all the comicInfos
@ -637,14 +631,7 @@ public class ProcessSeries : IProcessSeries
}
// This needs to check against both Number and Volume to calculate Count
if (!string.IsNullOrEmpty(comicInfo.Number) && float.Parse(comicInfo.Number) > 0)
{
chapter.Count = (int) Math.Floor(float.Parse(comicInfo.Number));
}
if (!string.IsNullOrEmpty(comicInfo.Volume) && float.Parse(comicInfo.Volume) > 0)
{
chapter.Count = Math.Max(chapter.Count, (int) Math.Floor(float.Parse(comicInfo.Volume)));
}
chapter.Count = comicInfo.CalculatedCount();
void AddPerson(Person person)
{
@ -755,7 +742,6 @@ public class ProcessSeries : IProcessSeries
/// <param name="action"></param>
private void UpdatePeople(IEnumerable<string> names, PersonRole role, Action<Person> action)
{
var allPeopleTypeRole = _people.Where(p => p.Role == role).ToList();
foreach (var name in names)

View file

@ -8,6 +8,7 @@ using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Parser;
@ -97,24 +98,39 @@ public class ScannerService : IScannerService
_wordCountAnalyzerService = wordCountAnalyzerService;
}
/// <summary>
/// Given a generic folder path, will invoke a Series scan or Library scan.
/// </summary>
/// <remarks>This will Schedule the job to run 1 minute in the future to allow for any close-by duplicate requests to be dropped</remarks>
/// <param name="folder"></param>
public async Task ScanFolder(string folder)
{
var seriesId = await _unitOfWork.SeriesRepository.GetSeriesIdByFolder(folder);
if (seriesId > 0)
Series series = null;
try
{
if (TaskScheduler.HasAlreadyEnqueuedTask(Name, "ScanSeries",
new object[] {seriesId, true}))
series = await _unitOfWork.SeriesRepository.GetSeriesByFolderPath(folder, SeriesIncludes.Library);
}
catch (InvalidOperationException ex)
{
if (ex.Message.Equals("Sequence contains more than one element."))
{
_logger.LogCritical("[ScannerService] Multiple series map to this folder. Library scan will be used for ScanFolder");
}
}
if (series != null && series.Library.Type != LibraryType.Book)
{
if (TaskScheduler.HasScanTaskRunningForSeries(series.Id))
{
_logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this series. Dropping request", folder);
return;
}
BackgroundJob.Enqueue(() => ScanSeries(seriesId, true));
BackgroundJob.Schedule(() => ScanSeries(series.Id, true), TimeSpan.FromMinutes(1));
return;
}
// This is basically rework of what's already done in Library Watcher but is needed if invoked via API
var parentDirectory = _directoryService.GetParentDirectoryName(folder);
if (string.IsNullOrEmpty(parentDirectory)) return; // This should never happen as it's calculated before enqueing
if (string.IsNullOrEmpty(parentDirectory)) return;
var libraries = (await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()).ToList();
var libraryFolders = libraries.SelectMany(l => l.Folders);
@ -125,18 +141,17 @@ public class ScannerService : IScannerService
var library = libraries.FirstOrDefault(l => l.Folders.Select(Scanner.Parser.Parser.NormalizePath).Contains(libraryFolder));
if (library != null)
{
if (TaskScheduler.HasAlreadyEnqueuedTask(Name, "ScanLibrary",
new object[] {library.Id, false}))
if (TaskScheduler.HasScanTaskRunningForLibrary(library.Id))
{
_logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this library. Dropping request", folder);
return;
}
BackgroundJob.Enqueue(() => ScanLibrary(library.Id, false));
BackgroundJob.Schedule(() => ScanLibrary(library.Id, false), TimeSpan.FromMinutes(1));
}
}
/// <summary>
///
/// Scans just an existing Series for changes. If the series doesn't exist, will delete it.
/// </summary>
/// <param name="seriesId"></param>
/// <param name="bypassFolderOptimizationChecks">Not Used. Scan series will always force</param>
@ -186,6 +201,7 @@ public class ScannerService : IScannerService
return;
}
// If the series path doesn't exist anymore, it was either moved or renamed. We need to essentially delete it
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name));
@ -213,11 +229,13 @@ public class ScannerService : IScannerService
}
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, true);
var scanElapsedTime = await ScanFiles(library, new []{ folderPath }, false, TrackFiles, true);
_logger.LogInformation("ScanFiles for {Series} took {Time}", series.Name, scanElapsedTime);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name));
// Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
RemoveParsedInfosNotForSeries(parsedSeries, series);