More Scan Loop Bugfixes (#1471)

* Updated scan time for watcher to 30 seconds for non-dev. Moved ScanFolder off the Scan queue as it doesn't need to be there. Updated loggers

* Fixed jumpbar missing

* Tweaked the messaging for CoverGen

* When we return early due to nothing being done on library and series scan, make sure we kick off other tasks that need to occur.

* Fixed a foreign constraint issue on Volumes when we were adding to a new series.

* Fixed a case where, when picking the normalized series name, capitalization differences prevented series from stacking when they should have stacked.

* Reduced the logging output on dev and prod settings.

* Fixed a bug in the code that finds the highest directory from a file, where we were not checking against a normalized path.

* Cleaned up some code

* Fixed broken unit tests
This commit is contained in:
Joseph Milazzo 2022-08-24 11:27:32 -05:00 committed by GitHub
parent fc0121e7a8
commit 1e535a8184
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 77 additions and 62 deletions

View file

@ -1,4 +1,5 @@
using System.Threading.Tasks;
using System;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.Entities;
using AutoMapper;
@ -26,7 +27,6 @@ public interface IUnitOfWork
bool Commit();
Task<bool> CommitAsync();
bool HasChanges();
bool Rollback();
Task<bool> RollbackAsync();
}
public class UnitOfWork : IUnitOfWork
@ -93,16 +93,15 @@ public class UnitOfWork : IUnitOfWork
/// <returns></returns>
public async Task<bool> RollbackAsync()
{
await _context.DisposeAsync();
return true;
}
/// <summary>
/// Rollback transaction
/// </summary>
/// <returns></returns>
public bool Rollback()
{
_context.Dispose();
try
{
await _context.Database.RollbackTransactionAsync();
}
catch (Exception)
{
// Swallow exception (this might be used in places where a transaction isn't setup)
}
return true;
}
}

View file

@ -508,10 +508,10 @@ namespace API.Services
break;
}
var fullPath = Path.Join(folder, parts.Last());
var fullPath = Parser.Parser.NormalizePath(Path.Join(folder, parts.Last()));
if (!dirs.ContainsKey(fullPath))
{
dirs.Add(Parser.Parser.NormalizePath(fullPath), string.Empty);
dirs.Add(fullPath, string.Empty);
}
}
}

View file

@ -161,7 +161,7 @@ public class MetadataService : IMetadataService
/// <param name="forceUpdate"></param>
private async Task ProcessSeriesCoverGen(Series series, bool forceUpdate)
{
_logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName);
_logger.LogDebug("[MetadataService] Generating cover images for series: {SeriesName}", series.OriginalName);
try
{
var volumeIndex = 0;

View file

@ -78,7 +78,7 @@ public class LibraryWatcher : ILibraryWatcher
_logger = logger;
_scannerService = scannerService;
_queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(10) : TimeSpan.FromMinutes(5);
_queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(10) : TimeSpan.FromSeconds(30);
}
@ -142,18 +142,18 @@ public class LibraryWatcher : ILibraryWatcher
private void OnChanged(object sender, FileSystemEventArgs e)
{
if (e.ChangeType != WatcherChangeTypes.Changed) return;
Console.WriteLine($"Changed: {e.FullPath}, {e.Name}");
_logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", e.FullPath, e.Name);
ProcessChange(e.FullPath);
}
private void OnCreated(object sender, FileSystemEventArgs e)
{
Console.WriteLine($"Created: {e.FullPath}, {e.Name}");
_logger.LogDebug("[LibraryWatcher] Created: {FullPath}, {Name}", e.FullPath, e.Name);
ProcessChange(e.FullPath, !_directoryService.FileSystem.File.Exists(e.Name));
}
private void OnDeleted(object sender, FileSystemEventArgs e) {
Console.WriteLine($"Deleted: {e.FullPath}, {e.Name}");
_logger.LogDebug("[LibraryWatcher] Deleted: {FullPath}, {Name}", e.FullPath, e.Name);
// On deletion, we need another type of check. We need to check if e.Name has an extension or not
// NOTE: File deletion will trigger a folder change event, so this might not be needed
@ -164,9 +164,9 @@ public class LibraryWatcher : ILibraryWatcher
private void OnRenamed(object sender, RenamedEventArgs e)
{
Console.WriteLine($"Renamed:");
Console.WriteLine($" Old: {e.OldFullPath}");
Console.WriteLine($" New: {e.FullPath}");
_logger.LogDebug($"[LibraryWatcher] Renamed:");
_logger.LogDebug(" Old: {OldFullPath}", e.OldFullPath);
_logger.LogDebug(" New: {FullPath}", e.FullPath);
ProcessChange(e.FullPath, _directoryService.FileSystem.Directory.Exists(e.FullPath));
}
@ -179,14 +179,6 @@ public class LibraryWatcher : ILibraryWatcher
{
// We need to check if directory or not
if (!isDirectoryChange && !new Regex(Parser.Parser.SupportedExtensions).IsMatch(new FileInfo(filePath).Extension)) return;
// Don't do anything if a Library or ScanSeries in progress
// if (TaskScheduler.RunningAnyTasksByMethod(new[] {"MetadataService", "ScannerService"}))
// {
// // NOTE: I'm not sure we need this to be honest. Now with the speed of the new loop and the queue, we should just put in queue for processing
// _logger.LogDebug("Suppressing Change due to scan being inprogress");
// return;
// }
var parentDirectory = _directoryService.GetParentDirectoryName(filePath);
if (string.IsNullOrEmpty(parentDirectory)) return;
@ -206,14 +198,12 @@ public class LibraryWatcher : ILibraryWatcher
FolderPath = fullPath,
QueueTime = DateTime.Now
};
if (_scanQueue.Contains(queueItem, _folderScanQueueableComparer))
if (!_scanQueue.Contains(queueItem, _folderScanQueueableComparer))
{
ProcessQueue();
return;
_logger.LogDebug("[LibraryWatcher] Queuing job for {Folder}", fullPath);
_scanQueue.Enqueue(queueItem);
}
_scanQueue.Enqueue(queueItem);
ProcessQueue();
}
@ -228,7 +218,7 @@ public class LibraryWatcher : ILibraryWatcher
var item = _scanQueue.Peek();
if (item.QueueTime < DateTime.Now.Subtract(_queueWaitTime))
{
_logger.LogDebug("Scheduling ScanSeriesFolder for {Folder}", item.FolderPath);
_logger.LogDebug("[LibraryWatcher] Scheduling ScanSeriesFolder for {Folder}", item.FolderPath);
BackgroundJob.Enqueue(() => _scannerService.ScanFolder(item.FolderPath));
_scanQueue.Dequeue();
i++;

View file

@ -320,7 +320,10 @@ namespace API.Services.Tasks.Scanner
// NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves.
string nonLocalizedSeries;
var nonLocalizedSeriesFound = infos.Where(i => !i.IsSpecial).Select(i => i.Series).Distinct().ToList();
// Normalize this as many of the cases is a capitalization difference
var nonLocalizedSeriesFound = infos
.Where(i => !i.IsSpecial)
.Select(i => i.Series).DistinctBy(Parser.Parser.Normalize).ToList();
if (nonLocalizedSeriesFound.Count == 1)
{
nonLocalizedSeries = nonLocalizedSeriesFound.First();
@ -330,7 +333,7 @@ namespace API.Services.Tasks.Scanner
// There can be a case where there are multiple series in a folder that causes merging.
if (nonLocalizedSeriesFound.Count > 2)
{
_logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder: {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
_logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder or ensure there is only 2 potential series (localized and series): {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
}
nonLocalizedSeries = nonLocalizedSeriesFound.FirstOrDefault(s => !s.Equals(localizedSeries));
}

View file

@ -183,7 +183,7 @@ public class ProcessSeries : IProcessSeries
}
_logger.LogInformation("[ScannerService] Finished series update on {SeriesName} in {Milliseconds} ms", seriesName, scanWatch.ElapsedMilliseconds);
EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id, false);
EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id);
}
private async Task UpdateSeriesFolderPath(IEnumerable<ParserInfo> parsedInfos, Library library, Series series)
@ -431,7 +431,6 @@ public class ProcessSeries : IProcessSeries
volume = DbFactory.Volume(volumeNumber);
volume.SeriesId = series.Id;
series.Volumes.Add(volume);
_unitOfWork.VolumeRepository.Add(volume);
}
volume.Name = volumeNumber;

View file

@ -1,10 +1,8 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
@ -41,9 +39,6 @@ public interface IScannerService
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
Task ScanSeries(int seriesId, bool bypassFolderOptimizationChecks = true);
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
Task ScanFolder(string folder);
}
@ -81,11 +76,12 @@ public class ScannerService : IScannerService
private readonly IDirectoryService _directoryService;
private readonly IReadingItemService _readingItemService;
private readonly IProcessSeries _processSeries;
private readonly IWordCountAnalyzerService _wordCountAnalyzerService;
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger,
IMetadataService metadataService, ICacheService cacheService, IEventHub eventHub,
IDirectoryService directoryService, IReadingItemService readingItemService,
IProcessSeries processSeries)
IProcessSeries processSeries, IWordCountAnalyzerService wordCountAnalyzerService)
{
_unitOfWork = unitOfWork;
_logger = logger;
@ -95,9 +91,9 @@ public class ScannerService : IScannerService
_directoryService = directoryService;
_readingItemService = readingItemService;
_processSeries = processSeries;
_wordCountAnalyzerService = wordCountAnalyzerService;
}
[Queue(TaskScheduler.ScanQueue)]
public async Task ScanFolder(string folder)
{
var seriesId = await _unitOfWork.SeriesRepository.GetSeriesIdByFolder(folder);
@ -138,7 +134,12 @@ public class ScannerService : IScannerService
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId, LibraryIncludes.Folders);
var libraryPaths = library.Folders.Select(f => f.Path).ToList();
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel) return;
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel)
{
BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForSeries(series.LibraryId, seriesId, false));
BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(library.Id, seriesId, false));
return;
}
var folderPath = series.FolderPath;
if (string.IsNullOrEmpty(folderPath) || !_directoryService.Exists(folderPath))
@ -420,6 +421,9 @@ public class ScannerService : IScannerService
await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.InfoEvent($"{library.Name} scan has no work to do",
"All folders have not been changed since last scan. Scan will be aborted."));
BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForLibrary(library.Id, false));
BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(library.Id, false));
return;
}
}
@ -455,7 +459,7 @@ public class ScannerService : IScannerService
Format = parsedFiles.First().Format
};
// NOTE: Could we check if there are multiple found series (different series) and process each one?
// NOTE: Could we check if there are multiple found series (different series) and process each one?
if (skippedScan)
{

View file

@ -6,10 +6,10 @@
"Logging": {
"LogLevel": {
"Default": "Debug",
"Microsoft": "Information",
"Microsoft": "Error",
"Microsoft.Hosting.Lifetime": "Error",
"Hangfire": "Information",
"Microsoft.AspNetCore.Hosting.Internal.WebHost": "Information"
"Hangfire": "Error",
"Microsoft.AspNetCore.Hosting.Internal.WebHost": "Error"
},
"File": {
"Path": "config//logs/kavita.log",

View file

@ -0,0 +1,22 @@
{
"ConnectionStrings": {
"DefaultConnection": "Data source=config/kavita.db"
},
"TokenKey": "super secret unguessable key",
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft": "Error",
"Microsoft.Hosting.Lifetime": "Error",
"Hangfire": "Error",
"Microsoft.AspNetCore.Hosting.Internal.WebHost": "Error"
},
"File": {
"Path": "config/logs/kavita.log",
"Append": "True",
"FileSizeLimitBytes": 10485760,
"MaxRollingFiles": 1
}
},
"Port": 5000
}