Scan Loop Fixes (#1459)
* Added Last Folder Scanned time to series info modal. Tweaked the info event detail modal to have a primary and thus be auto-dismissible * Added an error event when multiple series are found in processing a series. * Fixed a bug where a series could get stuck with other series due to a bad select query. Started adding the force flag hook for the UI and designing the confirm. Confirm service now also has ability to hide the close button. Updated error events and logging in the loop, to be more informative * Fixed a bug where confirm service wasn't showing the proper body content. * Hooked up force scan series * refresh metadata now has force update * Fixed up the messaging with the prompt on scan, hooked it up properly in the scan library to avoid the check if the whole library needs to even be scanned. Fixed a bug where NormalizedLocalizedName wasn't being calculated on new entities. Started adding unit tests for this problematic repo method. * Fixed a bug where we updated NormalizedLocalizedName before we set it. * Send an info to the UI when series are spread between multiple library level folders. * Added some logger output when there are no files found in a folder. Return early if there are no files found, so we can avoid some small loops of code. * Fixed an issue where multiple series in a folder with localized series would cause unintended grouping. This is not supported and hence we will warn them and allow the bad grouping. * Added a case where scan series fails due to the folder being removed. We will now log an error * Normalize paths when finding the highest directory till root. * Fixed an issue with Scan Series where changing a series' folder to a different path but the original series folder existed with another series in it, would cause the series to not be deleted. * Fixed some bugs around specials causing a series merge issue on scan series. * Removed a bug marker * Cleaned up some of the scan loop and removed a test I don't need. 
* Remove any prompts for force flow, it doesn't work well. Leave the API as is though. * Fixed up a check for duplicate ScanLibrary calls
This commit is contained in:
parent
354be09c4c
commit
1c9544fc47
27 changed files with 367 additions and 222 deletions
|
@ -168,17 +168,17 @@ namespace API.Controllers
|
|||
|
||||
[Authorize(Policy = "RequireAdminRole")]
|
||||
[HttpPost("scan")]
|
||||
public ActionResult Scan(int libraryId)
|
||||
public ActionResult Scan(int libraryId, bool force = false)
|
||||
{
|
||||
_taskScheduler.ScanLibrary(libraryId);
|
||||
_taskScheduler.ScanLibrary(libraryId, force);
|
||||
return Ok();
|
||||
}
|
||||
|
||||
[Authorize(Policy = "RequireAdminRole")]
|
||||
[HttpPost("refresh-metadata")]
|
||||
public ActionResult RefreshMetadata(int libraryId)
|
||||
public ActionResult RefreshMetadata(int libraryId, bool force = true)
|
||||
{
|
||||
_taskScheduler.RefreshMetadata(libraryId);
|
||||
_taskScheduler.RefreshMetadata(libraryId, force);
|
||||
return Ok();
|
||||
}
|
||||
|
||||
|
|
|
@ -58,5 +58,9 @@ namespace API.DTOs
|
|||
/// The highest level folder for this Series
|
||||
/// </summary>
|
||||
public string FolderPath { get; set; }
|
||||
/// <summary>
|
||||
/// The last time the folder for this series was scanned
|
||||
/// </summary>
|
||||
public DateTime LastFolderScanned { get; set; }
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,6 +24,26 @@ namespace API.Data
|
|||
OriginalName = name,
|
||||
LocalizedName = name,
|
||||
NormalizedName = Parser.Parser.Normalize(name),
|
||||
NormalizedLocalizedName = Parser.Parser.Normalize(name),
|
||||
SortName = name,
|
||||
Volumes = new List<Volume>(),
|
||||
Metadata = SeriesMetadata(Array.Empty<CollectionTag>())
|
||||
};
|
||||
}
|
||||
|
||||
public static Series Series(string name, string localizedName)
|
||||
{
|
||||
if (string.IsNullOrEmpty(localizedName))
|
||||
{
|
||||
localizedName = name;
|
||||
}
|
||||
return new Series
|
||||
{
|
||||
Name = name,
|
||||
OriginalName = name,
|
||||
LocalizedName = localizedName,
|
||||
NormalizedName = Parser.Parser.Normalize(name),
|
||||
NormalizedLocalizedName = Parser.Parser.Normalize(localizedName),
|
||||
SortName = name,
|
||||
Volumes = new List<Volume>(),
|
||||
Metadata = SeriesMetadata(Array.Empty<CollectionTag>())
|
||||
|
|
|
@ -1220,15 +1220,19 @@ public class SeriesRepository : ISeriesRepository
|
|||
/// <returns></returns>
|
||||
public Task<Series> GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId)
|
||||
{
|
||||
var localizedSeries = Parser.Parser.Normalize(seriesName);
|
||||
var normalizedSeries = Parser.Parser.Normalize(seriesName);
|
||||
var normalizedLocalized = Parser.Parser.Normalize(localizedName);
|
||||
return _context.Series
|
||||
.Where(s => s.NormalizedName.Equals(localizedSeries)
|
||||
|| s.NormalizedName.Equals(normalizedLocalized)
|
||||
|| s.NormalizedLocalizedName.Equals(localizedSeries)
|
||||
|| s.NormalizedLocalizedName.Equals(normalizedLocalized))
|
||||
var query = _context.Series
|
||||
.Where(s => s.LibraryId == libraryId)
|
||||
.Include(s => s.Metadata)
|
||||
.Where(s => s.NormalizedName.Equals(normalizedSeries)
|
||||
|| (s.NormalizedLocalizedName.Equals(normalizedSeries) && s.NormalizedLocalizedName != string.Empty));
|
||||
if (!string.IsNullOrEmpty(normalizedLocalized))
|
||||
{
|
||||
query = query.Where(s =>
|
||||
s.NormalizedName.Equals(normalizedLocalized) || s.NormalizedLocalizedName.Equals(normalizedLocalized));
|
||||
}
|
||||
|
||||
return query.Include(s => s.Metadata)
|
||||
.ThenInclude(m => m.People)
|
||||
.Include(s => s.Metadata)
|
||||
.ThenInclude(m => m.Genres)
|
||||
|
|
|
@ -492,10 +492,10 @@ namespace API.Services
|
|||
{
|
||||
var stopLookingForDirectories = false;
|
||||
var dirs = new Dictionary<string, string>();
|
||||
foreach (var folder in libraryFolders)
|
||||
foreach (var folder in libraryFolders.Select(Parser.Parser.NormalizePath))
|
||||
{
|
||||
if (stopLookingForDirectories) break;
|
||||
foreach (var file in filePaths)
|
||||
foreach (var file in filePaths.Select(Parser.Parser.NormalizePath))
|
||||
{
|
||||
if (!file.Contains(folder)) continue;
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@ public interface ITaskScheduler
|
|||
Task ScheduleTasks();
|
||||
Task ScheduleStatsTasks();
|
||||
void ScheduleUpdaterTasks();
|
||||
void ScanLibrary(int libraryId);
|
||||
void ScanLibrary(int libraryId, bool force = false);
|
||||
void CleanupChapters(int[] chapterIds);
|
||||
void RefreshMetadata(int libraryId, bool forceUpdate = true);
|
||||
void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false);
|
||||
|
@ -174,9 +174,12 @@ public class TaskScheduler : ITaskScheduler
|
|||
_scannerService.ScanLibraries();
|
||||
}
|
||||
|
||||
public void ScanLibrary(int libraryId)
|
||||
public void ScanLibrary(int libraryId, bool force = false)
|
||||
{
|
||||
if (HasAlreadyEnqueuedTask("ScannerService","ScanLibrary", new object[] {libraryId}, ScanQueue))
|
||||
var alreadyEnqueued =
|
||||
HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, true}, ScanQueue) ||
|
||||
HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, false}, ScanQueue);
|
||||
if (alreadyEnqueued)
|
||||
{
|
||||
_logger.LogInformation("A duplicate request to scan library for library occured. Skipping");
|
||||
return;
|
||||
|
@ -184,12 +187,12 @@ public class TaskScheduler : ITaskScheduler
|
|||
if (RunningAnyTasksByMethod(new List<string>() {"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}, ScanQueue))
|
||||
{
|
||||
_logger.LogInformation("A Scan is already running, rescheduling ScanLibrary in 3 hours");
|
||||
BackgroundJob.Schedule(() => ScanLibrary(libraryId), TimeSpan.FromHours(3));
|
||||
BackgroundJob.Schedule(() => ScanLibrary(libraryId, force), TimeSpan.FromHours(3));
|
||||
return;
|
||||
}
|
||||
|
||||
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
|
||||
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId));
|
||||
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force));
|
||||
// When we do a scan, force cache to re-unpack in case page numbers change
|
||||
BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory());
|
||||
}
|
||||
|
@ -201,7 +204,11 @@ public class TaskScheduler : ITaskScheduler
|
|||
|
||||
public void RefreshMetadata(int libraryId, bool forceUpdate = true)
|
||||
{
|
||||
if (HasAlreadyEnqueuedTask("MetadataService","GenerateCoversForLibrary", new object[] {libraryId, forceUpdate}))
|
||||
var alreadyEnqueued = HasAlreadyEnqueuedTask("MetadataService", "GenerateCoversForLibrary",
|
||||
new object[] {libraryId, true}) ||
|
||||
HasAlreadyEnqueuedTask("MetadataService", "GenerateCoversForLibrary",
|
||||
new object[] {libraryId, false});
|
||||
if (alreadyEnqueued)
|
||||
{
|
||||
_logger.LogInformation("A duplicate request to refresh metadata for library occured. Skipping");
|
||||
return;
|
||||
|
@ -232,7 +239,7 @@ public class TaskScheduler : ITaskScheduler
|
|||
}
|
||||
if (RunningAnyTasksByMethod(new List<string>() {"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}, ScanQueue))
|
||||
{
|
||||
_logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 mins");
|
||||
_logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 minutes");
|
||||
BackgroundJob.Schedule(() => ScanSeries(libraryId, seriesId, forceUpdate), TimeSpan.FromMinutes(10));
|
||||
return;
|
||||
}
|
||||
|
@ -276,7 +283,7 @@ public class TaskScheduler : ITaskScheduler
|
|||
/// <param name="args">object[] of arguments in the order they are passed to enqueued job</param>
|
||||
/// <param name="queue">Queue to check against. Defaults to "default"</param>
|
||||
/// <returns></returns>
|
||||
public static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue)
|
||||
private static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue)
|
||||
{
|
||||
var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue);
|
||||
return enqueuedJobs.Any(j => j.Value.InEnqueuedState &&
|
||||
|
|
|
@ -3,10 +3,8 @@ using System.Collections.Concurrent;
|
|||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Parser;
|
||||
using API.SignalR;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
@ -68,26 +66,6 @@ namespace API.Services.Tasks.Scanner
|
|||
_eventHub = eventHub;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the list of all parserInfos given a Series (Will match on Name, LocalizedName, OriginalName). If the series does not exist within, return empty list.
|
||||
/// </summary>
|
||||
/// <param name="parsedSeries"></param>
|
||||
/// <param name="series"></param>
|
||||
/// <returns></returns>
|
||||
public static IList<ParserInfo> GetInfosByName(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Series series)
|
||||
{
|
||||
var allKeys = parsedSeries.Keys.Where(ps =>
|
||||
SeriesHelper.FindSeries(series, ps));
|
||||
|
||||
var infos = new List<ParserInfo>();
|
||||
foreach (var key in allKeys)
|
||||
{
|
||||
infos.AddRange(parsedSeries[key]);
|
||||
}
|
||||
|
||||
return infos;
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// This will Scan all files in a folder path. For each folder within the folderPath, FolderAction will be invoked for all files contained
|
||||
|
@ -192,7 +170,7 @@ namespace API.Services.Tasks.Scanner
|
|||
/// </summary>
|
||||
/// <param name="info"></param>
|
||||
/// <returns>Series Name to group this info into</returns>
|
||||
public string MergeName(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParserInfo info)
|
||||
private string MergeName(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParserInfo info)
|
||||
{
|
||||
var normalizedSeries = Parser.Parser.Normalize(info.Series);
|
||||
var normalizedLocalSeries = Parser.Parser.Normalize(info.LocalizedSeries);
|
||||
|
@ -230,7 +208,7 @@ namespace API.Services.Tasks.Scanner
|
|||
|
||||
|
||||
/// <summary>
|
||||
/// This is a new version which will process series by folder groups.
|
||||
/// This will process series by folder groups.
|
||||
/// </summary>
|
||||
/// <param name="libraryType"></param>
|
||||
/// <param name="folders"></param>
|
||||
|
@ -263,8 +241,16 @@ namespace API.Services.Tasks.Scanner
|
|||
}
|
||||
_logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder);
|
||||
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(folderPath, libraryName, ProgressEventType.Updated));
|
||||
if (files.Count == 0)
|
||||
{
|
||||
_logger.LogInformation("[ScannerService] {Folder} is empty", folder);
|
||||
return;
|
||||
}
|
||||
var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
|
||||
var infos = files.Select(file => _readingItemService.ParseFile(file, folderPath, libraryType)).Where(info => info != null).ToList();
|
||||
var infos = files
|
||||
.Select(file => _readingItemService.ParseFile(file, folderPath, libraryType))
|
||||
.Where(info => info != null)
|
||||
.ToList();
|
||||
|
||||
|
||||
MergeLocalizedSeriesWithSeries(infos);
|
||||
|
@ -320,17 +306,36 @@ namespace API.Services.Tasks.Scanner
|
|||
/// World of Acceleration v02.cbz having Series "Accel World" and Localized Series of "World of Acceleration"
|
||||
/// </example>
|
||||
/// <param name="infos">A collection of ParserInfos</param>
|
||||
private static void MergeLocalizedSeriesWithSeries(IReadOnlyCollection<ParserInfo> infos)
|
||||
private void MergeLocalizedSeriesWithSeries(IReadOnlyCollection<ParserInfo> infos)
|
||||
{
|
||||
var hasLocalizedSeries = infos.Any(i => !string.IsNullOrEmpty(i.LocalizedSeries));
|
||||
if (!hasLocalizedSeries) return;
|
||||
|
||||
var localizedSeries = infos.Select(i => i.LocalizedSeries).Distinct()
|
||||
var localizedSeries = infos
|
||||
.Where(i => !i.IsSpecial)
|
||||
.Select(i => i.LocalizedSeries)
|
||||
.Distinct()
|
||||
.FirstOrDefault(i => !string.IsNullOrEmpty(i));
|
||||
if (string.IsNullOrEmpty(localizedSeries)) return;
|
||||
|
||||
var nonLocalizedSeries = infos.Select(i => i.Series).Distinct()
|
||||
.FirstOrDefault(series => !series.Equals(localizedSeries));
|
||||
// NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves.
|
||||
string nonLocalizedSeries;
|
||||
var nonLocalizedSeriesFound = infos.Where(i => !i.IsSpecial).Select(i => i.Series).Distinct().ToList();
|
||||
if (nonLocalizedSeriesFound.Count == 1)
|
||||
{
|
||||
nonLocalizedSeries = nonLocalizedSeriesFound.First();
|
||||
}
|
||||
else
|
||||
{
|
||||
// There can be a case where there are multiple series in a folder that causes merging.
|
||||
if (nonLocalizedSeriesFound.Count > 2)
|
||||
{
|
||||
_logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder: {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
|
||||
}
|
||||
nonLocalizedSeries = nonLocalizedSeriesFound.FirstOrDefault(s => !s.Equals(localizedSeries));
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(nonLocalizedSeries)) return;
|
||||
|
||||
var normalizedNonLocalizedSeries = Parser.Parser.Normalize(nonLocalizedSeries);
|
||||
foreach (var infoNeedingMapping in infos.Where(i =>
|
||||
|
|
|
@ -88,7 +88,7 @@ public class ProcessSeries : IProcessSeries
|
|||
|
||||
// Check if there is a Series
|
||||
var firstInfo = parsedInfos.First();
|
||||
Series series = null;
|
||||
Series series;
|
||||
try
|
||||
{
|
||||
series =
|
||||
|
@ -97,29 +97,29 @@ public class ProcessSeries : IProcessSeries
|
|||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "There was an exception finding existing series for {SeriesName} with Localized name of {LocalizedName}. This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan", firstInfo.Series, firstInfo.LocalizedSeries);
|
||||
_logger.LogError(ex, "There was an exception finding existing series for {SeriesName} with Localized name of {LocalizedName} for library {LibraryId}. This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan", firstInfo.Series, firstInfo.LocalizedSeries, library.Id);
|
||||
await _eventHub.SendMessageAsync(MessageFactory.Error,
|
||||
MessageFactory.ErrorEvent($"There was an exception finding existing series for {firstInfo.Series} with Localized name of {firstInfo.LocalizedSeries} for library {library.Id}",
|
||||
"This indicates you have duplicate series with same name or localized name in the library. Correct this and rescan."));
|
||||
return;
|
||||
}
|
||||
|
||||
if (series == null)
|
||||
{
|
||||
seriesAdded = true;
|
||||
series = DbFactory.Series(firstInfo.Series);
|
||||
series.LocalizedName = firstInfo.LocalizedSeries;
|
||||
series = DbFactory.Series(firstInfo.Series, firstInfo.LocalizedSeries);
|
||||
}
|
||||
|
||||
if (series.LibraryId == 0) series.LibraryId = library.Id;
|
||||
|
||||
try
|
||||
{
|
||||
|
||||
_logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);
|
||||
|
||||
UpdateVolumes(series, parsedInfos);
|
||||
series.Pages = series.Volumes.Sum(v => v.Pages);
|
||||
|
||||
series.NormalizedName = Parser.Parser.Normalize(series.Name);
|
||||
series.NormalizedLocalizedName = Parser.Parser.Normalize(series.LocalizedName);
|
||||
series.OriginalName ??= parsedInfos[0].Series;
|
||||
if (series.Format == MangaFormat.Unknown)
|
||||
{
|
||||
|
@ -144,13 +144,17 @@ public class ProcessSeries : IProcessSeries
|
|||
if (!series.LocalizedNameLocked && !string.IsNullOrEmpty(localizedSeries))
|
||||
{
|
||||
series.LocalizedName = localizedSeries;
|
||||
series.NormalizedLocalizedName = Parser.Parser.Normalize(series.LocalizedName);
|
||||
}
|
||||
|
||||
// Update series FolderPath here (TODO: Move this into it's own private method)
|
||||
var seriesDirs = _directoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(l => l.Path), parsedInfos.Select(f => f.FullFilePath).ToList());
|
||||
if (seriesDirs.Keys.Count == 0)
|
||||
{
|
||||
_logger.LogCritical("Scan Series has files spread outside a main series folder. This has negative performance effects. Please ensure all series are in a folder");
|
||||
_logger.LogCritical("Scan Series has files spread outside a main series folder. This has negative performance effects. Please ensure all series are under a single folder from library");
|
||||
await _eventHub.SendMessageAsync(MessageFactory.Info,
|
||||
MessageFactory.InfoEvent($"{series.Name} has files spread outside a single series folder",
|
||||
"This has negative performance effects. Please ensure all series are under a single folder from library"));
|
||||
}
|
||||
else
|
||||
{
|
||||
|
|
|
@ -29,7 +29,7 @@ public interface IScannerService
|
|||
[Queue(TaskScheduler.ScanQueue)]
|
||||
[DisableConcurrentExecution(60 * 60 * 60)]
|
||||
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
|
||||
Task ScanLibrary(int libraryId);
|
||||
Task ScanLibrary(int libraryId, bool forceUpdate = false);
|
||||
|
||||
[Queue(TaskScheduler.ScanQueue)]
|
||||
[DisableConcurrentExecution(60 * 60 * 60)]
|
||||
|
@ -62,6 +62,10 @@ public enum ScanCancelReason
|
|||
/// There has been no change to the filesystem since last scan
|
||||
/// </summary>
|
||||
NoChange = 2,
|
||||
/// <summary>
|
||||
/// The underlying folder is missing
|
||||
/// </summary>
|
||||
FolderMissing = 3
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -117,10 +121,15 @@ public class ScannerService : IScannerService
|
|||
var library = libraries.FirstOrDefault(l => l.Folders.Select(Parser.Parser.NormalizePath).Contains(libraryFolder));
|
||||
if (library != null)
|
||||
{
|
||||
BackgroundJob.Enqueue(() => ScanLibrary(library.Id));
|
||||
BackgroundJob.Enqueue(() => ScanLibrary(library.Id, false));
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
///
|
||||
/// </summary>
|
||||
/// <param name="seriesId"></param>
|
||||
/// <param name="bypassFolderOptimizationChecks">Not Used. Scan series will always force</param>
|
||||
[Queue(TaskScheduler.ScanQueue)]
|
||||
public async Task ScanSeries(int seriesId, bool bypassFolderOptimizationChecks = true)
|
||||
{
|
||||
|
@ -130,12 +139,7 @@ public class ScannerService : IScannerService
|
|||
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId, LibraryIncludes.Folders);
|
||||
var libraryPaths = library.Folders.Select(f => f.Path).ToList();
|
||||
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, bypassFolderOptimizationChecks) != ScanCancelReason.NoCancel) return;
|
||||
|
||||
|
||||
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
|
||||
var seenSeries = new List<ParsedSeries>();
|
||||
var processTasks = new List<Task>();
|
||||
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel) return;
|
||||
|
||||
var folderPath = series.FolderPath;
|
||||
if (string.IsNullOrEmpty(folderPath) || !_directoryService.Exists(folderPath))
|
||||
|
@ -150,22 +154,32 @@ public class ScannerService : IScannerService
|
|||
}
|
||||
|
||||
folderPath = seriesDirs.Keys.FirstOrDefault();
|
||||
|
||||
// We should check if folderPath is a library folder path and if so, return early and tell user to correct their setup.
|
||||
if (libraryPaths.Contains(folderPath))
|
||||
{
|
||||
_logger.LogCritical("[ScannerSeries] {SeriesName} scan aborted. Files for series are not in a nested folder under library path. Correct this and rescan", series.Name);
|
||||
await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"{series.Name} scan aborted", "Files for series are not in a nested folder under library path. Correct this and rescan."));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(folderPath))
|
||||
{
|
||||
_logger.LogCritical("Scan Series could not find a single, valid folder root for files");
|
||||
_logger.LogCritical("[ScannerSeries] Scan Series could not find a single, valid folder root for files");
|
||||
await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"{series.Name} scan aborted", "Scan Series could not find a single, valid folder root for files"));
|
||||
return;
|
||||
}
|
||||
|
||||
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
|
||||
var processTasks = new List<Task>();
|
||||
|
||||
|
||||
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name));
|
||||
|
||||
await _processSeries.Prime();
|
||||
void TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
|
||||
{
|
||||
var skippedScan = parsedInfo.Item1;
|
||||
var parsedFiles = parsedInfo.Item2;
|
||||
if (parsedFiles.Count == 0) return;
|
||||
|
||||
|
@ -176,44 +190,21 @@ public class ScannerService : IScannerService
|
|||
Format = parsedFiles.First().Format
|
||||
};
|
||||
|
||||
if (skippedScan)
|
||||
if (!foundParsedSeries.NormalizedName.Equals(series.NormalizedName))
|
||||
{
|
||||
seenSeries.AddRange(parsedFiles.Select(pf => new ParsedSeries()
|
||||
{
|
||||
Name = pf.Series,
|
||||
NormalizedName = Parser.Parser.Normalize(pf.Series),
|
||||
Format = pf.Format
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
seenSeries.Add(foundParsedSeries);
|
||||
processTasks.Add(_processSeries.ProcessSeriesAsync(parsedFiles, library));
|
||||
parsedSeries.Add(foundParsedSeries, parsedFiles);
|
||||
}
|
||||
|
||||
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
|
||||
var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, bypassFolderOptimizationChecks);
|
||||
var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, true);
|
||||
_logger.LogInformation("ScanFiles for {Series} took {Time}", series.Name, scanElapsedTime);
|
||||
|
||||
await Task.WhenAll(processTasks);
|
||||
|
||||
// At this point, we've already inserted the series into the DB OR we haven't and seenSeries has our series
|
||||
// We now need to do any leftover work, like removing
|
||||
// We need to handle if parsedSeries is empty but seenSeries has our series
|
||||
if (seenSeries.Any(s => s.NormalizedName.Equals(series.NormalizedName)) && parsedSeries.Keys.Count == 0)
|
||||
{
|
||||
// Nothing has changed
|
||||
_logger.LogInformation("[ScannerService] {SeriesName} scan has no work to do. All folders have not been changed since last scan", series.Name);
|
||||
await _eventHub.SendMessageAsync(MessageFactory.Info,
|
||||
MessageFactory.InfoEvent($"{series.Name} scan has no work to do",
|
||||
"All folders have not been changed since last scan. Scan will be aborted."));
|
||||
|
||||
_processSeries.EnqueuePostSeriesProcessTasks(series.LibraryId, seriesId, false);
|
||||
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name));
|
||||
return;
|
||||
}
|
||||
|
||||
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name));
|
||||
|
||||
// Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
|
||||
|
@ -222,8 +213,8 @@ public class ScannerService : IScannerService
|
|||
// If nothing was found, first validate any of the files still exist. If they don't then we have a deletion and can skip the rest of the logic flow
|
||||
if (parsedSeries.Count == 0)
|
||||
{
|
||||
var anyFilesExist =
|
||||
(await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id)).Any(m => File.Exists(m.FilePath));
|
||||
var seriesFiles = (await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id));
|
||||
var anyFilesExist = seriesFiles.Where(f => f.FilePath.Contains(series.FolderPath)).Any(m => File.Exists(m.FilePath));
|
||||
|
||||
if (!anyFilesExist)
|
||||
{
|
||||
|
@ -287,21 +278,34 @@ public class ScannerService : IScannerService
|
|||
}
|
||||
|
||||
// If all series Folder paths haven't been modified since last scan, abort
|
||||
// NOTE: On windows, the parent folder will not update LastWriteTime if a subfolder was updated with files. Need to do a bit of light I/O.
|
||||
if (!bypassFolderChecks)
|
||||
{
|
||||
|
||||
var allFolders = seriesFolderPaths.SelectMany(path => _directoryService.GetDirectories(path)).ToList();
|
||||
allFolders.AddRange(seriesFolderPaths);
|
||||
|
||||
if (allFolders.All(folder => _directoryService.GetLastWriteTime(folder) <= series.LastFolderScanned))
|
||||
try
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"[ScannerService] {SeriesName} scan has no work to do. All folders have not been changed since last scan",
|
||||
if (allFolders.All(folder => _directoryService.GetLastWriteTime(folder) <= series.LastFolderScanned))
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"[ScannerService] {SeriesName} scan has no work to do. All folders have not been changed since last scan",
|
||||
series.Name);
|
||||
await _eventHub.SendMessageAsync(MessageFactory.Info,
|
||||
MessageFactory.InfoEvent($"{series.Name} scan has no work to do",
|
||||
"All folders have not been changed since last scan. Scan will be aborted."));
|
||||
return ScanCancelReason.NoChange;
|
||||
}
|
||||
}
|
||||
catch (IOException ex)
|
||||
{
|
||||
// If there is an exception it means that the folder doesn't exist. So we should delete the series
|
||||
_logger.LogError(ex, "[ScannerService] Scan series for {SeriesName} found the folder path no longer exists",
|
||||
series.Name);
|
||||
await _eventHub.SendMessageAsync(MessageFactory.Info,
|
||||
MessageFactory.InfoEvent($"{series.Name} scan has no work to do", "All folders have not been changed since last scan. Scan will be aborted."));
|
||||
return ScanCancelReason.NoChange;
|
||||
MessageFactory.ErrorEvent($"{series.Name} scan has no work to do",
|
||||
"The folder the series is in is missing. Delete series manually or perform a library scan."));
|
||||
return ScanCancelReason.NoCancel;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -393,7 +397,7 @@ public class ScannerService : IScannerService
|
|||
[Queue(TaskScheduler.ScanQueue)]
|
||||
[DisableConcurrentExecution(60 * 60 * 60)]
|
||||
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
|
||||
public async Task ScanLibrary(int libraryId)
|
||||
public async Task ScanLibrary(int libraryId, bool forceUpdate = false)
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
|
||||
|
@ -405,7 +409,7 @@ public class ScannerService : IScannerService
|
|||
var wasLibraryUpdatedSinceLastScan = (library.LastModified.Truncate(TimeSpan.TicksPerMinute) >
|
||||
library.LastScanned.Truncate(TimeSpan.TicksPerMinute))
|
||||
&& library.LastScanned != DateTime.MinValue;
|
||||
if (!wasLibraryUpdatedSinceLastScan)
|
||||
if (!forceUpdate && !wasLibraryUpdatedSinceLastScan)
|
||||
{
|
||||
var haveFoldersChangedSinceLastScan = library.Folders
|
||||
.All(f => _directoryService.GetLastWriteTime(f.Path).Truncate(TimeSpan.TicksPerMinute) > f.LastScanned.Truncate(TimeSpan.TicksPerMinute));
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue