Performance, Scan Loop, Specials, and cleanup (#150)
* More cases for parsing regex.
* Fixed a bug where chapter cover images weren't being updated due to a missed negation.
* Removed a piece of code that was only needed for upgrading, since all beta users agreed to wipe the DB.
* Fixed InProgress to properly respect order and show the most recent activity first. The underlying issue is that IEntityDate LastModified was not updating in DataContext.
* Updated dependencies to the latest stable versions.
* LastModified on Volumes wasn't updating; validated that it now updates when data is changed.
* Rewrote a check to avoid a small heap object warning.
* Ensure UpdateSeries checks all libraries for a unique name.
* Took care of some TODOs and removed unused imports. On dev, go ahead and schedule recurring jobs, since LiteDB caused the locking issue.
* No tracking when we aren't using entities.
* Added code to remove abandoned progress rows after a chapter gets deleted.
* RefreshMetadata uses one large query rather than many trips to the DB for updating metadata. Significantly faster.
* Fixed a bug where UpdateSeries would always complain about a unique name even when we weren't updating the name.
* Files that are linked to a series but can't parse out Volume/Chapter information are properly grouped like other Specials.
* Refresh Metadata on the UI should call the task directly.
* Fixed a bug on updating series to make sure we don't complain if we aren't trying to update the name to an existing name.
* Fixed #142 - Library cards should be sorted.
* Refactored the names of some variables to be more agnostic to comics.
* Implemented ScanLibrary but abandoned it.
* Code cleanup and removal of ScanSeries code.
* Some more tests and new comparers for natural sorting.
* Fixed #137 - When performing I/O on archives, ignore __MACOSX folders completely.
* All entities that should show under the Specials tab are now marked special, rather than just those with a special keyword.
* Don't let specials generate cover images.
* SearchResults should send LocalizedName back, since we are searching against it.
* Added some tests around __MACOSX folders found on my actual server.
* Added extra notes about a case where duplicates come about; the logger will now tell the user about this issue.
* Missed a build issue somehow...
* Some code smells.
parent 7790cf31fd
commit d3c14863d6

39 changed files with 401 additions and 184 deletions
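One pattern worth calling out before the hunks: ArchiveService below repeatedly swaps `new MemoryStream()` for `_streamManager.GetStream()`. RecyclableMemoryStreamManager (from the Microsoft.IO.RecyclableMemoryStream package) hands out streams backed by pooled buffers, so hot paths that copy archive entries over and over stop churning the garbage collector. A minimal sketch of the pattern, independent of Kavita's code (EntryReader is a hypothetical name):

    using System.IO;
    using Microsoft.IO;

    public static class EntryReader
    {
        // One long-lived manager per process; it owns the reusable buffer pools.
        // Creating a manager per call would defeat the pooling entirely.
        private static readonly RecyclableMemoryStreamManager StreamManager = new RecyclableMemoryStreamManager();

        public static byte[] CopyToArray(Stream source)
        {
            // GetStream() rents pooled buffers instead of allocating a fresh
            // growing byte[] for every archive entry that gets copied.
            using var ms = StreamManager.GetStream();
            source.CopyTo(ms);
            return ms.ToArray(); // ToArray still allocates; the savings are in the stream's internal buffers.
        }
    }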
@@ -10,6 +10,7 @@ using API.Extensions;
 using API.Interfaces.Services;
 using API.Services.Tasks;
 using Microsoft.Extensions.Logging;
+using Microsoft.IO;
 using SharpCompress.Archives;
 using SharpCompress.Common;
 using Image = NetVips.Image;
@@ -22,7 +23,8 @@ namespace API.Services
     public class ArchiveService : IArchiveService
     {
         private readonly ILogger<ArchiveService> _logger;
-        private const int ThumbnailWidth = 320; // 153w x 230h TODO: Look into optimizing the images to be smaller
+        private const int ThumbnailWidth = 320; // 153w x 230h
+        private static readonly RecyclableMemoryStreamManager _streamManager = new RecyclableMemoryStreamManager();

         public ArchiveService(ILogger<ArchiveService> logger)
         {
@@ -74,13 +76,15 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using default compression handling");
                     using ZipArchive archive = ZipFile.OpenRead(archivePath);
-                    return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
+                    return archive.Entries.Count(e => !e.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(e.FullName));
                 }
                 case ArchiveLibrary.SharpCompress:
                 {
                     _logger.LogDebug("Using SharpCompress compression handling");
                     using var archive = ArchiveFactory.Open(archivePath);
-                    return archive.Entries.Count(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key));
+                    return archive.Entries.Count(entry => !entry.IsDirectory &&
+                                                          !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                          && Parser.Parser.IsImage(entry.Key));
                 }
                 case ArchiveLibrary.NotSupported:
                     _logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath);
@@ -117,8 +121,8 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using default compression handling");
                     using var archive = ZipFile.OpenRead(archivePath);
-                    var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
-                    var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
+                    var folder = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
+                    var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && !x.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
                     var entry = folder ?? entries[0];

                     return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry);
@@ -127,7 +131,9 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using SharpCompress compression handling");
                     using var archive = ArchiveFactory.Open(archivePath);
-                    return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
+                    return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory
+                                                                         && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                                         && Parser.Parser.IsImage(entry.Key)), createThumbnail);
                 }
                 case ArchiveLibrary.NotSupported:
                     _logger.LogError("[GetCoverImage] This archive cannot be read: {ArchivePath}. Defaulting to no cover image", archivePath);
@@ -152,10 +158,11 @@ namespace API.Services
             {
                 if (Path.GetFileNameWithoutExtension(entry.Key).ToLower() == "folder")
                 {
-                    using var ms = new MemoryStream();
+                    using var ms = _streamManager.GetStream();
                     entry.WriteTo(ms);
                     ms.Position = 0;
-                    return createThumbnail ? CreateThumbnail(ms.ToArray(), Path.GetExtension(entry.Key)) : ms.ToArray();
+                    var data = ms.ToArray();
+                    return createThumbnail ? CreateThumbnail(data, Path.GetExtension(entry.Key)) : data;
                 }
             }

@@ -163,7 +170,7 @@ namespace API.Services
         {
             var entry = images.OrderBy(e => e.Key).FirstOrDefault();
             if (entry == null) return Array.Empty<byte>();
-            using var ms = new MemoryStream();
+            using var ms = _streamManager.GetStream();
             entry.WriteTo(ms);
             ms.Position = 0;
             var data = ms.ToArray();
@@ -176,11 +183,9 @@ namespace API.Services
         private static byte[] ConvertEntryToByteArray(ZipArchiveEntry entry)
         {
             using var stream = entry.Open();
-            using var ms = new MemoryStream();
+            using var ms = _streamManager.GetStream();
             stream.CopyTo(ms);
-            var data = ms.ToArray();
-
-            return data;
+            return ms.ToArray();
         }

         /// <summary>
@@ -194,7 +199,7 @@ namespace API.Services
             // Sometimes ZipArchive will list the directory and others it will just keep it in the FullName
             return archive.Entries.Count > 0 &&
                    !Path.HasExtension(archive.Entries.ElementAt(0).FullName) ||
-                   archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar));
+                   archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !e.FullName.Contains("__MACOSX"));
         }

         private byte[] CreateThumbnail(byte[] entry, string formatExtension = ".jpg")
@@ -211,7 +216,7 @@ namespace API.Services
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image");
+                _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image. Format Extension {Extension}", formatExtension);
             }

             return Array.Empty<byte>();
@@ -263,7 +268,7 @@ namespace API.Services
             {
                 if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && Parser.Parser.IsXml(entry.Key))
                 {
-                    using var ms = new MemoryStream();
+                    using var ms = _streamManager.GetStream();
                     entry.WriteTo(ms);
                     ms.Position = 0;

@@ -295,7 +300,7 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using default compression handling");
                     using var archive = ZipFile.OpenRead(archivePath);
-                    var entry = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
+                    var entry = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
                     if (entry != null)
                     {
                         using var stream = entry.Open();
@@ -308,7 +313,9 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using SharpCompress compression handling");
                     using var archive = ArchiveFactory.Open(archivePath);
-                    info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
+                    info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
+                                                                           && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                                           && Parser.Parser.IsXml(entry.Key)));
                     break;
                 }
                 case ArchiveLibrary.NotSupported:
@@ -392,7 +399,9 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using SharpCompress compression handling");
                     using var archive = ArchiveFactory.Open(archivePath);
-                    ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
+                    ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory
+                                                                          && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                                          && Parser.Parser.IsImage(entry.Key)), extractPath);
                     break;
                 }
                 case ArchiveLibrary.NotSupported:
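All the ArchiveService changes above share one idea: __MACOSX directories, which macOS's Finder adds to zip archives to carry resource-fork metadata, often contain entries whose names end in image extensions, so they skewed page counts, cover selection, and extraction. A small standalone predicate in the same spirit (a sketch; the diff itself inlines a substring check rather than using a helper like this):

    using System.Linq;

    public static class ArchiveEntryFilter
    {
        // True when any path segment is the macOS metadata folder "__MACOSX".
        // The diff uses a looser substring check on the full entry path
        // (or on Path.GetDirectoryName for SharpCompress keys), which also
        // catches nested paths like "__MACOSX/ch1/001.png".
        public static bool IsMacOsMetadata(string entryPath)
        {
            if (string.IsNullOrEmpty(entryPath)) return false;
            return entryPath.Split('/', '\\').Contains("__MACOSX");
        }
    }

Usage would mirror the new code above, e.g. archive.Entries.Count(e => !ArchiveEntryFilter.IsMacOsMetadata(e.FullName) && Parser.Parser.IsImage(e.FullName)).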
@@ -2,14 +2,14 @@
 {
     public class ComicInfo
     {
-        public string Summary;
-        public string Title;
-        public string Series;
-        public string Notes;
-        public string Publisher;
-        public string Genre;
-        public int PageCount;
-        public string LanguageISO;
-        public string Web;
+        public string Summary { get; set; }
+        public string Title { get; set; }
+        public string Series { get; set; }
+        public string Notes { get; set; }
+        public string Publisher { get; set; }
+        public string Genre { get; set; }
+        public int PageCount { get; set; }
+        public string LanguageISO { get; set; }
+        public string Web { get; set; }
     }
 }
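The field-to-property change above is most likely housekeeping under the "code smells" bullet: XmlSerializer reads public fields as well as properties, but auto-properties are the idiomatic C# surface and cooperate with the wider ecosystem of serializers and binders. The consuming side isn't rendered on this page; deserializing a ComicInfo.xml entry presumably looks something like this sketch (ComicInfoReader is a hypothetical name):

    using System.IO;
    using System.Xml.Serialization;

    public static class ComicInfoReader
    {
        // XmlSerializer instances are expensive to build; cache one per type.
        private static readonly XmlSerializer Serializer = new XmlSerializer(typeof(ComicInfo));

        // Reads a ComicInfo.xml entry that was copied out of the archive into a stream.
        public static ComicInfo Read(Stream xmlStream)
        {
            return (ComicInfo) Serializer.Deserialize(xmlStream);
        }
    }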
@@ -6,10 +6,8 @@ using System.Linq;
 using System.Text.RegularExpressions;
-using System.Threading;
 using System.Threading.Tasks;
-using API.DTOs;
 using API.Interfaces.Services;
 using Microsoft.Extensions.Logging;
 using NetVips;

 namespace API.Services
 {
@@ -60,6 +58,7 @@ namespace API.Services
             {
                 rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
             }
+            // NOTE: I Could use Path.GetRelativePath and split on separator character instead.

             var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath;
             var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath;
@@ -4,6 +4,7 @@ using System.Diagnostics;
 using System.IO;
 using System.Linq;
 using System.Threading.Tasks;
+using API.Comparators;
 using API.Entities;
 using API.Extensions;
 using API.Interfaces;
@@ -45,9 +46,9 @@ namespace API.Services
             {
                 if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate))
                 {
-                    // TODO: Create a custom sorter for Chapters so it's consistent across the application
+                    // TODO: Replace this with ChapterSortComparator
                     volume.Chapters ??= new List<Chapter>();
-                    var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();
+                    var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number)).FirstOrDefault();

                     var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
                     // Skip calculating Cover Image (I/O) if the chapter already has it set
@@ -67,16 +68,29 @@ namespace API.Services

         public void UpdateMetadata(Series series, bool forceUpdate)
         {
+            // TODO: Use new ChapterSortComparer() here instead
             if (series == null) return;
             if (ShouldFindCoverImage(series.CoverImage, forceUpdate))
             {
                 series.Volumes ??= new List<Volume>();
                 var firstCover = series.Volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
+                byte[] coverImage = null;
                 if (firstCover == null && series.Volumes.Any())
                 {
                     firstCover = series.Volumes.FirstOrDefault(x => x.Number == 0);
+                    // If firstCover is null and one volume, the whole series is Chapters under Vol 0.
+                    if (series.Volumes.Count == 1)
+                    {
+                        coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number))
+                            .FirstOrDefault(c => !c.IsSpecial)?.CoverImage;
+                    }
+
+                    if (coverImage == null)
+                    {
+                        coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number))
+                            .FirstOrDefault()?.CoverImage;
+                    }
                 }
-                series.CoverImage = firstCover?.CoverImage;
+                series.CoverImage = firstCover?.CoverImage ?? coverImage;
             }

             if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return;
@@ -88,22 +102,20 @@ namespace API.Services
             if (firstFile != null && !new FileInfo(firstFile.FilePath).DoesLastWriteMatch(firstFile.LastModified))
             {
                 series.Summary = _archiveService.GetSummaryInfo(firstFile.FilePath);
+                firstFile.LastModified = DateTime.Now;
             }
         }

         public void RefreshMetadata(int libraryId, bool forceUpdate = false)
         {
             var sw = Stopwatch.StartNew();
-            var library = Task.Run(() => _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId)).Result;
-            var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
+            var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result;

             _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name);
-            foreach (var series in allSeries)
+            foreach (var series in library.Series)
             {
                 series.NormalizedName = Parser.Parser.Normalize(series.Name);
-
-                var volumes = Task.Run(() => _unitOfWork.SeriesRepository.GetVolumes(series.Id)).Result.ToList();
-                foreach (var volume in volumes)
+                foreach (var volume in series.Volumes)
                 {
                     foreach (var chapter in volume.Chapters)
                     {
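RefreshMetadata above now makes a single GetFullLibraryForIdAsync call instead of one query per series plus one per volume, which is the "one large query rather than many trips to DB" bullet from the commit message. The repository method itself isn't rendered on this page, but with EF Core the one-big-query shape is presumably an Include/ThenInclude chain along these lines (a sketch; only the method name comes from the call site, the class shape and Set<Library>() usage are assumptions):

    using System.Linq;
    using System.Threading.Tasks;
    using Microsoft.EntityFrameworkCore;

    public class LibraryRepository
    {
        private readonly DbContext _context;
        public LibraryRepository(DbContext context) => _context = context;

        public async Task<Library> GetFullLibraryForIdAsync(int libraryId)
        {
            // One eager-loaded query for the whole graph:
            // Library -> Series -> Volumes -> Chapters -> Files.
            return await _context.Set<Library>()
                .Where(l => l.Id == libraryId)
                .Include(l => l.Series)
                    .ThenInclude(s => s.Volumes)
                        .ThenInclude(v => v.Chapters)
                            .ThenInclude(c => c.Files)
                .SingleAsync();
        }
    }

The trade-off is a larger single result set, but for a scan-sized graph that is usually far cheaper than hundreds of round trips.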
@@ -5,8 +5,6 @@ using API.Helpers.Converters;
 using API.Interfaces;
 using API.Interfaces.Services;
 using Hangfire;
-using Microsoft.AspNetCore.Hosting;
-using Microsoft.Extensions.Hosting;
 using Microsoft.Extensions.Logging;

 namespace API.Services
@@ -25,8 +23,7 @@ namespace API.Services

         public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
-            IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
-            IWebHostEnvironment env)
+            IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService)
         {
             _cacheService = cacheService;
             _logger = logger;
@@ -36,17 +33,7 @@ namespace API.Services
             _backupService = backupService;
             _cleanupService = cleanupService;

-            if (!env.IsDevelopment())
-            {
-                ScheduleTasks();
-            }
-            else
-            {
-                RecurringJob.RemoveIfExists("scan-libraries");
-                RecurringJob.RemoveIfExists("backup");
-                RecurringJob.RemoveIfExists("cleanup");
-            }
-
+            ScheduleTasks();
         }

         public void ScheduleTasks()
@@ -56,8 +43,9 @@ namespace API.Services
             string setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value;
             if (setting != null)
             {
-                _logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting);
-                RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(setting));
+                _logger.LogDebug("Scheduling Scan Library Task for {Setting}", setting);
+                RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(),
+                    () => CronConverter.ConvertToCronNotation(setting));
             }
             else
             {
@@ -67,7 +55,7 @@ namespace API.Services
             setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
             if (setting != null)
             {
-                _logger.LogDebug("Scheduling Backup Task for {Cron}", setting);
+                _logger.LogDebug("Scheduling Backup Task for {Setting}", setting);
                 RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting));
             }
             else
@@ -80,10 +68,10 @@ namespace API.Services

         public void ScanLibrary(int libraryId, bool forceUpdate = false)
         {
+            // TODO: We shouldn't queue up a job if one is already in progress
             _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
             BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
-            BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change
+            // When we do a scan, force cache to re-unpack in case page numbers change
+            BackgroundJob.Enqueue(() => _cleanupService.Cleanup());
         }

         public void CleanupChapters(int[] chapterIds)
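Two things changed in TaskScheduler: ScheduleTasks() now runs in every environment (the dev-only RemoveIfExists branch is gone, per the commit message about LiteDB's locking issue no longer applying), and both recurring jobs keep taking their cadence from a stored setting via CronConverter.ConvertToCronNotation. Stripped of Kavita specifics, the underlying Hangfire pattern looks like this (a sketch; JobRegistration and the cron strings are illustrative):

    using Hangfire;

    public static class JobRegistration
    {
        public static void Schedule(string cronFromSettings)
        {
            // AddOrUpdate is idempotent: re-running it with the same id just
            // replaces the schedule, which is why calling ScheduleTasks()
            // unconditionally at startup is safe.
            RecurringJob.AddOrUpdate("backup", () => BackupDatabase(), "0 3 * * *");

            // The cron argument can come from configuration, which is what
            // CronConverter.ConvertToCronNotation(setting) produces in the diff.
            RecurringJob.AddOrUpdate("scan-libraries", () => ScanLibraries(), cronFromSettings);
        }

        public static void BackupDatabase() { /* ... */ }
        public static void ScanLibraries() { /* ... */ }
    }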
@@ -23,7 +23,6 @@ namespace API.Services.Tasks
         private readonly IArchiveService _archiveService;
         private readonly IMetadataService _metadataService;
         private ConcurrentDictionary<string, List<ParserInfo>> _scannedSeries;
-        private bool _forceUpdate;

         public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService,
             IMetadataService metadataService)
@@ -34,8 +33,9 @@ namespace API.Services.Tasks
             _metadataService = metadataService;
         }

-        [DisableConcurrentExecution(timeoutInSeconds: 5)]
-        [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
+
+        [DisableConcurrentExecution(timeoutInSeconds: 360)]
+        //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
         public void ScanLibraries()
         {
             var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
@@ -60,22 +60,15 @@ namespace API.Services.Tasks
             //return false;
         }

+        private void Cleanup()
+        {
+            _scannedSeries = null;
+        }
+
-        [DisableConcurrentExecution(5)]
-        [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
+        [DisableConcurrentExecution(360)]
+        //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
         public void ScanLibrary(int libraryId, bool forceUpdate)
         {
-            _forceUpdate = forceUpdate;
             var sw = Stopwatch.StartNew();
+            Cleanup();
             Library library;
             try
             {
-                library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result;
+                library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult();
             }
             catch (Exception ex)
             {
@@ -84,8 +77,10 @@ namespace API.Services.Tasks
                 return;
             }

-            _scannedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
-
             _logger.LogInformation("Beginning scan on {LibraryName}. Forcing metadata update: {ForceUpdate}", library.Name, forceUpdate);

+            _scannedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
+
             var totalFiles = 0;
             var skippedFolders = 0;
@@ -104,7 +99,7 @@ namespace API.Services.Tasks
                         {
                             _logger.LogError(exception, "The file {Filename} could not be found", f);
                         }
-                    }, Parser.Parser.MangaFileExtensions);
+                    }, Parser.Parser.ArchiveFileExtensions);
                 }
                 catch (ArgumentException ex) {
                     _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path);
@@ -120,15 +115,15 @@ namespace API.Services.Tasks
             {
                 _logger.LogInformation("All Folders were skipped due to no modifications to the directories");
                 _unitOfWork.LibraryRepository.Update(library);
-                _scannedSeries = null;
                 _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds, library.Name);
+                Cleanup();
                 return;
             }

             // Remove any series where there were no parsed infos
             var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0);
             var series = filtered.ToDictionary(v => v.Key, v => v.Value);

             UpdateLibrary(library, series);
             _unitOfWork.LibraryRepository.Update(library);
@@ -140,8 +135,22 @@ namespace API.Services.Tasks
             {
                 _logger.LogError("There was a critical error that resulted in a failed scan. Please check logs and rescan");
             }
-            _scannedSeries = null;
+
             _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
+
+            // Cleanup any user progress that doesn't exist
+            var cleanedUp = Task.Run(() => _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters()).Result;
+            if (cleanedUp)
+            {
+                _logger.LogInformation("Removed all abandoned progress rows");
+            }
+            else
+            {
+                _logger.LogWarning("There are abandoned user progress entities in the DB. In Progress activity stream will be skewed");
+            }
+
+            BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
         }

         private void UpdateLibrary(Library library, Dictionary<string, List<ParserInfo>> parsedSeries)
@@ -191,7 +200,6 @@ namespace API.Services.Tasks
                 _logger.LogInformation("Processing series {SeriesName}", series.Name);
                 UpdateVolumes(series, parsedSeries[series.Name].ToArray());
                 series.Pages = series.Volumes.Sum(v => v.Pages);
-                _metadataService.UpdateMetadata(series, _forceUpdate);
             });

@@ -221,28 +229,16 @@ namespace API.Services.Tasks
                     series.Volumes.Add(volume);
                 }

-                volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial); // TODO: I don't think we need this as chapters now handle specials
+                // NOTE: I don't think we need this as chapters now handle specials
+                volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial);
                 _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
-                // Remove any instances of Chapters with Range of 0. Range of 0 chapters are no longer supported.
-                //volume.Chapters = volume.Chapters.Where(c => c.IsSpecial && c.Files.Count > 1).ToList();

                 UpdateChapters(volume, infos);
                 volume.Pages = volume.Chapters.Sum(c => c.Pages);
-                _metadataService.UpdateMetadata(volume, _forceUpdate);
             }

             // Remove existing volumes that aren't in parsedInfos and volumes that have no chapters
-            var existingVolumes = series.Volumes.ToList();
-            foreach (var volume in existingVolumes)
-            {
-                // I can't remove based on chapter count as I haven't updated Chapters || volume.Chapters.Count == 0
-                var hasInfo = parsedInfos.Any(v => v.Volumes == volume.Name);
-                if (!hasInfo)
-                {
-                    series.Volumes.Remove(volume);
-                }
-            }
+            series.Volumes = series.Volumes.Where(v => parsedInfos.Any(p => p.Volumes == v.Name)).ToList();

             _logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
                 series.Name, startingVolumeCount, series.Volumes.Count);
@@ -256,51 +252,62 @@ namespace API.Services.Tasks
             // Add new chapters
             foreach (var info in parsedInfos)
             {
-                // Specials go into their own chapters with Range being their filename and IsSpecial = True
-                // BUG: If we have an existing chapter with Range == 0 and it has our file, we wont split.
-                var chapter = info.IsSpecial ? volume.Chapters.SingleOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath)))
-                    : volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
+                var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
+                // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0
+                // also are treated like specials
+                _logger.LogDebug("Adding new chapters, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment);
+                // If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest
+                // vs Historys strongest), this code will break and the duplicate will be skipped.
+                Chapter chapter = null;
+                try
+                {
+                    chapter = specialTreatment
+                        ? volume.Chapters.SingleOrDefault(c => c.Range == info.Filename
+                                                               || (c.Files.Select(f => f.FilePath)
+                                                                   .Contains(info.FullFilePath)))
+                        : volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
+                }
+                catch (Exception ex)
+                {
+                    _logger.LogError(ex, "{FileName} mapped as '{Series} - Vol {Volume} Ch {Chapter}' is a duplicate, skipping", info.FullFilePath, info.Series, info.Volumes, info.Chapters);
+                    return;
+                }

                 if (chapter == null)
                 {
                     chapter = new Chapter()
                     {
-                        Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "",
-                        Range = info.IsSpecial ? info.Filename : info.Chapters,
+                        Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty,
+                        Range = specialTreatment ? info.Filename : info.Chapters,
                         Files = new List<MangaFile>(),
-                        IsSpecial = info.IsSpecial
+                        IsSpecial = specialTreatment
                     };
                     volume.Chapters.Add(chapter);
                 }

-                if (info.IsSpecial && chapter.Files.Count > 1)
-                {
-                    // Split the Manga files into 2 separate chapters
-                }
-
                 chapter.Files ??= new List<MangaFile>();
-                chapter.IsSpecial = info.IsSpecial;
+                chapter.IsSpecial = specialTreatment;
             }

             // Add files
             foreach (var info in parsedInfos)
             {
+                var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
                 Chapter chapter = null;
                 try
                 {
-                    chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters || (info.IsSpecial && c.Range == info.Filename));
+                    chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters || (specialTreatment && c.Range == info.Filename));
                }
                 catch (Exception ex)
                 {
-                    _logger.LogError(ex, "There was an exception parsing chapter. Skipping Vol {VolumeNumber} Chapter {ChapterNumber}", volume.Name, info.Chapters);
+                    _logger.LogError(ex, "There was an exception parsing chapter. Skipping {SeriesName} Vol {VolumeNumber} Chapter {ChapterNumber} - Special treatment: {NeedsSpecialTreatment}", info.Series, volume.Name, info.Chapters, specialTreatment);
                 }
                 if (chapter == null) continue;
                 AddOrUpdateFileForChapter(chapter, info);
                 chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
-                chapter.Range = info.IsSpecial ? info.Filename : info.Chapters;
+                chapter.Range = specialTreatment ? info.Filename : info.Chapters;
                 chapter.Pages = chapter.Files.Sum(f => f.Pages);
-                _metadataService.UpdateMetadata(chapter, _forceUpdate);
             }
@@ -309,18 +316,14 @@ namespace API.Services.Tasks
             var existingChapters = volume.Chapters.ToList();
             foreach (var existingChapter in existingChapters)
             {
-                var hasInfo = existingChapter.IsSpecial ? parsedInfos.Any(v => v.Filename == existingChapter.Range)
+                var specialTreatment = (existingChapter.IsSpecial || (existingChapter.Number == "0" && !int.TryParse(existingChapter.Range, out int i)));
+                var hasInfo = specialTreatment ? parsedInfos.Any(v => v.Filename == existingChapter.Range)
                     : parsedInfos.Any(v => v.Chapters == existingChapter.Range);

                 if (!hasInfo || !existingChapter.Files.Any())
                 {
                     volume.Chapters.Remove(existingChapter);
                 }
-
-                // if (hasInfo && existingChapter.IsSpecial && existingChapter.Files.Count > 1)
-                // {
-                //
-                // }
             }

             _logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}",
@@ -328,7 +331,7 @@ namespace API.Services.Tasks
         }

         /// <summary>
-        /// Attempts to either add a new instance of a show mapping to the scannedSeries bag or adds to an existing.
+        /// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing.
         /// </summary>
         /// <param name="info"></param>
         private void TrackSeries(ParserInfo info)
@@ -337,6 +340,7 @@ namespace API.Services.Tasks

             // Check if normalized info.Series already exists and if so, update info to use that name instead
             var normalizedSeries = Parser.Parser.Normalize(info.Series);
+            _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);
             var existingName = _scannedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries)
                 .Key;
             if (!string.IsNullOrEmpty(existingName) && info.Series != existingName)
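Finally, the commit message mentions new comparers for natural sorting, and the metadata code above orders chapters by double.Parse(x.Number) for the same reason: ordinal string ordering puts "10" before "2". The comparer files themselves aren't rendered on this page; a minimal natural-sort comparer of the kind involved could look like this (a sketch, not Kavita's implementation):

    using System;
    using System.Collections.Generic;
    using System.Text.RegularExpressions;

    public class NaturalSortComparer : IComparer<string>
    {
        // Split into digit and non-digit runs: "Vol 10.cbz" -> ["Vol ", "10", ".cbz"]
        private static readonly Regex Chunks = new Regex(@"\d+|\D+");

        public int Compare(string x, string y)
        {
            if (x == null || y == null) return string.Compare(x, y, StringComparison.Ordinal);

            var xs = Chunks.Matches(x);
            var ys = Chunks.Matches(y);
            for (var i = 0; i < Math.Min(xs.Count, ys.Count); i++)
            {
                var a = xs[i].Value;
                var b = ys[i].Value;
                // Compare digit runs numerically so "2" sorts before "10".
                var result = long.TryParse(a, out var na) && long.TryParse(b, out var nb)
                    ? na.CompareTo(nb)
                    : string.Compare(a, b, StringComparison.OrdinalIgnoreCase);
                if (result != 0) return result;
            }
            return xs.Count.CompareTo(ys.Count);
        }
    }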