Bugs, Enhancements, and Performance (#580)
* Added a parser case for "The Duke of Death and His Black Maid - Ch. 177 - The Ball (3).cbz".
* Removed a file that was created and modified on every test run.
* Fixed a bad parser case for "Batman Beyond 02 (of 6) (1999)" which was consuming too many characters.
* Removed a lot of "Volume" parsing for comics where it doesn't make sense. This is prep work for the upcoming Comic Rework release.
* Reworked a lot of comic parsing cases based on naming conventions observed in releases found online.
* Added a way for external scripts to authenticate with a user's API key.
* Fixed an issue where, if the manga only had one page, the bottom menu would be missing the page and chapter controls.
* Fixed a bug where, on small phones, the nav bar could overflow due to scroll to top.
* Tweaked a lot of the manga parsing regex to handle cases where poorly named files, like "Vol. 03 Ch. 21", would end up parsing as the series "Vol. 03".
* Even more handling of parser cases. The manga parser should behave as it did before, but is more robust against bad naming.
* Fixed: don't force a metadata refresh on Scan Series, only on Refresh Metadata.
* Implemented the ability to automatically refresh after a series scan, based on when the server finishes. Removed a duplicate API call from series detail.
* Removed another API call for series metadata that isn't needed.
* Refactored message creation into a factory, centralized hardcoded strings, and made RefreshSeriesMetadata send an event and run async.
* Fixed a bug where, when really poorly named files sit in a folder that contains the series name, the fallback couldn't occur because that folder was taken as the root. We now detect this condition and go one level higher, resulting in potentially more I/O, but the series will not be deleted.
* Added the Read in Incognito context item for Chapter cards.
* Skip an additional series-summary check for series that aren't EPUB or Archive formats.
* Fixed an issue where cover image generation could occur due to a bad check on LastWriteTime on the underlying file.
* Added some extra comic parser tests.
* Added a ScanLibrary event (not hooked up in the UI).
* Performance improvement in the metadata service: when we scan for cover image changes, we now emit when a change occurs and only then do we update parent entities (array copy).
* Removed an hr from series detail and ensured we update the series cover image when Scan Series finishes.
* Updated the infinite scroller to use a Flags pattern for the debug mode. Updated a few logical conditions for mobile.
* Removed the concurrency check on row progress; if too many calls hit the DB it will throw, but it doesn't matter. Fixed bad logic that could cause scrolling after hitting the bottom of the chapter.
* Ensure prefetching uses totalPages + 1, since we pass in totalPages as -1 from the manga reader.
* Fixed an issue where the last page of a webtoon wouldn't be prefetched due to a < instead of <= in the prefetching code.
* Implemented the ability to send images from archives to the UI without incurring any extra memory pressure.
* Dropdown menus now have a darker background.
* The webtoon reader now works on mobile.
* Fixed how keyboard presses for up/down/left/right work with the MANGA_UD reading mode. See issue #579.
* Fixed the continuous reader for webtoons on mobile.
* Fixed a small issue where the top spacer would switch to the previous chapter too quickly.
* Updated user preferences to use the same slider style. Removed some CSS that is not used.
* Added a comic parser case for "Saga 001 (2012) (Digital) (Empire-Zone)".
* Added an accessibility toggle to reading list order and aligned all sliders to use the same style.
* Removed a todo for checking on the new image serving code. It works great.
* Fixed a missing await.
* The auth guard will now check whether an existing toast with the same message is present before popping a new toast.
* Fixed alignment on phones for reading lists.
* Moved sorters so they aren't reused between multiple threads. Slightly higher memory footprint.
* Fixed a broken unit test.
* Code smells.
* More unit test fixing.
parent b62d581491, commit cf4fd2cb9c
52 changed files with 685 additions and 336 deletions
@@ -23,7 +23,7 @@ namespace API.Comparators
        {
            if (x == y) return 0;

            // BUG: Operations that change non-concurrent collections must have exclusive access. A concurrent update was performed on this collection and corrupted its state. The collection's state is no longer correct.
            // Should be fixed: Operations that change non-concurrent collections must have exclusive access. A concurrent update was performed on this collection and corrupted its state. The collection's state is no longer correct.
            if (!_table.TryGetValue(x ?? Empty, out var x1))
            {
                x1 = Regex.Split(x ?? Empty, "([0-9]+)");
@@ -33,6 +33,7 @@ namespace API.Comparators
            if (!_table.TryGetValue(y ?? Empty, out var y1))
            {
                y1 = Regex.Split(y ?? Empty, "([0-9]+)");
                // Should be fixed: EXCEPTION: An item with the same key has already been added. Key: M:\Girls of the Wild's\Girls of the Wild's - Ep. 083 (Season 1) [LINE Webtoon].cbz
                _table.Add(y ?? Empty, y1);
            }

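The "same key has already been added" exception noted above is the classic symptom of a memoizing comparer being shared across threads: two callers both miss the _table cache for the same key and both try to Add it. A minimal standalone sketch of that failure mode and of the per-call-instance fix applied elsewhere in this commit (the CachingComparer below is a hypothetical stand-in, not the project's comparer):

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text.RegularExpressions;
    using System.Threading.Tasks;

    // Hypothetical stand-in for a natural-sort comparer that caches Regex.Split results.
    class CachingComparer : IComparer<string>
    {
        private readonly Dictionary<string, string[]> _table = new();

        public int Compare(string x, string y)
        {
            // Not thread safe: two threads can both miss the cache and both Add the same key,
            // throwing "An item with the same key has already been added" or corrupting the table.
            if (!_table.TryGetValue(x, out var xParts))
            {
                xParts = Regex.Split(x, "([0-9]+)");
                _table.Add(x, xParts);
            }
            if (!_table.TryGetValue(y, out var yParts))
            {
                yParts = Regex.Split(y, "([0-9]+)");
                _table.Add(y, yParts);
            }
            return string.CompareOrdinal(string.Concat(xParts), string.Concat(yParts));
        }
    }

    class SharedComparerDemo
    {
        static void Main()
        {
            var files = Enumerable.Range(1, 500).Select(i => $"Ch. {i}.cbz").ToArray();

            var shared = new CachingComparer();
            try
            {
                // Sharing one instance across parallel sorts is the failure mode described above.
                Parallel.For(0, 8, _ => files.OrderBy(f => f, shared).ToList());
            }
            catch (AggregateException ex)
            {
                Console.WriteLine(ex.InnerException?.Message);
            }

            // Giving each caller its own instance (as this commit does for the sorters) removes the race.
            Parallel.For(0, 8, _ => files.OrderBy(f => f, new CachingComparer()).ToList());
        }
    }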
API/Controllers/PluginController.cs (new file, 45 lines)
@@ -0,0 +1,45 @@
using System.Threading.Tasks;
using API.DTOs;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;

namespace API.Controllers
{
    public class PluginController : BaseApiController
    {
        private readonly IUnitOfWork _unitOfWork;
        private readonly ITokenService _tokenService;
        private readonly ILogger<PluginController> _logger;

        public PluginController(IUnitOfWork unitOfWork, ITokenService tokenService, ILogger<PluginController> logger)
        {
            _unitOfWork = unitOfWork;
            _tokenService = tokenService;
            _logger = logger;
        }

        /// <summary>
        /// Authenticate with the Server given an apiKey. This will log you in by returning the user object and the JWT token.
        /// </summary>
        /// <param name="apiKey"></param>
        /// <param name="pluginName">Name of the Plugin</param>
        /// <returns></returns>
        [HttpPost("authenticate")]
        public async Task<ActionResult<UserDto>> Authenticate(string apiKey, string pluginName)
        {
            // NOTE: In order to log information about plugins, we need some Plugin Description information for each request
            // Should log into access table so we can tell the user
            var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey);
            var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
            _logger.LogInformation("Plugin {PluginName} has authenticated with {UserName} ({UserId})'s API Key", pluginName, user.UserName, userId);
            return new UserDto
            {
                Username = user.UserName,
                Token = await _tokenService.CreateToken(user),
                ApiKey = user.ApiKey,
            };
        }
    }
}
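A minimal sketch of how an external script might use the new endpoint, assuming the conventional api/[controller] route from BaseApiController (i.e. POST /api/Plugin/authenticate), that apiKey and pluginName bind from the query string, and a server at localhost:5000; the returned token is then sent as a Bearer header on later calls:

    using System;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Net.Http.Json;
    using System.Threading.Tasks;

    // Hypothetical external script: trade a user's API key for a JWT, then call the API with it.
    class PluginAuthDemo
    {
        // Mirrors the fields of the UserDto returned above.
        private sealed record UserDto(string Username, string Token, string ApiKey);

        static async Task Main()
        {
            var baseUrl = "http://localhost:5000";                              // assumed server address
            var apiKey = Environment.GetEnvironmentVariable("KAVITA_API_KEY");  // the user's API key

            using var client = new HttpClient();
            var response = await client.PostAsync(
                $"{baseUrl}/api/Plugin/authenticate?apiKey={apiKey}&pluginName=MyScript", null);
            response.EnsureSuccessStatusCode();

            var user = await response.Content.ReadFromJsonAsync<UserDto>();
            client.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Bearer", user!.Token);

            Console.WriteLine($"Authenticated as {user.Username}");
        }
    }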
@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.Data.Repositories;
using API.DTOs;
using API.DTOs.Reader;
@@ -21,17 +20,15 @@ namespace API.Controllers
    /// </summary>
    public class ReaderController : BaseApiController
    {
        private readonly IDirectoryService _directoryService;
        private readonly ICacheService _cacheService;
        private readonly IUnitOfWork _unitOfWork;
        private readonly ILogger<ReaderController> _logger;
        private readonly IReaderService _readerService;

        /// <inheritdoc />
        public ReaderController(IDirectoryService directoryService, ICacheService cacheService,
        public ReaderController(ICacheService cacheService,
            IUnitOfWork unitOfWork, ILogger<ReaderController> logger, IReaderService readerService)
        {
            _directoryService = directoryService;
            _cacheService = cacheService;
            _unitOfWork = unitOfWork;
            _logger = logger;
@@ -55,14 +52,9 @@ namespace API.Controllers
            {
                var (path, _) = await _cacheService.GetCachedPagePath(chapter, page);
                if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}");

                var content = await _directoryService.ReadFileAsync(path);
                var format = Path.GetExtension(path).Replace(".", "");

                // Calculates SHA1 Hash for byte[]
                Response.AddCacheHeader(content);

                return File(content, "image/" + format);
                return PhysicalFile(path, "image/" + format);
            }
            catch (Exception)
            {
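The switch from File(content, ...) to PhysicalFile(path, ...) above is what the commit message means by sending images to the UI without extra memory pressure: the response is streamed straight from the cached file on disk instead of being buffered into a byte[] first. A self-contained illustration of the two shapes (a hypothetical controller, not the project's code):

    using System.IO;
    using System.Threading.Tasks;
    using Microsoft.AspNetCore.Mvc;

    [ApiController]
    [Route("api/[controller]")]
    public class ImageExampleController : ControllerBase
    {
        // Buffers the whole image into managed memory before the response starts.
        [HttpGet("buffered")]
        public async Task<ActionResult> Buffered(string path)
        {
            var content = await System.IO.File.ReadAllBytesAsync(path);   // whole file -> byte[]
            return File(content, "image/" + Path.GetExtension(path).Replace(".", ""));
        }

        // Streams the file from disk; the whole image is never held in a managed array.
        [HttpGet("streamed")]
        public ActionResult Streamed(string path)
        {
            // Illustration only: a real endpoint should not accept arbitrary paths from callers.
            return PhysicalFile(path, "image/" + Path.GetExtension(path).Replace(".", ""));
        }
    }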
@@ -190,7 +190,7 @@ namespace API.Controllers
            if (_unitOfWork.HasChanges())
            {
                await _unitOfWork.CommitAsync();
                _taskScheduler.RefreshSeriesMetadata(series.LibraryId, series.Id);
                _taskScheduler.RefreshSeriesMetadata(series.LibraryId, series.Id, true);
                return Ok();
            }

@@ -18,7 +18,6 @@ namespace API.Data.Repositories
    {
        private readonly DataContext _context;
        private readonly IMapper _mapper;
        private readonly NaturalSortComparer _naturalSortComparer = new ();
        public SeriesRepository(DataContext context, IMapper mapper)
        {
            _context = context;
@@ -118,11 +117,12 @@ namespace API.Data.Repositories
            return volumes;
        }

        private void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
        private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
        {
            var sorter = new NaturalSortComparer();
            foreach (var v in volumes.Where(vDto => vDto.Number == 0))
            {
                v.Chapters = v.Chapters.OrderBy(x => x.Range, _naturalSortComparer).ToList();
                v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList();
            }
        }

@@ -9,7 +9,7 @@ namespace API.Entities
    /// Represents the progress a single user has on a given Chapter.
    /// </summary>
    //[Index(nameof(SeriesId), nameof(VolumeId), nameof(ChapterId), nameof(AppUserId), IsUnique = true)]
    public class AppUserProgress : IEntityDate, IHasConcurrencyToken
    public class AppUserProgress : IEntityDate
    {
        /// <summary>
        /// Id of Entity
@@ -55,16 +55,5 @@ namespace API.Entities
        /// Last date this was updated
        /// </summary>
        public DateTime LastModified { get; set; }

        /// <inheritdoc />
        [ConcurrencyCheck]
        public uint RowVersion { get; private set; }


        /// <inheritdoc />
        public void OnSavingChanges()
        {
            RowVersion++;
        }
    }
}
@@ -30,9 +30,13 @@ namespace API.Entities
        public int ChapterId { get; set; }

        // Methods
        /// <summary>
        /// If the File on disk's last modified time is after what is stored in MangaFile
        /// </summary>
        /// <returns></returns>
        public bool HasFileBeenModified()
        {
            return !File.GetLastWriteTime(FilePath).Equals(LastModified);
            return File.GetLastWriteTime(FilePath) > LastModified;
        }
    }
}
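The Equals check above treated any difference between the disk write time and the stored LastModified as a modification, so once LastModified is stamped with DateTime.Now during a metadata pass (as UpdateSeriesSummary does later in this commit), every subsequent comparison looked like a change and could retrigger cover generation; the > comparison presumably only reacts to files that are genuinely newer. A tiny standalone illustration of the two checks, under that assumption:

    using System;

    class ModifiedCheckDemo
    {
        static void Main()
        {
            var diskWriteTime = new DateTime(2021, 10, 1, 12, 0, 0); // when the archive was last written
            var lastModified  = DateTime.Now;                        // stamped during a metadata pass

            // Old check: any difference counts as "modified", so this stays true forever.
            Console.WriteLine(!diskWriteTime.Equals(lastModified));  // True

            // New check: only a strictly newer write time counts as "modified".
            Console.WriteLine(diskWriteTime > lastModified);         // False
        }
    }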
@@ -14,7 +14,7 @@ namespace API.Interfaces
        void CleanupChapters(int[] chapterIds);
        void RefreshMetadata(int libraryId, bool forceUpdate = true);
        void CleanupTemp();
        void RefreshSeriesMetadata(int libraryId, int seriesId);
        void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false);
        void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false);
        void CancelStatsTasks();
        void RunStatCollection();
@@ -1,4 +1,5 @@
using API.Entities;
using System.Threading.Tasks;
using API.Entities;

namespace API.Interfaces.Services
{
@@ -11,14 +12,14 @@ namespace API.Interfaces.Services
    /// <param name="forceUpdate"></param>
    void RefreshMetadata(int libraryId, bool forceUpdate = false);

    public void UpdateMetadata(Chapter chapter, bool forceUpdate);
    public void UpdateMetadata(Volume volume, bool forceUpdate);
    public void UpdateMetadata(Series series, bool forceUpdate);
    public bool UpdateMetadata(Chapter chapter, bool forceUpdate);
    public bool UpdateMetadata(Volume volume, bool forceUpdate);
    public bool UpdateMetadata(Series series, bool forceUpdate);
    /// <summary>
    /// Performs a forced refresh of metatdata just for a series and it's nested entities
    /// </summary>
    /// <param name="libraryId"></param>
    /// <param name="seriesId"></param>
    void RefreshMetadataForSeries(int libraryId, int seriesId);
    Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false);
    }
}
}
@@ -12,8 +12,8 @@ namespace API.Interfaces.Services
    /// </summary>
    /// <param name="libraryId">Library to scan against</param>
    /// <param name="forceUpdate">Force overwriting for cover images</param>
    void ScanLibrary(int libraryId, bool forceUpdate);
    void ScanLibraries();
    Task ScanLibrary(int libraryId, bool forceUpdate);
    Task ScanLibraries();
    Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token);
    }
}
@@ -1,6 +1,7 @@

using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
@@ -17,7 +18,6 @@ namespace API.Interfaces.Services
        private readonly ILogger<ReaderService> _logger;
        private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer();
        private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst();
        private readonly NaturalSortComparer _naturalSortComparer = new NaturalSortComparer();

        public ReaderService(IUnitOfWork unitOfWork, ILogger<ReaderService> logger)
        {
@@ -44,7 +44,8 @@ namespace API.Interfaces.Services
            if (userProgress == null)
            {
                // Create a user object
                var userWithProgress = await _unitOfWork.UserRepository.GetUserByIdAsync(userId, AppUserIncludes.Progress);
                var userWithProgress =
                    await _unitOfWork.UserRepository.GetUserByIdAsync(userId, AppUserIncludes.Progress);
                userWithProgress.Progresses ??= new List<AppUserProgress>();
                userWithProgress.Progresses.Add(new AppUserProgress
                {
@@ -74,7 +75,6 @@ namespace API.Interfaces.Services
            }
            catch (Exception exception)
            {
                // When opening a fresh chapter, this seems to fail (sometimes)
                _logger.LogError(exception, "Could not save progress");
                await _unitOfWork.RollbackAsync();
            }
@@ -118,7 +118,7 @@ namespace API.Interfaces.Services
            if (currentVolume.Number == 0)
            {
                // Handle specials by sorting on their Filename aka Range
                var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, _naturalSortComparer), currentChapter.Number);
                var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()), currentChapter.Number);
                if (chapterId > 0) return chapterId;
            }

@@ -169,7 +169,7 @@ namespace API.Interfaces.Services

            if (currentVolume.Number == 0)
            {
                var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, _naturalSortComparer).Reverse(), currentChapter.Number);
                var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()).Reverse(), currentChapter.Number);
                if (chapterId > 0) return chapterId;
            }

@@ -102,11 +102,17 @@ namespace API.Parser
            @"^(?<Series>.*)( |_)Vol\.?(\d+|tbd)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex),
        // The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake
        new Regex(
            @"(?<Series>.+?)(\s|_|-)+(?:Vol(ume|\.)?(\s|_|-)+\d+)(\s|_|-)+(?:(Ch|Chapter|Ch)\.?)(\s|_|-)+(?<Chapter>\d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip
        new Regex(
            @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)(\s|_|-)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto]
        new Regex(
            @"(?<Series>.*)( - )(?:v|vo|c)\d",
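A quick standalone check of the new Vol/Ch rule added above against the kind of name the commit message calls out ("Vol. 03 Ch. 21"-style files no longer parsing as a series called "Vol. 03"); this is a sketch, not one of the project's parser tests:

    using System;
    using System.Text.RegularExpressions;

    class VolChParseDemo
    {
        static void Main()
        {
            // Pattern copied from the new series-parsing entry above.
            var regex = new Regex(
                @"(?<Series>.+?)(\s|_|-)+(?:Vol(ume|\.)?(\s|_|-)+\d+)(\s|_|-)+(?:(Ch|Chapter|Ch)\.?)(\s|_|-)+(?<Chapter>\d+)",
                RegexOptions.IgnoreCase);

            var match = regex.Match("The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake");
            Console.WriteLine(match.Groups["Series"].Value);   // The Duke of Death and His Black Maid
            Console.WriteLine(match.Groups["Chapter"].Value);  // 054
        }
    }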
@@ -117,11 +123,6 @@ namespace API.Parser
            @"(?<Series>.*)(?:, Chapter )(?<Chapter>\d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex)
        new Regex(
            @"(?<Series>.*)(\s|_|-)(?:Volume(\s|_|-)+\d+)(\s|_|-)+(?:Chapter)(\s|_|-)(?<Chapter>\d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz
        new Regex(
            @"(?<Series>.*)(\s|_|-)(?!Vol)(\s|_|-)(?:Chapter)(\s|_|-)(?<Chapter>\d+)",
@@ -149,7 +150,7 @@ namespace API.Parser
            RegexTimeout),
        // Momo The Blood Taker - Chapter 027 Violent Emotion.cbz, Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz
        new Regex(
            @"(?<Series>.*)(\b|_|-|\s)(?:(chapter(\b|_|-|\s))|sp)\d",
            @"^(?<Series>(?!Vol).+?)(?:(ch(apter|\.)(\b|_|-|\s))|sp)\d",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
@@ -294,9 +295,14 @@ namespace API.Parser
            @"^(?<Series>.*)(?: |_)i(ssue) #\d+",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Batman Wayne Family Adventures - Ep. 001 - Moving In
        new Regex(
            @"^(?<Series>.+?)(\s|_|-)?(?:Ep\.?)(\s|_|-)+\d+",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
        new Regex(
            @"^(?<Series>.*)(?: \d+)",
            @"^(?<Series>.+?)(?: \d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Batman & Robin the Teen Wonder #0
@@ -323,41 +329,44 @@ namespace API.Parser

    private static readonly Regex[] ComicVolumeRegex = new[]
    {
        // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
        new Regex(
            @"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // 01 Spider-Man & Wolverine 01.cbr
        new Regex(
            @"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Batman & Wildcat (1 of 3)
        new Regex(
            @"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
        // new Regex(
        // @"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
        // RegexOptions.IgnoreCase | RegexOptions.Compiled,
        // RegexTimeout),
        // // 01 Spider-Man & Wolverine 01.cbr
        // new Regex(
        // @"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?",
        // RegexOptions.IgnoreCase | RegexOptions.Compiled,
        // RegexTimeout),
        // // Batman & Wildcat (1 of 3)
        // new Regex(
        // @"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
        // RegexOptions.IgnoreCase | RegexOptions.Compiled,
        // RegexTimeout),
        // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
        new Regex(
            @"^(?<Series>.*)(?: |_)v(?<Volume>\d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)
        new Regex(
            @"^(?<Series>.*)(?<!c(hapter)|i(ssue))(?<!of)(?: |_)(?<!of )(?<Volume>\d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // BUG: Negative lookbehind has to be fixed width
        // NOTE: The case this is built for does not make much sense.
        // new Regex(
        // @"^(?<Series>.+?)(?<!c(hapter)|i(ssue))(?<!of)(?: |_)(?<!of )(?<Volume>\d+)",
        // RegexOptions.IgnoreCase | RegexOptions.Compiled,
        // RegexTimeout),

        // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
        new Regex(
            @"^(?<Series>.*)(?<!c(hapter)|i(ssue))(?<!of)(?: (?<Volume>\d+))",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Batman & Robin the Teen Wonder #0
        new Regex(
            @"^(?<Series>.*)(?: |_)#(?<Volume>\d+)",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // new Regex(
        // @"^(?<Series>.+?)(?<!c(hapter)|i(ssue))(?<!of)(?: (?<Volume>\d+))",
        // RegexOptions.IgnoreCase | RegexOptions.Compiled,
        // RegexTimeout),
        // // Batman & Robin the Teen Wonder #0
        // new Regex(
        // @"^(?<Series>.*)(?: |_)#(?<Volume>\d+)",
        // RegexOptions.IgnoreCase | RegexOptions.Compiled,
        // RegexTimeout),
    };

    private static readonly Regex[] ComicChapterRegex = new[]
@@ -387,6 +396,11 @@ namespace API.Parser
            @"^(?<Series>.*)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Saga 001 (2012) (Digital) (Empire-Zone)
        new Regex(
            @"(?<Series>.+?)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)\s\(\d{4}",
            RegexOptions.IgnoreCase | RegexOptions.Compiled,
            RegexTimeout),
        // Amazing Man Comics chapter 25
        new Regex(
            @"^(?!Vol)(?<Series>.*)( |_)c(hapter)( |_)(?<Chapter>\d*)",
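Same idea for the new comic-chapter rule above, checked against the filename the commit message mentions; a standalone sketch:

    using System;
    using System.Text.RegularExpressions;

    class ComicChapterParseDemo
    {
        static void Main()
        {
            // Pattern copied from the new ComicChapterRegex entry above.
            var regex = new Regex(
                @"(?<Series>.+?)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)\s\(\d{4}",
                RegexOptions.IgnoreCase);

            var match = regex.Match("Saga 001 (2012) (Digital) (Empire-Zone)");
            Console.WriteLine(match.Groups["Series"].Value);   // Saga
            Console.WriteLine(match.Groups["Chapter"].Value);  // 001
        }
    }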
@@ -930,6 +944,9 @@ namespace API.Parser

        /// <summary>
        /// Translates _ -> spaces, trims front and back of string, removes release groups
        /// <example>
        /// Hippos_the_Great [Digital], -> Hippos the Great
        /// </example>
        /// </summary>
        /// <param name="title"></param>
        /// <returns></returns>
@@ -942,7 +959,7 @@ namespace API.Parser
            title = RemoveSpecialTags(title);

            title = title.Replace("_", " ").Trim();
            if (title.EndsWith("-"))
            if (title.EndsWith("-") || title.EndsWith(","))
            {
                title = title.Substring(0, title.Length - 1);
            }
@@ -305,6 +305,44 @@ namespace API.Services
        }


        /// <summary>
        /// Finds the highest directories from a set of MangaFiles
        /// </summary>
        /// <param name="libraryFolders">List of top level folders which files belong to</param>
        /// <param name="filePaths">List of file paths that belong to libraryFolders</param>
        /// <returns></returns>
        public static Dictionary<string, string> FindHighestDirectoriesFromFiles(IEnumerable<string> libraryFolders, IList<string> filePaths)
        {
            var stopLookingForDirectories = false;
            var dirs = new Dictionary<string, string>();
            foreach (var folder in libraryFolders)
            {
                if (stopLookingForDirectories) break;
                foreach (var file in filePaths)
                {
                    if (!file.Contains(folder)) continue;

                    var parts = GetFoldersTillRoot(folder, file).ToList();
                    if (parts.Count == 0)
                    {
                        // Break from all loops, we done, just scan folder.Path (library root)
                        dirs.Add(folder, string.Empty);
                        stopLookingForDirectories = true;
                        break;
                    }

                    var fullPath = Path.Join(folder, parts.Last());
                    if (!dirs.ContainsKey(fullPath))
                    {
                        dirs.Add(fullPath, string.Empty);
                    }
                }
            }

            return dirs;
        }


        /// <summary>
        /// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed
        /// up processing.
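A minimal call-site sketch for the relocated helper (paths are illustrative, not from the repo): given the library roots and a series' file paths, it returns the highest directory per series under the root, or the root itself when files sit directly in it.

    using System;
    using System.Collections.Generic;
    using API.Services;

    class FindHighestDirectoriesDemo
    {
        static void Main()
        {
            var libraryRoots = new List<string> { @"C:\Manga" };
            var filePaths = new List<string>
            {
                @"C:\Manga\Accel World\Accel World v01.cbz",
                @"C:\Manga\Accel World\Specials\Accel World SP01.cbz",
            };

            var dirs = DirectoryService.FindHighestDirectoriesFromFiles(libraryRoots, filePaths);

            // Expected per the summary above: the highest folder under the root holding the
            // series' files (e.g. C:\Manga\Accel World), keyed with an empty string value.
            foreach (var dir in dirs.Keys) Console.WriteLine(dir);
        }
    }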
@@ -14,13 +14,11 @@ namespace API.Services
    {
        private readonly ILogger<ImageService> _logger;
        private readonly IDirectoryService _directoryService;
        private readonly NaturalSortComparer _naturalSortComparer;

        public ImageService(ILogger<ImageService> logger, IDirectoryService directoryService)
        {
            _logger = logger;
            _directoryService = directoryService;
            _naturalSortComparer = new NaturalSortComparer();
        }

        /// <summary>
@@ -38,7 +36,7 @@ namespace API.Services
            }

            var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions)
                .OrderBy(f => f, _naturalSortComparer).FirstOrDefault();
                .OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault();

            return firstImage;
        }
@@ -9,6 +9,8 @@ using API.Entities.Enums;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.SignalR;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;

namespace API.Services
@@ -20,6 +22,7 @@ namespace API.Services
        private readonly IArchiveService _archiveService;
        private readonly IBookService _bookService;
        private readonly IImageService _imageService;
        private readonly IHubContext<MessageHub> _messageHub;
        private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst();
        /// <summary>
        /// Width of the Thumbnail generation
@@ -27,13 +30,14 @@ namespace API.Services
        public static readonly int ThumbnailWidth = 320; // 153w x 230h

        public MetadataService(IUnitOfWork unitOfWork, ILogger<MetadataService> logger,
            IArchiveService archiveService, IBookService bookService, IImageService imageService)
            IArchiveService archiveService, IBookService bookService, IImageService imageService, IHubContext<MessageHub> messageHub)
        {
            _unitOfWork = unitOfWork;
            _logger = logger;
            _archiveService = archiveService;
            _bookService = bookService;
            _imageService = imageService;
            _messageHub = messageHub;
        }

        /// <summary>
@@ -81,14 +85,17 @@ namespace API.Services
        /// </summary>
        /// <param name="chapter"></param>
        /// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
        public void UpdateMetadata(Chapter chapter, bool forceUpdate)
        public bool UpdateMetadata(Chapter chapter, bool forceUpdate)
        {
            var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();

            if (ShouldUpdateCoverImage(chapter.CoverImage, firstFile, forceUpdate, chapter.CoverImageLocked))
            {
                chapter.CoverImage = GetCoverImage(firstFile);
                return true;
            }

            return false;
        }

        /// <summary>
@@ -96,17 +103,18 @@ namespace API.Services
        /// </summary>
        /// <param name="volume"></param>
        /// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
        public void UpdateMetadata(Volume volume, bool forceUpdate)
        public bool UpdateMetadata(Volume volume, bool forceUpdate)
        {
            // We need to check if Volume coverImage matches first chapters if forceUpdate is false
            if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate
                , false)) return;
                , false)) return false;

            volume.Chapters ??= new List<Chapter>();
            var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault();

            if (firstChapter == null) return;
            if (firstChapter == null) return false;

            volume.CoverImage = firstChapter.CoverImage;
            return true;
        }

        /// <summary>
@@ -114,9 +122,10 @@ namespace API.Services
        /// </summary>
        /// <param name="series"></param>
        /// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
        public void UpdateMetadata(Series series, bool forceUpdate)
        public bool UpdateMetadata(Series series, bool forceUpdate)
        {
            if (series == null) return;
            var madeUpdate = false;
            if (series == null) return false;
            if (ShouldUpdateCoverImage(series.CoverImage, null, forceUpdate, series.CoverImageLocked))
            {
                series.Volumes ??= new List<Volume>();
@@ -129,39 +138,46 @@ namespace API.Services
                {
                    coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting)
                        .FirstOrDefault(c => !c.IsSpecial)?.CoverImage;
                    madeUpdate = true;
                }

                if (!HasCoverImage(coverImage))
                {
                    coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting)
                        .FirstOrDefault()?.CoverImage;
                    madeUpdate = true;
                }
            }
            series.CoverImage = firstCover?.CoverImage ?? coverImage;
            }

            UpdateSeriesSummary(series, forceUpdate);
            return UpdateSeriesSummary(series, forceUpdate) || madeUpdate ;
        }

        private void UpdateSeriesSummary(Series series, bool forceUpdate)
        private bool UpdateSeriesSummary(Series series, bool forceUpdate)
        {
            if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return;
            if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return false;

            var isBook = series.Library.Type == LibraryType.Book;
            var firstVolume = series.Volumes.FirstWithChapters(isBook);
            var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles();

            var firstFile = firstChapter?.Files.FirstOrDefault();
            if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return;
            if (Parser.Parser.IsPdf(firstFile.FilePath)) return;
            if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return false;
            if (Parser.Parser.IsPdf(firstFile.FilePath)) return false;

            var summary = Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath);
            if (string.IsNullOrEmpty(series.Summary))
            if (series.Format is MangaFormat.Archive or MangaFormat.Epub)
            {
                series.Summary = summary;
                var summary = Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath);
                if (!string.IsNullOrEmpty(series.Summary))
                {
                    series.Summary = summary;
                    firstFile.LastModified = DateTime.Now;
                    return true;
                }
            }

            firstFile.LastModified = DateTime.Now;
            firstFile.LastModified = DateTime.Now; // NOTE: Should I put this here as well since it might not have actually been parsed?
            return false;
        }

@@ -180,17 +196,19 @@ namespace API.Services
            _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name);
            foreach (var series in library.Series)
            {
                var volumeUpdated = false;
                foreach (var volume in series.Volumes)
                {
                    var chapterUpdated = false;
                    foreach (var chapter in volume.Chapters)
                    {
                        UpdateMetadata(chapter, forceUpdate);
                        chapterUpdated = UpdateMetadata(chapter, forceUpdate);
                    }

                    UpdateMetadata(volume, forceUpdate);
                    volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
                }

                UpdateMetadata(series, forceUpdate);
                UpdateMetadata(series, volumeUpdated || forceUpdate);
                _unitOfWork.SeriesRepository.Update(series);
            }

@@ -207,7 +225,7 @@ namespace API.Services
        /// </summary>
        /// <param name="libraryId"></param>
        /// <param name="seriesId"></param>
        public void RefreshMetadataForSeries(int libraryId, int seriesId)
        public async Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false)
        {
            var sw = Stopwatch.StartNew();
            var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult();
@@ -219,23 +237,26 @@ namespace API.Services
                return;
            }
            _logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name);
            var volumeUpdated = false;
            foreach (var volume in series.Volumes)
            {
                var chapterUpdated = false;
                foreach (var chapter in volume.Chapters)
                {
                    UpdateMetadata(chapter, true);
                    chapterUpdated = UpdateMetadata(chapter, forceUpdate);
                }

                UpdateMetadata(volume, true);
                volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
            }

            UpdateMetadata(series, true);
            UpdateMetadata(series, volumeUpdated || forceUpdate);
            _unitOfWork.SeriesRepository.Update(series);


            if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result)
            if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
            {
                _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
                await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.RefreshMetadataEvent(libraryId, seriesId));
            }
        }
    }

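The bool returns threaded through UpdateMetadata above implement a dirty-flag pattern: each child reports whether it actually changed, and the parent's more expensive update only runs when a child changed or a forced refresh was requested, which is the performance improvement the commit message describes. A generic sketch of the shape, not the service itself:

    using System;
    using System.Linq;

    class DirtyFlagDemo
    {
        // Pretend child update: returns true only when something actually changed.
        static bool UpdateChapter(int chapter, bool force) => force || chapter % 10 == 0;

        static bool UpdateVolume(int[] chapters, bool force)
        {
            // Accumulate with |= so a single changed chapter is enough to mark the volume dirty.
            var changed = false;
            foreach (var chapter in chapters) changed |= UpdateChapter(chapter, force);
            return changed;
        }

        static void Main()
        {
            var volumes = new[] { new[] { 1, 2, 3 }, new[] { 10, 11 } };
            var seriesDirty = volumes.Aggregate(false, (acc, v) => acc | UpdateVolume(v, force: false));
            Console.WriteLine(seriesDirty ? "refresh series cover" : "skip series update");
        }
    }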
@@ -141,10 +141,10 @@ namespace API.Services
            BackgroundJob.Enqueue(() => DirectoryService.ClearDirectory(tempDirectory));
        }

        public void RefreshSeriesMetadata(int libraryId, int seriesId)
        public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false)
        {
            _logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId);
            BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId));
            BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate));
        }

        public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false)
@@ -14,7 +14,9 @@ using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services.Tasks.Scanner;
using API.SignalR;
using Hangfire;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;

namespace API.Services.Tasks
@@ -27,10 +29,11 @@ namespace API.Services.Tasks
        private readonly IMetadataService _metadataService;
        private readonly IBookService _bookService;
        private readonly ICacheService _cacheService;
        private readonly IHubContext<MessageHub> _messageHub;
        private readonly NaturalSortComparer _naturalSort = new ();

        public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService,
            IMetadataService metadataService, IBookService bookService, ICacheService cacheService)
            IMetadataService metadataService, IBookService bookService, ICacheService cacheService, IHubContext<MessageHub> messageHub)
        {
            _unitOfWork = unitOfWork;
            _logger = logger;
@@ -38,6 +41,7 @@ namespace API.Services.Tasks
            _metadataService = metadataService;
            _bookService = bookService;
            _cacheService = cacheService;
            _messageHub = messageHub;
        }

        [DisableConcurrentExecution(timeoutInSeconds: 360)]
@@ -47,7 +51,7 @@ namespace API.Services.Tasks
            var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
            var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
            var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId, seriesId);
            var dirs = FindHighestDirectoriesFromFiles(library, files);
            var dirs = DirectoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(f => f.Path), files.Select(f => f.FilePath).ToList());
            var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new []{ seriesId });

            _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
@@ -63,6 +67,37 @@ namespace API.Services.Tasks
                parsedSeries.Remove(key);
            }

            if (parsedSeries.Count == 0)
            {
                // We need to do an additional check for an edge case: If the scan ran and the files do not match the existing Series name, then it is very likely,
                // the files have crap naming and if we don't correct, the series will get deleted due to the parser not being able to fallback onto folder parsing as the root
                // is the series folder.
                var existingFolder = dirs.Keys.FirstOrDefault(key => key.Contains(series.OriginalName));
                if (dirs.Keys.Count == 1 && !string.IsNullOrEmpty(existingFolder))
                {
                    dirs = new Dictionary<string, string>();
                    var path = Path.GetPathRoot(existingFolder);
                    if (!string.IsNullOrEmpty(path))
                    {
                        dirs[path] = string.Empty;
                    }
                }
                _logger.LogDebug("{SeriesName} has bad naming convention, forcing rescan at a higher directory.", series.OriginalName);
                scanner = new ParseScannedFiles(_bookService, _logger);
                parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
                totalFiles += totalFiles2;
                scanElapsedTime += scanElapsedTime2;

                // If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue
                // Hence we clear out anything but what we selected for
                firstSeries = library.Series.FirstOrDefault();
                keys = parsedSeries.Keys;
                foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format))
                {
                    parsedSeries.Remove(key);
                }
            }

            var sw = new Stopwatch();
            UpdateLibrary(library, parsedSeries);

@@ -74,8 +109,10 @@ namespace API.Services.Tasks
                    totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name);

                CleanupDbEntities();
                BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId));
                BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate));
                BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
                // Tell UI that this series is done
                await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId), cancellationToken: token);
            }
            else
            {
@@ -83,54 +120,18 @@ namespace API.Services.Tasks
                    "There was a critical error that resulted in a failed scan. Please check logs and rescan");
                await _unitOfWork.RollbackAsync();
            }
        }

        /// <summary>
        /// Finds the highest directories from a set of MangaFiles
        /// </summary>
        /// <param name="library"></param>
        /// <param name="files"></param>
        /// <returns></returns>
        private static Dictionary<string, string> FindHighestDirectoriesFromFiles(Library library, IList<MangaFile> files)
        {
            var stopLookingForDirectories = false;
            var dirs = new Dictionary<string, string>();
            foreach (var folder in library.Folders)
            {
                if (stopLookingForDirectories) break;
                foreach (var file in files)
                {
                    if (!file.FilePath.Contains(folder.Path)) continue;

                    var parts = DirectoryService.GetFoldersTillRoot(folder.Path, file.FilePath).ToList();
                    if (parts.Count == 0)
                    {
                        // Break from all loops, we done, just scan folder.Path (library root)
                        dirs.Add(folder.Path, string.Empty);
                        stopLookingForDirectories = true;
                        break;
                    }

                    var fullPath = Path.Join(folder.Path, parts.Last());
                    if (!dirs.ContainsKey(fullPath))
                    {
                        dirs.Add(fullPath, string.Empty);
                    }
                }
            }

            return dirs;
        }


        [DisableConcurrentExecution(timeoutInSeconds: 360)]
        [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
        public void ScanLibraries()
        public async Task ScanLibraries()
        {
            var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
            foreach (var lib in libraries)
            {
                ScanLibrary(lib.Id, false);
                await ScanLibrary(lib.Id, false);
            }

        }
@@ -145,7 +146,7 @@ namespace API.Services.Tasks
        /// <param name="forceUpdate"></param>
        [DisableConcurrentExecution(360)]
        [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
        public void ScanLibrary(int libraryId, bool forceUpdate)
        public async Task ScanLibrary(int libraryId, bool forceUpdate)
        {
            Library library;
            try
@@ -188,6 +189,7 @@ namespace API.Services.Tasks
            CleanupAbandonedChapters();

            BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
            await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibrary, MessageFactory.ScanLibraryEvent(libraryId, "complete"));
        }

        /// <summary>
@@ -140,11 +140,7 @@ namespace API.Services.Tasks
                connections.AddRange(await _tracker.GetConnectionsForUser(admin));
            }

            await _messageHub.Clients.Users(admins).SendAsync("UpdateAvailable", new SignalRMessage
            {
                Name = "UpdateAvailable",
                Body = update
            });
            await _messageHub.Clients.Users(admins).SendAsync(SignalREvents.UpdateVersion, MessageFactory.UpdateVersionEvent(update));
        }

API/SignalR/MessageFactory.cs (new file, 56 lines)
@@ -0,0 +1,56 @@
using System.Threading;
using API.DTOs.Update;

namespace API.SignalR
{
    public static class MessageFactory
    {
        public static SignalRMessage ScanSeriesEvent(int seriesId)
        {
            return new SignalRMessage()
            {
                Name = SignalREvents.ScanSeries,
                Body = new
                {
                    SeriesId = seriesId
                }
            };
        }

        public static SignalRMessage ScanLibraryEvent(int libraryId, string stage)
        {
            return new SignalRMessage()
            {
                Name = SignalREvents.ScanLibrary,
                Body = new
                {
                    LibraryId = libraryId,
                    Stage = stage
                }
            };
        }

        public static SignalRMessage RefreshMetadataEvent(int libraryId, int seriesId)
        {
            return new SignalRMessage()
            {
                Name = SignalREvents.RefreshMetadata,
                Body = new
                {
                    SeriesId = seriesId,
                    LibraryId = libraryId
                }
            };
        }

        public static SignalRMessage UpdateVersionEvent(UpdateNotificationDto update)
        {
            return new SignalRMessage
            {
                Name = SignalREvents.UpdateVersion,
                Body = update
            };
        }

    }
}
API/SignalR/SignalREvents.cs (new file, 11 lines)
@@ -0,0 +1,11 @@
namespace API.SignalR
{
    public static class SignalREvents
    {
        public const string UpdateVersion = "UpdateVersion";
        public const string ScanSeries = "ScanSeries";
        public const string RefreshMetadata = "RefreshMetadata";
        public const string ScanLibrary = "ScanLibrary";

    }
}