A boatload of Bugs (#3704)

Co-authored-by: Amelia <77553571+Fesaa@users.noreply.github.com>
Joe Milazzo 2025-04-05 15:52:01 -05:00 committed by GitHub
parent ea9b7ad0d1
commit 37734554ba
102 changed files with 2051 additions and 1115 deletions


@ -85,11 +85,32 @@ public class BookService : IBookService
},
Epub2NcxReaderOptions = new Epub2NcxReaderOptions
{
IgnoreMissingContentForNavigationPoints = true
IgnoreMissingContentForNavigationPoints = false
},
SpineReaderOptions = new SpineReaderOptions
{
IgnoreMissingManifestItems = true
IgnoreMissingManifestItems = false
},
BookCoverReaderOptions = new BookCoverReaderOptions
{
Epub2MetadataIgnoreMissingManifestItem = false
}
};
public static readonly EpubReaderOptions LenientBookReaderOptions = new()
{
PackageReaderOptions = new PackageReaderOptions
{
IgnoreMissingToc = true,
SkipInvalidManifestItems = true,
},
Epub2NcxReaderOptions = new Epub2NcxReaderOptions
{
IgnoreMissingContentForNavigationPoints = false
},
SpineReaderOptions = new SpineReaderOptions
{
IgnoreMissingManifestItems = false
},
BookCoverReaderOptions = new BookCoverReaderOptions
{
@ -455,9 +476,12 @@ public class BookService : IBookService
private ComicInfo? GetEpubComicInfo(string filePath)
{
EpubBookRef? epubBook = null;
try
{
using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions);
epubBook = OpenEpubWithFallback(filePath, epubBook);
var publicationDate =
epubBook.Schema.Package.Metadata.Dates.Find(pDate => pDate.Event == "publication")?.Date;
@ -465,6 +489,7 @@ public class BookService : IBookService
{
publicationDate = epubBook.Schema.Package.Metadata.Dates.FirstOrDefault()?.Date;
}
var (year, month, day) = GetPublicationDate(publicationDate);
var summary = epubBook.Schema.Package.Metadata.Descriptions.FirstOrDefault();
@ -476,7 +501,8 @@ public class BookService : IBookService
Day = day,
Year = year,
Title = epubBook.Title,
Genre = string.Join(",", epubBook.Schema.Package.Metadata.Subjects.Select(s => s.Subject.ToLower().Trim())),
Genre = string.Join(",",
epubBook.Schema.Package.Metadata.Subjects.Select(s => s.Subject.ToLower().Trim())),
LanguageISO = ValidateLanguage(epubBook.Schema.Package.Metadata.Languages
.Select(l => l.Language)
.FirstOrDefault())
@ -487,7 +513,8 @@ public class BookService : IBookService
foreach (var identifier in epubBook.Schema.Package.Metadata.Identifiers)
{
if (string.IsNullOrEmpty(identifier.Identifier)) continue;
if (!string.IsNullOrEmpty(identifier.Scheme) && identifier.Scheme.Equals("ISBN", StringComparison.InvariantCultureIgnoreCase))
if (!string.IsNullOrEmpty(identifier.Scheme) &&
identifier.Scheme.Equals("ISBN", StringComparison.InvariantCultureIgnoreCase))
{
var isbn = identifier.Identifier.Replace("urn:isbn:", string.Empty).Replace("isbn:", string.Empty);
if (!ArticleNumberHelper.IsValidIsbn10(isbn) && !ArticleNumberHelper.IsValidIsbn13(isbn))
@ -495,11 +522,13 @@ public class BookService : IBookService
_logger.LogDebug("[BookService] {File} has invalid ISBN number", filePath);
continue;
}
info.Isbn = isbn;
}
if ((!string.IsNullOrEmpty(identifier.Scheme) && identifier.Scheme.Equals("URL", StringComparison.InvariantCultureIgnoreCase)) ||
identifier.Identifier.StartsWith("url:"))
if ((!string.IsNullOrEmpty(identifier.Scheme) &&
identifier.Scheme.Equals("URL", StringComparison.InvariantCultureIgnoreCase)) ||
identifier.Identifier.StartsWith("url:"))
{
var url = identifier.Identifier.Replace("url:", string.Empty);
weblinks.Add(url.Trim());
@ -529,6 +558,7 @@ public class BookService : IBookService
{
info.SeriesSort = metadataItem.Content;
}
break;
case "calibre:series_index":
info.Volume = metadataItem.Content;
@ -548,6 +578,7 @@ public class BookService : IBookService
{
info.SeriesSort = metadataItem.Content;
}
break;
case "collection-type":
// These look to be genres from https://manual.calibre-ebook.com/sub_groups.html or can be "series"
@ -578,7 +609,8 @@ public class BookService : IBookService
}
// If this is a single book and not a collection, set publication status to Completed
if (string.IsNullOrEmpty(info.Volume) && Parser.ParseVolume(filePath, LibraryType.Manga).Equals(Parser.LooseLeafVolume))
if (string.IsNullOrEmpty(info.Volume) &&
Parser.ParseVolume(filePath, LibraryType.Manga).Equals(Parser.LooseLeafVolume))
{
info.Count = 1;
}
@ -590,7 +622,8 @@ public class BookService : IBookService
var hasVolumeInSeries = !Parser.ParseVolume(info.Title, LibraryType.Manga)
.Equals(Parser.LooseLeafVolume);
if (string.IsNullOrEmpty(info.Volume) && hasVolumeInSeries && (!info.Series.Equals(info.Title) || string.IsNullOrEmpty(info.Series)))
if (string.IsNullOrEmpty(info.Volume) && hasVolumeInSeries &&
(!info.Series.Equals(info.Title) || string.IsNullOrEmpty(info.Series)))
{
// This is likely a light novel for which we can set series from parsed title
info.Series = Parser.ParseSeries(info.Title, LibraryType.Manga);
@ -601,14 +634,40 @@ public class BookService : IBookService
}
catch (Exception ex)
{
_logger.LogWarning(ex, "[GetComicInfo] There was an exception parsing metadata");
_logger.LogWarning(ex, "[GetComicInfo] There was an exception parsing metadata: {FilePath}", filePath);
_mediaErrorService.ReportMediaIssue(filePath, MediaErrorProducer.BookService,
"There was an exception parsing metadata", ex);
}
finally
{
epubBook?.Dispose();
}
return null;
}
private EpubBookRef? OpenEpubWithFallback(string filePath, EpubBookRef? epubBook)
{
try
{
epubBook = EpubReader.OpenBook(filePath, BookReaderOptions);
}
catch (Exception ex)
{
_logger.LogWarning(ex,
"[GetComicInfo] There was an exception parsing metadata, falling back to a more lenient parsing method: {FilePath}",
filePath);
_mediaErrorService.ReportMediaIssue(filePath, MediaErrorProducer.BookService,
"There was an exception parsing metadata", ex);
}
finally
{
epubBook ??= EpubReader.OpenBook(filePath, LenientBookReaderOptions);
}
return epubBook;
}
public ComicInfo? GetComicInfo(string filePath)
{
if (!IsValidFile(filePath)) return null;
@ -765,7 +824,7 @@ public class BookService : IBookService
return docReader.GetPageCount();
}
using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions);
using var epubBook = EpubReader.OpenBook(filePath, LenientBookReaderOptions);
return epubBook.GetReadingOrder().Count;
}
catch (Exception ex)
@ -823,7 +882,7 @@ public class BookService : IBookService
try
{
using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions);
using var epubBook = EpubReader.OpenBook(filePath, LenientBookReaderOptions);
// <meta content="The Dark Tower" name="calibre:series"/>
// <meta content="Wolves of the Calla" name="calibre:title_sort"/>
@ -1027,7 +1086,7 @@ public class BookService : IBookService
/// <returns></returns>
public async Task<ICollection<BookChapterItem>> GenerateTableOfContents(Chapter chapter)
{
using var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath, BookReaderOptions);
using var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath, LenientBookReaderOptions);
var mappings = await CreateKeyToPageMappingAsync(book);
var navItems = await book.GetNavigationAsync();
@ -1155,7 +1214,7 @@ public class BookService : IBookService
/// <exception cref="KavitaException">All exceptions throw this</exception>
public async Task<string> GetBookPage(int page, int chapterId, string cachedEpubPath, string baseUrl)
{
using var book = await EpubReader.OpenBookAsync(cachedEpubPath, BookReaderOptions);
using var book = await EpubReader.OpenBookAsync(cachedEpubPath, LenientBookReaderOptions);
var mappings = await CreateKeyToPageMappingAsync(book);
var apiBase = baseUrl + "book/" + chapterId + "/" + BookApiUrl;
@ -1257,7 +1316,7 @@ public class BookService : IBookService
return GetPdfCoverImage(fileFilePath, fileName, outputDirectory, encodeFormat, size);
}
using var epubBook = EpubReader.OpenBook(fileFilePath, BookReaderOptions);
using var epubBook = EpubReader.OpenBook(fileFilePath, LenientBookReaderOptions);
try
{

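The BookService changes above add a second, more forgiving LenientBookReaderOptions and route the metadata read in GetEpubComicInfo through OpenEpubWithFallback, while the page-count, table-of-contents, and cover paths switch to the lenient options directly. A minimal sketch of the strict-first/lenient-retry pattern, assuming the VersOne.Epub library these types come from (namespaces and option values here are illustrative, not Kavita's exact configuration):

// Sketch: strict-first EPUB open with a lenient retry, mirroring OpenEpubWithFallback above.
using System;
using VersOne.Epub;
using VersOne.Epub.Options;

public static class EpubOpenSketch
{
    private static readonly EpubReaderOptions Strict = new();   // library defaults: malformed books throw
    private static readonly EpubReaderOptions Lenient = new()
    {
        PackageReaderOptions = new PackageReaderOptions
        {
            IgnoreMissingToc = true,
            SkipInvalidManifestItems = true
        }
    };

    // Caller is responsible for disposing the returned EpubBookRef, as the finally block above does.
    public static EpubBookRef Open(string filePath)
    {
        try
        {
            return EpubReader.OpenBook(filePath, Strict);
        }
        catch (Exception)
        {
            // Broken TOC/manifest entries: retry with options that skip them instead of failing the scan.
            return EpubReader.OpenBook(filePath, Lenient);
        }
    }
}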

@ -17,6 +17,7 @@ using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using MimeKit;
using MimeTypes;
namespace API.Services;
#nullable enable
@ -355,9 +356,21 @@ public class EmailService : IEmailService
if (userEmailOptions.Attachments != null)
{
foreach (var attachment in userEmailOptions.Attachments)
foreach (var attachmentPath in userEmailOptions.Attachments)
{
await body.Attachments.AddAsync(attachment);
var mimeType = MimeTypeMap.GetMimeType(attachmentPath) ?? "application/octet-stream";
var mediaType = mimeType.Split('/')[0];
var mediaSubtype = mimeType.Split('/')[1];
var attachment = new MimePart(mediaType, mediaSubtype)
{
Content = new MimeContent(File.OpenRead(attachmentPath)),
ContentDisposition = new ContentDisposition(ContentDisposition.Attachment),
ContentTransferEncoding = ContentEncoding.Base64,
FileName = Path.GetFileName(attachmentPath)
};
body.Attachments.Add(attachment);
}
}

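The attachment loop now builds each MimePart by hand so the content type comes from the file extension (via the MimeTypes package) rather than whatever AddAsync inferred. The same pattern in isolation, with a hypothetical file path:

// Sketch: attach a file with an explicit MIME type using MimeKit + MimeTypeMap.
using System.IO;
using MimeKit;
using MimeTypes;

var attachmentPath = "/config/exports/report.pdf";                      // hypothetical path
var mimeType = MimeTypeMap.GetMimeType(attachmentPath) ?? "application/octet-stream";
var parts = mimeType.Split('/');                                         // e.g. "application" and "pdf"

var attachment = new MimePart(parts[0], parts[1])
{
    Content = new MimeContent(File.OpenRead(attachmentPath)),
    ContentDisposition = new ContentDisposition(ContentDisposition.Attachment),
    ContentTransferEncoding = ContentEncoding.Base64,
    FileName = Path.GetFileName(attachmentPath)
};

var multipart = new Multipart("mixed") { attachment };                   // becomes the MimeMessage body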

@ -271,7 +271,7 @@ public class LocalizationService : ILocalizationService
// This could use a lookup table or follow a naming convention
try
{
var cultureInfo = new System.Globalization.CultureInfo(fileName);
var cultureInfo = new System.Globalization.CultureInfo(fileName.Replace('_', '-'));
return cultureInfo.NativeName;
}
catch

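The one-character fix matters because locale files are named with underscores while .NET culture names use hyphens; depending on the platform, new CultureInfo("pt_BR") can throw CultureNotFoundException or resolve to a meaningless culture, whereas "pt-BR" yields the expected NativeName. A small sketch, with an illustrative file name:

// Sketch: turning a locale file name into a display name.
using System;
using System.Globalization;

Console.WriteLine(DisplayNameFor("pt_BR"));   // e.g. "português (Brasil)"

static string DisplayNameFor(string fileName)
{
    try
    {
        var culture = new CultureInfo(fileName.Replace('_', '-'));   // "pt_BR" -> "pt-BR"
        return culture.NativeName;
    }
    catch (CultureNotFoundException)
    {
        return fileName;   // fall back to the raw file name
    }
}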

@ -437,16 +437,24 @@ public class ExternalMetadataService : IExternalMetadataService
// Trim quotes if the response is a JSON string
errorMessage = errorMessage.Trim('"');
if (ex.StatusCode == 400 && errorMessage.Contains("Too many Requests"))
if (ex.StatusCode == 400)
{
_logger.LogInformation("Hit rate limit, will retry in 3 seconds");
await Task.Delay(3000);
if (errorMessage.Contains("Too many Requests"))
{
_logger.LogInformation("Hit rate limit, will retry in 3 seconds");
await Task.Delay(3000);
result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail")
.WithKavitaPlusHeaders(license, token)
.PostJsonAsync(data)
.ReceiveJson<
SeriesDetailPlusApiDto>();
result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail")
.WithKavitaPlusHeaders(license, token)
.PostJsonAsync(data)
.ReceiveJson<
SeriesDetailPlusApiDto>();
}
else if (errorMessage.Contains("Unknown Series"))
{
series.IsBlacklisted = true;
await _unitOfWork.CommitAsync();
}
}
}

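The 400 handler now distinguishes two error bodies: "Too many Requests" gets a 3-second back-off and a single retry, while "Unknown Series" blacklists the series so it is not asked about again. A generic sketch of the retry half, assuming the Flurl.Http client the surrounding code uses (the helper name and shape are hypothetical):

// Sketch: retry once when Kavita+ answers 400 with a rate-limit message.
using System;
using System.Threading.Tasks;
using Flurl.Http;

public static class KavitaPlusRetrySketch
{
    public static async Task<T> WithRateLimitRetry<T>(Func<Task<T>> call)
    {
        try
        {
            return await call();
        }
        catch (FlurlHttpException ex) when (ex.StatusCode == 400)
        {
            var body = (await ex.GetResponseStringAsync()).Trim('"');
            if (!body.Contains("Too many Requests")) throw;   // e.g. "Unknown Series" is handled elsewhere

            await Task.Delay(3000);                            // same back-off as the handler above
            return await call();                               // one retry; a second failure propagates
        }
    }
}

The series-detail call in the diff could then be wrapped as WithRateLimitRetry(() => url.WithKavitaPlusHeaders(license, token).PostJsonAsync(data).ReceiveJson<SeriesDetailPlusApiDto>()), keeping the blacklist branch in the outer handler.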

@ -956,6 +956,7 @@ public class ScrobblingService : IScrobblingService
// Recalculate the highest volume/chapter
foreach (var readEvt in readEvents)
{
// Note: this causes skewing in the scrobble history because it makes it look like there are duplicate events
readEvt.VolumeNumber =
(int) await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(readEvt.SeriesId,
readEvt.AppUser.Id);
@ -1027,7 +1028,7 @@ public class ScrobblingService : IScrobblingService
_unitOfWork.ScrobbleRepository.Attach(new ScrobbleError()
{
Comment = "AniList token has expired and needs rotating. Scrobbling wont work until then",
Details = $"User: {evt.AppUser.UserName}",
Details = $"User: {evt.AppUser.UserName}, Expired: {TokenService.GetTokenExpiry(evt.AppUser.AniListAccessToken)}",
LibraryId = evt.LibraryId,
SeriesId = evt.SeriesId
});
@ -1124,33 +1125,22 @@ public class ScrobblingService : IScrobblingService
private static bool CanProcessScrobbleEvent(ScrobbleEvent readEvent)
{
var userProviders = GetUserProviders(readEvent.AppUser);
if (readEvent.Series.Library.Type == LibraryType.Manga && MangaProviders.Intersect(userProviders).Any())
switch (readEvent.Series.Library.Type)
{
return true;
case LibraryType.Manga when MangaProviders.Intersect(userProviders).Any():
case LibraryType.Comic when
ComicProviders.Intersect(userProviders).Any():
case LibraryType.Book when
BookProviders.Intersect(userProviders).Any():
case LibraryType.LightNovel when
LightNovelProviders.Intersect(userProviders).Any():
return true;
default:
return false;
}
if (readEvent.Series.Library.Type == LibraryType.Comic &&
ComicProviders.Intersect(userProviders).Any())
{
return true;
}
if (readEvent.Series.Library.Type == LibraryType.Book &&
BookProviders.Intersect(userProviders).Any())
{
return true;
}
if (readEvent.Series.Library.Type == LibraryType.LightNovel &&
LightNovelProviders.Intersect(userProviders).Any())
{
return true;
}
return false;
}
private static IList<ScrobbleProvider> GetUserProviders(AppUser appUser)
private static List<ScrobbleProvider> GetUserProviders(AppUser appUser)
{
var providers = new List<ScrobbleProvider>();
if (!string.IsNullOrEmpty(appUser.AniListAccessToken)) providers.Add(ScrobbleProvider.AniList);
@ -1227,8 +1217,7 @@ public class ScrobblingService : IScrobblingService
public static string CreateUrl(string url, long? id)
{
if (id is null or 0) return string.Empty;
return $"{url}{id}/";
return id is null or 0 ? string.Empty : $"{url}{id}/";
}

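Two small refactors in this file: CanProcessScrobbleEvent collapses the four per-library-type ifs into one switch with when guards, and CreateUrl becomes a single conditional expression with the same behavior. For the latter, the expected results (the base URL and id are only examples):

// Sketch: CreateUrl behavior after the refactor.
using System;

Console.WriteLine(CreateUrl("https://anilist.co/manga/", 30013)); // "https://anilist.co/manga/30013/"
Console.WriteLine(CreateUrl("https://anilist.co/manga/", null));  // ""
Console.WriteLine(CreateUrl("https://anilist.co/manga/", 0));     // ""

static string CreateUrl(string url, long? id) =>
    id is null or 0 ? string.Empty : $"{url}{id}/";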

@ -1,4 +1,5 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
@ -11,6 +12,7 @@ using API.Helpers;
using API.SignalR;
using Kavita.Common;
using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
namespace API.Services;
@ -33,6 +35,9 @@ public interface IStreamService
Task<ExternalSourceDto> CreateExternalSource(int userId, ExternalSourceDto dto);
Task<ExternalSourceDto> UpdateExternalSource(int userId, ExternalSourceDto dto);
Task DeleteExternalSource(int userId, int externalSourceId);
Task DeleteSideNavSmartFilterStream(int userId, int sideNavStreamId);
Task DeleteDashboardSmartFilterStream(int userId, int dashboardStreamId);
Task RenameSmartFilterStreams(AppUserSmartFilter smartFilter);
}
public class StreamService : IStreamService
@ -40,12 +45,14 @@ public class StreamService : IStreamService
private readonly IUnitOfWork _unitOfWork;
private readonly IEventHub _eventHub;
private readonly ILocalizationService _localizationService;
private readonly ILogger<StreamService> _logger;
public StreamService(IUnitOfWork unitOfWork, IEventHub eventHub, ILocalizationService localizationService)
public StreamService(IUnitOfWork unitOfWork, IEventHub eventHub, ILocalizationService localizationService, ILogger<StreamService> logger)
{
_unitOfWork = unitOfWork;
_eventHub = eventHub;
_localizationService = localizationService;
_logger = logger;
}
public async Task<IEnumerable<DashboardStreamDto>> GetDashboardStreams(int userId, bool visibleOnly = true)
@ -91,6 +98,7 @@ public class StreamService : IStreamService
var ret = new DashboardStreamDto()
{
Id = createdStream.Id,
Name = createdStream.Name,
IsProvided = createdStream.IsProvided,
Visible = createdStream.Visible,
@ -182,6 +190,7 @@ public class StreamService : IStreamService
var ret = new SideNavStreamDto()
{
Id = createdStream.Id,
Name = createdStream.Name,
IsProvided = createdStream.IsProvided,
Visible = createdStream.Visible,
@ -344,4 +353,72 @@ public class StreamService : IStreamService
await _unitOfWork.CommitAsync();
}
public async Task DeleteSideNavSmartFilterStream(int userId, int sideNavStreamId)
{
try
{
var stream = await _unitOfWork.UserRepository.GetSideNavStream(sideNavStreamId);
if (stream == null) throw new KavitaException("sidenav-stream-doesnt-exist");
if (stream.AppUserId != userId) throw new KavitaException("sidenav-stream-doesnt-exist");
if (stream.StreamType != SideNavStreamType.SmartFilter)
{
throw new KavitaException("sidenav-stream-only-delete-smart-filter");
}
_unitOfWork.UserRepository.Delete(stream);
await _unitOfWork.CommitAsync();
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception deleting SideNav Smart Filter Stream: {FilterId}", sideNavStreamId);
throw;
}
}
public async Task DeleteDashboardSmartFilterStream(int userId, int dashboardStreamId)
{
try
{
var stream = await _unitOfWork.UserRepository.GetDashboardStream(dashboardStreamId);
if (stream == null) throw new KavitaException("dashboard-stream-doesnt-exist");
if (stream.AppUserId != userId) throw new KavitaException("dashboard-stream-doesnt-exist");
if (stream.StreamType != DashboardStreamType.SmartFilter)
{
throw new KavitaException("dashboard-stream-only-delete-smart-filter");
}
_unitOfWork.UserRepository.Delete(stream);
await _unitOfWork.CommitAsync();
} catch (Exception ex)
{
_logger.LogError(ex, "There was an exception deleting Dashboard Smart Filter Stream: {FilterId}", dashboardStreamId);
throw;
}
}
public async Task RenameSmartFilterStreams(AppUserSmartFilter smartFilter)
{
var sideNavStreams = await _unitOfWork.UserRepository.GetSideNavStreamWithFilter(smartFilter.Id);
var dashboardStreams = await _unitOfWork.UserRepository.GetDashboardStreamWithFilter(smartFilter.Id);
foreach (var sideNavStream in sideNavStreams)
{
sideNavStream.Name = smartFilter.Name;
}
foreach (var dashboardStream in dashboardStreams)
{
dashboardStream.Name = smartFilter.Name;
}
await _unitOfWork.CommitAsync();
}
}


@ -329,7 +329,7 @@ public class TaskScheduler : ITaskScheduler
if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, normalizedOriginal]) ||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, string.Empty]))
{
_logger.LogDebug("Skipped scheduling ScanFolder for {Folder} as a job already queued",
_logger.LogTrace("Skipped scheduling ScanFolder for {Folder} as a job already queued",
normalizedFolder);
return;
}
@ -346,7 +346,7 @@ public class TaskScheduler : ITaskScheduler
var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, string.Empty]))
{
_logger.LogDebug("Skipped scheduling ScanFolder for {Folder} as a job already queued",
_logger.LogTrace("Skipped scheduling ScanFolder for {Folder} as a job already queued",
normalizedFolder);
return;
}


@ -8,8 +8,10 @@ using API.DTOs.Filtering;
using API.Entities;
using API.Entities.Enums;
using API.Helpers;
using API.Services.Tasks.Scanner.Parser;
using API.SignalR;
using Hangfire;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks;
@ -35,6 +37,9 @@ public interface ICleanupService
Task CleanupWantToRead();
Task ConsolidateProgress();
Task CleanupMediaErrors();
}
/// <summary>
/// Cleans up after operations on reoccurring basis
@ -88,9 +93,11 @@ public class CleanupService : ICleanupService
await CleanupBackups();
await SendProgress(0.35F, "Consolidating Progress Events");
_logger.LogInformation("Consolidating Progress Events");
await ConsolidateProgress();
await SendProgress(0.4F, "Consolidating Media Errors");
await CleanupMediaErrors();
await SendProgress(0.50F, "Cleaning deleted cover images");
_logger.LogInformation("Cleaning deleted cover images");
await DeleteSeriesCoverImages();
@ -241,6 +248,7 @@ public class CleanupService : ICleanupService
/// </summary>
public async Task ConsolidateProgress()
{
_logger.LogInformation("Consolidating Progress Events");
// AppUserProgress
var allProgress = await _unitOfWork.AppUserProgressRepository.GetAllProgress();
@ -291,6 +299,52 @@ public class CleanupService : ICleanupService
await _unitOfWork.CommitAsync();
}
/// <summary>
/// Scans through Media Error and removes any entries that have been fixed and are within the DB (proper files where wordcount/pagecount > 0)
/// </summary>
public async Task CleanupMediaErrors()
{
try
{
List<string> errorStrings = ["This archive cannot be read or not supported", "File format not supported"];
var mediaErrors = await _unitOfWork.MediaErrorRepository.GetAllErrorsAsync(errorStrings);
_logger.LogInformation("Beginning consolidation of {Count} Media Errors", mediaErrors.Count);
var pathToErrorMap = mediaErrors
.GroupBy(me => Parser.NormalizePath(me.FilePath))
.ToDictionary(
group => group.Key,
group => group.ToList() // The same file can be duplicated (rare issue when network drives die out midscan)
);
var normalizedPaths = pathToErrorMap.Keys.ToList();
// Find all files that are valid
var validFiles = await _unitOfWork.DataContext.MangaFile
.Where(f => normalizedPaths.Contains(f.FilePath) && f.Pages > 0)
.Select(f => f.FilePath)
.ToListAsync();
var removalCount = 0;
foreach (var validFilePath in validFiles)
{
if (!pathToErrorMap.TryGetValue(validFilePath, out var mediaError)) continue;
_unitOfWork.MediaErrorRepository.Remove(mediaError);
removalCount++;
}
await _unitOfWork.CommitAsync();
_logger.LogInformation("Finished consolidation of {Count} Media Errors, Removed: {RemovalCount}",
mediaErrors.Count, removalCount);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception consolidating media errors");
}
}
public async Task CleanupLogs()
{
_logger.LogInformation("Performing cleanup of logs directory");

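CleanupMediaErrors keys the error rows by Parser.NormalizePath and keeps a list per key because the same file can be recorded more than once (the mid-scan network-drive case called out in the comment). A toy version of that grouping step, with a stand-in normalizer and made-up paths:

// Sketch: collapsing duplicate media-error rows per normalized path.
using System;
using System.Collections.Generic;
using System.Linq;

var errors = new List<string>
{
    @"C:\Manga\Series\v01.cbz",
    @"C:/Manga/Series/v01.cbz",      // same file recorded twice with different separators
    @"C:\Manga\Series\v02.cbz"
};

var pathToErrors = errors
    .GroupBy(Normalize)
    .ToDictionary(g => g.Key, g => g.ToList());

Console.WriteLine(pathToErrors.Count);   // 2 keys; the v01 key holds both rows

// Stand-in for Parser.NormalizePath (the real implementation is not shown in this diff).
static string Normalize(string path) => path.Replace('\\', '/');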

@ -179,7 +179,7 @@ public class WordCountAnalyzerService : IWordCountAnalyzerService
var pageCounter = 1;
try
{
using var book = await EpubReader.OpenBookAsync(filePath, BookService.BookReaderOptions);
using var book = await EpubReader.OpenBookAsync(filePath, BookService.LenientBookReaderOptions);
var totalPages = book.Content.Html.Local;
foreach (var bookPage in totalPages)


@ -130,9 +130,9 @@ public abstract class DefaultParser(IDirectoryService directoryService) : IDefau
}
// Patch is SeriesSort from ComicInfo
if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
if (!string.IsNullOrEmpty(info.ComicInfo.SeriesSort))
{
info.SeriesSort = info.ComicInfo.TitleSort.Trim();
info.SeriesSort = info.ComicInfo.SeriesSort.Trim();
}
}


@ -167,7 +167,7 @@ public static partial class Parser
MatchOptions, RegexTimeout),
// Korean Volume: 제n화|권|회|장 -> Volume n, n화|권|회|장 -> Volume n, 63권#200.zip -> Volume 63 (no chapter, #200 is just files inside)
new Regex(
@"제?(?<Volume>\d+(\.\d)?)(권|회|화|장)",
@"제?(?<Volume>\d+(\.\d+)?)(권|회|화|장)",
MatchOptions, RegexTimeout),
// Korean Season: 시즌n -> Season n,
new Regex(

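The Korean volume regex now allows more than one digit after the decimal point ((\.\d)? becomes (\.\d+)?), so fractional volumes with longer fractions no longer match on the wrong digits. A quick before/after check with a made-up file name:

// Sketch: effect of widening the decimal group in the Korean volume regex.
using System;
using System.Text.RegularExpressions;

var before = new Regex(@"제?(?<Volume>\d+(\.\d)?)(권|회|화|장)");
var after  = new Regex(@"제?(?<Volume>\d+(\.\d+)?)(권|회|화|장)");

const string name = "제1.55권";                                   // hypothetical volume 1.55
Console.WriteLine(before.Match(name).Groups["Volume"].Value);      // "55"   (matches only the tail)
Console.WriteLine(after.Match(name).Groups["Volume"].Value);       // "1.55"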

@ -161,7 +161,7 @@ public class ScannerService : IScannerService
{
if (TaskScheduler.HasScanTaskRunningForSeries(series.Id))
{
_logger.LogDebug("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this series. Dropping request", folder);
_logger.LogTrace("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this series. Dropping request", folder);
return;
}
@ -186,7 +186,7 @@ public class ScannerService : IScannerService
{
if (TaskScheduler.HasScanTaskRunningForLibrary(library.Id))
{
_logger.LogDebug("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this library. Dropping request", folder);
_logger.LogTrace("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this library. Dropping request", folder);
return;
}
BackgroundJob.Schedule(() => ScanLibrary(library.Id, false, true), TimeSpan.FromMinutes(1));


@ -162,4 +162,10 @@ public class TokenService : ITokenService
{
return !JwtHelper.IsTokenValid(token);
}
public static DateTime GetTokenExpiry(string? token)
{
return JwtHelper.GetTokenExpiry(token);
}
}
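GetTokenExpiry above forwards to Kavita's JwtHelper (not part of this diff); it feeds the expanded ScrobbleError details in ScrobblingService. If you needed the same value without that helper, the expiry can be read straight from the token, for example with System.IdentityModel.Tokens.Jwt (an assumption about tooling, not necessarily what JwtHelper does internally):

// Sketch: reading a JWT's expiry without validating the signature.
using System;
using System.IdentityModel.Tokens.Jwt;

public static class TokenExpirySketch
{
    public static DateTime GetExpiry(string? token)
    {
        if (string.IsNullOrEmpty(token)) return DateTime.MinValue;

        var handler = new JwtSecurityTokenHandler();
        if (!handler.CanReadToken(token)) return DateTime.MinValue;

        return handler.ReadJwtToken(token).ValidTo;   // UTC; DateTime.MinValue when no exp claim
    }
}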