Reader Bugs + New Features (#1536)

* Fixed a typo on the Manage Tasks page: Reoccuring -> Recurring

* Fixed a bug in MinimumNumberFromRange where a regex wasn't properly constructed, which could skew results. A minimal sketch of the corrected pattern is below.
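
Inside a character class, an unescaped hyphen directly after the \d shorthand is ambiguous (it reads as a range delimiter) and can make the pattern invalid, so the hyphen is now escaped. A minimal standalone sketch of the corrected validation (illustrative, not the project's code):

    using System;
    using System.Text.RegularExpressions;

    // With the hyphen escaped, the class matches digits, a literal '-' and '.',
    // so ranges like "1-3" or "4.5" pass validation while "1a" does not.
    var valid = new Regex(@"^[\d\-.]+$");
    Console.WriteLine(valid.IsMatch("1-3")); // True
    Console.WriteLine(valid.IsMatch("4.5")); // True
    Console.WriteLine(valid.IsMatch("1a"));  // False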

* Fixed a bug where Volume numbers that were floats wouldn't render correctly in the manga reader menu.

* Added the ability to double-click an image to bookmark it. Optimized the bookmark and unbookmark flows to remove two DB calls and reworked the call flow to speed it up (a condensed sketch follows this item).

Fixed logic where, in double (manga) mode, both images wouldn't show the bookmark effect even though both were saved. Likewise, fixed a bug where both images weren't updating UI state, so when switching from double (manga) to single, the second image wouldn't show as bookmarked without a refresh.
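
Condensed from the BookmarkService diff below: the existence check now uses the user's already-loaded Bookmarks navigation property instead of a separate repository query, and hitting an existing bookmark is treated as success rather than an error.

    // Excerpt (simplified) of the reworked duplicate check:
    var userBookmark = userWithBookmarks.Bookmarks
        .SingleOrDefault(b => b.Page == bookmarkDto.Page && b.ChapterId == bookmarkDto.ChapterId);
    if (userBookmark != null) return true; // already bookmarked, nothing to do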

* Double-click works perfectly for bookmarking

* Collection cover image chooser will now prompt with all series covers by default.

Reset button is now moved up to the first slot if applicable.

* When a Completed series is fully read by a user, a nightly task will now remove that series from their Want to Read list (a condensed sketch of the filter it uses is below).
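
Condensed from the CleanupService diff below: the nightly job builds a filter of fully read series whose publication status is Completed (Cancelled was added by a later change in this PR, see the bullet further down) and strips any matches from each user's Want to Read list.

    // Excerpt (simplified) of the filter driving the cleanup:
    var filter = new FilterDto()
    {
        PublicationStatus = new List<PublicationStatus>() { PublicationStatus.Completed, PublicationStatus.Cancelled },
        Libraries = libraryIds,
        ReadStatus = new ReadStatus() { Read = true, InProgress = false, NotRead = false }
    };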

* Added ability to trigger Want to Read cleanup from Tasks page.

* Moved the brightness readout to the label line and fixed a Bootstrap migration bug where small buttons weren't actually small.

* Implemented the ability to filter by release year (min, max, or both); an illustrative sketch is below.
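
An illustrative sketch of how such a min/max year filter can be applied. The types here (SeriesInfo, ReleaseYear) are simplified stand-ins for the example, not the project's actual filter code:

    using System.Collections.Generic;
    using System.Linq;

    // Illustrative only: keep series whose release year falls inside an optional
    // [min, max] window; a missing bound means "no restriction on that side".
    public record SeriesInfo(string Name, int ReleaseYear);

    public static class ReleaseYearFilter
    {
        public static IEnumerable<SeriesInfo> Apply(IEnumerable<SeriesInfo> all, int? min, int? max) =>
            all.Where(s => (!min.HasValue || s.ReleaseYear >= min.Value)
                        && (!max.HasValue || s.ReleaseYear <= max.Value));
    }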

* Fixed a log message that wasn't properly formatted when a scan finished and no files had changed.

* Cleaned up some code and merged some methods

* Implemented sorting by Release Year in the metadata filter.

* Fixed the code that finds ComicInfo.xml inside archives to check only the archive root and to match casing exactly, so the file must be named ComicInfo.xml (see the sketch below).
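
An illustrative check of what the stricter lookup means; ZipArchiveEntry is used here only for the example, the ArchiveService diff below shows the real comparison:

    using System;
    using System.IO.Compression;

    // Illustrative only: an entry qualifies only if its full path inside the
    // archive is exactly "ComicInfo.xml"; nested or differently cased copies
    // such as "sub/comicinfo.xml" no longer match.
    public static class ComicInfoCheck
    {
        public static bool IsRootComicInfo(ZipArchiveEntry entry) =>
            entry.FullName.Equals("ComicInfo.xml", StringComparison.Ordinal);
    }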

* Dependency updates

* Refactored some strings into consts and used TriggerJob rather than just enqueuing

* Fixed the prefetcher, which wasn't loading in the correct order it was designed for.

* Cleaned up all traces of CircularArray from MangaReader

* Removed debug code

* Fixed a bug with the webtoon reader in fullscreen mode where the continuous reader wouldn't trigger

* When cleaning up series from users' Want to Read lists, both Completed and Cancelled series are now included.

* Fixed a bug in the manga reader where the pagination area wouldn't extend to the bottom for small images

* Added a new method for hashing during prod builds and ensured we always use AOT

* Fixed a bug where the save button wouldn't enable when a color change occurred.

* Cleaned up some issues in a contributor's PR.
Joseph Milazzo 2022-09-16 08:06:33 -05:00 committed by GitHub
parent 52c10510b2
commit 9cf4cf742b
49 changed files with 408 additions and 221 deletions

@ -19,7 +19,7 @@ public interface IAccountService
Task<IEnumerable<ApiException>> ValidateUsername(string username);
Task<IEnumerable<ApiException>> ValidateEmail(string email);
Task<bool> HasBookmarkPermission(AppUser user);
Task<bool> HasDownloadPermission(AppUser appuser);
Task<bool> HasDownloadPermission(AppUser user);
}
public class AccountService : IAccountService

@ -44,7 +44,7 @@ public class ArchiveService : IArchiveService
private readonly ILogger<ArchiveService> _logger;
private readonly IDirectoryService _directoryService;
private readonly IImageService _imageService;
private const string ComicInfoFilename = "comicinfo";
private const string ComicInfoFilename = "ComicInfo.xml";
public ArchiveService(ILogger<ArchiveService> logger, IDirectoryService directoryService, IImageService imageService)
{
@ -332,9 +332,8 @@ public class ArchiveService : IArchiveService
{
var filenameWithoutExtension = Path.GetFileNameWithoutExtension(name).ToLower();
return !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(fullName)
&& filenameWithoutExtension.Equals(ComicInfoFilename, StringComparison.InvariantCultureIgnoreCase)
&& !filenameWithoutExtension.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith)
&& Tasks.Scanner.Parser.Parser.IsXml(name);
&& fullName.Equals(ComicInfoFilename)
&& !filenameWithoutExtension.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith);
}
/// <summary>

@ -561,8 +561,6 @@ public class BookService : IBookService
var seriesIndex = string.Empty;
var series = string.Empty;
var specialName = string.Empty;
var groupPosition = string.Empty;
var titleSort = string.Empty;
foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems)
@ -578,7 +576,6 @@ public class BookService : IBookService
break;
case "calibre:title_sort":
specialName = metadataItem.Content;
titleSort = metadataItem.Content;
break;
}
@ -592,7 +589,7 @@ public class BookService : IBookService
series = metadataItem.Content;
break;
case "collection-type":
groupPosition = metadataItem.Content;
// These look to be genres from https://manual.calibre-ebook.com/sub_groups.html
break;
}
}
@ -965,7 +962,7 @@ public class BookService : IBookService
}
catch (Exception)
{
/* Swallow exception. Some css doesn't have style rules ending in ; */
/* Swallow exception. Some css don't have style rules ending in ; */
}
body = Regex.Replace(body, @"([\s:]0)(px|pt|%|em)", "$1");

@ -79,15 +79,14 @@ public class BookmarkService : IBookmarkService
/// <returns>If the save to DB and copy was successful</returns>
public async Task<bool> BookmarkPage(AppUser userWithBookmarks, BookmarkDto bookmarkDto, string imageToBookmark)
{
if (userWithBookmarks == null || userWithBookmarks.Bookmarks == null) return false;
try
{
var userBookmark =
await _unitOfWork.UserRepository.GetBookmarkForPage(bookmarkDto.Page, bookmarkDto.ChapterId, userWithBookmarks.Id);
var userBookmark = userWithBookmarks.Bookmarks.SingleOrDefault(b => b.Page == bookmarkDto.Page && b.ChapterId == bookmarkDto.ChapterId);
if (userBookmark != null)
{
_logger.LogError("Bookmark already exists for Series {SeriesId}, Volume {VolumeId}, Chapter {ChapterId}, Page {PageNum}", bookmarkDto.SeriesId, bookmarkDto.VolumeId, bookmarkDto.ChapterId, bookmarkDto.Page);
return false;
return true;
}
var fileInfo = _directoryService.FileSystem.FileInfo.FromFileName(imageToBookmark);
@ -101,14 +100,13 @@ public class BookmarkService : IBookmarkService
VolumeId = bookmarkDto.VolumeId,
SeriesId = bookmarkDto.SeriesId,
ChapterId = bookmarkDto.ChapterId,
FileName = Path.Join(targetFolderStem, fileInfo.Name)
FileName = Path.Join(targetFolderStem, fileInfo.Name),
AppUserId = userWithBookmarks.Id
};
_directoryService.CopyFileToDirectory(imageToBookmark, targetFilepath);
userWithBookmarks.Bookmarks ??= new List<AppUserBookmark>();
userWithBookmarks.Bookmarks.Add(bookmark);
_unitOfWork.UserRepository.Update(userWithBookmarks);
_unitOfWork.UserRepository.Add(bookmark);
await _unitOfWork.CommitAsync();
if (settings.ConvertBookmarkToWebP)
@ -136,15 +134,12 @@ public class BookmarkService : IBookmarkService
public async Task<bool> RemoveBookmarkPage(AppUser userWithBookmarks, BookmarkDto bookmarkDto)
{
if (userWithBookmarks.Bookmarks == null) return true;
var bookmarkToDelete = userWithBookmarks.Bookmarks.SingleOrDefault(x =>
x.ChapterId == bookmarkDto.ChapterId && x.Page == bookmarkDto.Page);
try
{
var bookmarkToDelete = userWithBookmarks.Bookmarks.SingleOrDefault(x =>
x.ChapterId == bookmarkDto.ChapterId && x.AppUserId == userWithBookmarks.Id && x.Page == bookmarkDto.Page &&
x.SeriesId == bookmarkDto.SeriesId);
if (bookmarkToDelete != null)
{
await DeleteBookmarkFiles(new[] {bookmarkToDelete});
_unitOfWork.UserRepository.Delete(bookmarkToDelete);
}
@ -152,10 +147,10 @@ public class BookmarkService : IBookmarkService
}
catch (Exception)
{
await _unitOfWork.RollbackAsync();
return false;
}
await DeleteBookmarkFiles(new[] {bookmarkToDelete});
return true;
}

@ -368,15 +368,22 @@ public class DirectoryService : IDirectoryService
{
var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath);
if (!di.Exists) return;
try
{
foreach (var file in di.EnumerateFiles())
{
file.Delete();
}
foreach (var dir in di.EnumerateDirectories())
{
dir.Delete(true);
}
}
catch (UnauthorizedAccessException ex)
{
_logger.LogError(ex, "[ClearDirectory] Could not delete {DirectoryPath} due to permission issue", directoryPath);
}
foreach (var file in di.EnumerateFiles())
{
file.Delete();
}
foreach (var dir in di.EnumerateDirectories())
{
dir.Delete(true);
}
}
/// <summary>

@ -128,7 +128,7 @@ public class ImageService : IImageService
return true;
}
catch (Exception ex)
catch (Exception)
{
/* Swallow Exception */
}

@ -25,6 +25,7 @@ public interface IReaderService
Task MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable<Chapter> chapters);
Task<bool> SaveReadingProgress(ProgressDto progressDto, int userId);
Task<int> CapPageToChapter(int chapterId, int page);
int CapPageToChapter(Chapter chapter, int page);
Task<int> GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId);
Task<int> GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId);
Task<ChapterDto> GetContinuePoint(int seriesId, int userId);
@ -273,6 +274,21 @@ public class ReaderService : IReaderService
return page;
}
public int CapPageToChapter(Chapter chapter, int page)
{
if (page > chapter.Pages)
{
page = chapter.Pages;
}
if (page < 0)
{
page = 0;
}
return page;
}
/// <summary>
/// Tries to find the next logical Chapter
/// </summary>

@ -1,14 +1,13 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Entities.Enums;
using API.Helpers.Converters;
using API.Services.Tasks;
using API.Services.Tasks.Metadata;
using API.Services.Tasks.Scanner;
using Hangfire;
using Microsoft.Extensions.Logging;
@ -49,9 +48,13 @@ public class TaskScheduler : ITaskScheduler
public static BackgroundJobServer Client => new BackgroundJobServer();
public const string ScanQueue = "scan";
public const string DefaultQueue = "default";
public const string RemoveFromWantToReadTaskId = "remove-from-want-to-read";
public const string CleanupDbTaskId = "cleanup-db";
public const string CleanupTaskId = "cleanup";
public const string BackupTaskId = "backup";
public const string ScanLibrariesTaskId = "scan-libraries";
public static readonly IList<string> ScanTasks = new List<string>()
{"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"};
private static readonly ImmutableArray<string> ScanTasks = ImmutableArray.Create("ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries");
private static readonly Random Rnd = new Random();
@ -83,27 +86,28 @@ public class TaskScheduler : ITaskScheduler
{
var scanLibrarySetting = setting;
_logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting);
RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(),
RecurringJob.AddOrUpdate(ScanLibrariesTaskId, () => _scannerService.ScanLibraries(),
() => CronConverter.ConvertToCronNotation(scanLibrarySetting), TimeZoneInfo.Local);
}
else
{
RecurringJob.AddOrUpdate("scan-libraries", () => ScanLibraries(), Cron.Daily, TimeZoneInfo.Local);
RecurringJob.AddOrUpdate(ScanLibrariesTaskId, () => ScanLibraries(), Cron.Daily, TimeZoneInfo.Local);
}
setting = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Value;
if (setting != null)
{
_logger.LogDebug("Scheduling Backup Task for {Setting}", setting);
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting), TimeZoneInfo.Local);
RecurringJob.AddOrUpdate(BackupTaskId, () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting), TimeZoneInfo.Local);
}
else
{
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly, TimeZoneInfo.Local);
RecurringJob.AddOrUpdate(BackupTaskId, () => _backupService.BackupDatabase(), Cron.Weekly, TimeZoneInfo.Local);
}
RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily, TimeZoneInfo.Local);
RecurringJob.AddOrUpdate("cleanup-db", () => _cleanupService.CleanupDbEntries(), Cron.Daily, TimeZoneInfo.Local);
RecurringJob.AddOrUpdate(CleanupTaskId, () => _cleanupService.Cleanup(), Cron.Daily, TimeZoneInfo.Local);
RecurringJob.AddOrUpdate(CleanupDbTaskId, () => _cleanupService.CleanupDbEntries(), Cron.Daily, TimeZoneInfo.Local);
RecurringJob.AddOrUpdate(RemoveFromWantToReadTaskId, () => _cleanupService.CleanupWantToRead(), Cron.Daily, TimeZoneInfo.Local);
}
#region StatsTasks
@ -154,7 +158,6 @@ public class TaskScheduler : ITaskScheduler
BackgroundJob.Enqueue(() => _themeService.Scan());
}
#endregion
#region UpdateTasks

@ -1,9 +1,14 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs.Filtering;
using API.Entities;
using API.Entities.Enums;
using API.Helpers;
using API.SignalR;
using Hangfire;
using Microsoft.AspNetCore.SignalR;
@ -21,6 +26,11 @@ public interface ICleanupService
Task DeleteTagCoverImages();
Task CleanupBackups();
void CleanupTemp();
/// <summary>
/// Responsible to remove Series from Want To Read when user's have fully read the series and the series has Publication Status of Completed or Cancelled.
/// </summary>
/// <returns></returns>
Task CleanupWantToRead();
}
/// <summary>
/// Cleans up after operations on reoccurring basis
@ -195,4 +205,43 @@ public class CleanupService : ICleanupService
_logger.LogInformation("Temp directory purged");
}
public async Task CleanupWantToRead()
{
_logger.LogInformation("Performing cleanup of Series that are Completed and have been fully read that are in Want To Read list");
var libraryIds = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).Select(l => l.Id).ToList();
var filter = new FilterDto()
{
PublicationStatus = new List<PublicationStatus>()
{
PublicationStatus.Completed,
PublicationStatus.Cancelled
},
Libraries = libraryIds,
ReadStatus = new ReadStatus()
{
Read = true,
InProgress = false,
NotRead = false
}
};
foreach (var user in await _unitOfWork.UserRepository.GetAllUsersAsync(AppUserIncludes.WantToRead))
{
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(0, user.Id, new UserParams(), filter);
var seriesIds = series.Select(s => s.Id).ToList();
if (seriesIds.Count == 0) continue;
user.WantToRead ??= new List<Series>();
user.WantToRead = user.WantToRead.Where(s => !seriesIds.Contains(s.Id)).ToList();
_unitOfWork.UserRepository.Update(user);
}
if (_unitOfWork.HasChanges())
{
await _unitOfWork.CommitAsync();
}
_logger.LogInformation("Performing cleanup of Series that are Completed and have been fully read that are in Want To Read list, completed");
}
}

@ -215,6 +215,10 @@ public class ParseScannedFiles
/// <param name="libraryType"></param>
/// <param name="folders"></param>
/// <param name="libraryName"></param>
/// <param name="isLibraryScan">If true, does a directory scan first (resulting in folders being tackled in parallel), else does an immediate scan files</param>
/// <param name="seriesPaths">A map of Series names -> existing folder paths to handle skipping folders</param>
/// <param name="processSeriesInfos">Action which returns if the folder was skipped and the infos from said folder</param>
/// <param name="forceCheck">Defaults to false</param>
/// <returns></returns>
public async Task ScanLibrariesForSeries(LibraryType libraryType,
IEnumerable<string> folders, string libraryName, bool isLibraryScan,

@ -1029,7 +1029,7 @@ public static class Parser
{
try
{
if (!Regex.IsMatch(range, @"^[\d-.]+$"))
if (!Regex.IsMatch(range, @"^[\d\-.]+$"))
{
return (float) 0.0;
}
@ -1047,7 +1047,7 @@ public static class Parser
{
try
{
if (!Regex.IsMatch(range, @"^[\d-.]+$"))
if (!Regex.IsMatch(range, @"^[\d\-.]+$"))
{
return (float) 0.0;
}

@ -210,13 +210,13 @@ public class ProcessSeries : IProcessSeries
if (!library.Folders.Select(f => f.Path).Contains(seriesDirs.Keys.First()))
{
series.FolderPath = Parser.Parser.NormalizePath(seriesDirs.Keys.First());
_logger.LogDebug("Updating {Series} FolderPath to {FolderPath}", series.Name, series.FolderPath);
}
}
}
public void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false)
{
//BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForSeries(libraryId, seriesId, forceUpdate));
BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(libraryId, seriesId, forceUpdate));
}

@ -25,6 +25,7 @@ public interface IScannerService
/// cover images if forceUpdate is true.
/// </summary>
/// <param name="libraryId">Library to scan against</param>
/// <param name="forceUpdate">Don't perform optimization checks, defaults to false</param>
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
@ -396,6 +397,7 @@ public class ScannerService : IScannerService
/// ie) all entities will be rechecked for new cover images and comicInfo.xml changes
/// </summary>
/// <param name="libraryId"></param>
/// <param name="forceUpdate">Defaults to false</param>
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
@ -484,7 +486,7 @@ public class ScannerService : IScannerService
{
_logger.LogInformation(
"[ScannerService] Finished library scan of {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}. There were no changes",
totalFiles, seenSeries.Count, sw.ElapsedMilliseconds, library.Name);
seenSeries.Count, sw.ElapsedMilliseconds, library.Name);
}
else
{