Release Shakeout 3 (#1597)

* Fixed a bug where bulk selection on series detail wouldn't allow you to select the whole card, only the checkbox.

* Refactored the implementation of MarkChaptersAsRead to streamline it.

* Fixed a bug where volume cards weren't properly updating their read state based on events from backend.

* Added [ScannerService] to more loggers

* Fixed invite user flow

* Fixed broken edit user flow

* Fixed calling device service on unauthenticated screens causing redirection

* Fixed reset password via email not working when success message was sent back

* Fixed broken white theme on book reader

* Small tweaks to white theme

* More fixes

* Adjusted AutomaticRetries
This commit is contained in:
Joe Milazzo 2022-10-20 16:39:42 -07:00 committed by GitHub
parent b396217e7d
commit dbe1152d87
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 224 additions and 66 deletions

View file

@@ -21,8 +21,8 @@ public interface IReaderService
{
Task MarkSeriesAsRead(AppUser user, int seriesId);
Task MarkSeriesAsUnread(AppUser user, int seriesId);
Task MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable<Chapter> chapters);
Task MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable<Chapter> chapters);
Task MarkChaptersAsRead(AppUser user, int seriesId, IList<Chapter> chapters);
Task MarkChaptersAsUnread(AppUser user, int seriesId, IList<Chapter> chapters);
Task<bool> SaveReadingProgress(ProgressDto progressDto, int userId);
Task<int> CapPageToChapter(int chapterId, int page);
int CapPageToChapter(Chapter chapter, int page);
@@ -76,8 +76,6 @@ public class ReaderService : IReaderService
{
await MarkChaptersAsRead(user, seriesId, volume.Chapters);
}
_unitOfWork.UserRepository.Update(user);
}
/// <summary>
@@ -93,18 +91,18 @@ public class ReaderService : IReaderService
{
await MarkChaptersAsUnread(user, seriesId, volume.Chapters);
}
_unitOfWork.UserRepository.Update(user);
}
/// <summary>
/// Marks all Chapters as Read by creating or updating UserProgress rows. Does not commit.
/// </summary>
/// <remarks>Emits events to the UI for each chapter progress and one for each volume progress</remarks>
/// <param name="user"></param>
/// <param name="seriesId"></param>
/// <param name="chapters"></param>
public async Task MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable<Chapter> chapters)
public async Task MarkChaptersAsRead(AppUser user, int seriesId, IList<Chapter> chapters)
{
var seenVolume = new Dictionary<int, bool>();
foreach (var chapter in chapters)
{
var userProgress = GetUserProgressForChapter(user, chapter);
@@ -118,19 +116,29 @@ public class ReaderService : IReaderService
SeriesId = seriesId,
ChapterId = chapter.Id
});
await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate,
MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, seriesId, chapter.VolumeId, chapter.Id, chapter.Pages));
}
else
{
userProgress.PagesRead = chapter.Pages;
userProgress.SeriesId = seriesId;
userProgress.VolumeId = chapter.VolumeId;
await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate,
MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, userProgress.SeriesId, userProgress.VolumeId, userProgress.ChapterId, chapter.Pages));
}
await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate,
MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, seriesId, chapter.VolumeId, chapter.Id, chapter.Pages));
// Send out volume events for each distinct volume
if (!seenVolume.ContainsKey(chapter.VolumeId))
{
seenVolume[chapter.VolumeId] = true;
await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate,
MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, seriesId,
chapter.VolumeId, 0, chapters.Where(c => c.VolumeId == chapter.VolumeId).Sum(c => c.Pages)));
}
}
_unitOfWork.UserRepository.Update(user);
}
/// <summary>
@@ -139,8 +147,9 @@ public class ReaderService : IReaderService
/// <param name="user"></param>
/// <param name="seriesId"></param>
/// <param name="chapters"></param>
public async Task MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable<Chapter> chapters)
public async Task MarkChaptersAsUnread(AppUser user, int seriesId, IList<Chapter> chapters)
{
var seenVolume = new Dictionary<int, bool>();
foreach (var chapter in chapters)
{
var userProgress = GetUserProgressForChapter(user, chapter);
@@ -153,7 +162,17 @@ public class ReaderService : IReaderService
await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate,
MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, userProgress.SeriesId, userProgress.VolumeId, userProgress.ChapterId, 0));
// Send out volume events for each distinct volume
if (!seenVolume.ContainsKey(chapter.VolumeId))
{
seenVolume[chapter.VolumeId] = true;
await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate,
MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, seriesId,
chapter.VolumeId, 0, 0));
}
}
_unitOfWork.UserRepository.Update(user);
}
/// <summary>
@@ -526,7 +545,7 @@ public class ReaderService : IReaderService
var chapters = volume.Chapters
.OrderBy(c => float.Parse(c.Number))
.Where(c => !c.IsSpecial && Tasks.Scanner.Parser.Parser.MaxNumberFromRange(c.Range) <= chapterNumber);
await MarkChaptersAsRead(user, volume.SeriesId, chapters);
await MarkChaptersAsRead(user, volume.SeriesId, chapters.ToList());
}
}

View file

@@ -150,7 +150,7 @@ public class ParseScannedFiles
catch (Exception ex)
{
_logger.LogError(ex,
"There was an error trying to find and apply .kavitaignores above the Series Folder. Scanning without them present");
"[ScannerService] There was an error trying to find and apply .kavitaignores above the Series Folder. Scanning without them present");
}
return seriesMatcher;
@@ -200,13 +200,13 @@ public class ParseScannedFiles
}
catch (Exception ex)
{
_logger.LogCritical(ex, "{SeriesName} matches against multiple series in the parsed series. This indicates a critical kavita issue. Key will be skipped", info.Series);
_logger.LogCritical(ex, "[ScannerService] {SeriesName} matches against multiple series in the parsed series. This indicates a critical kavita issue. Key will be skipped", info.Series);
foreach (var seriesKey in scannedSeries.Keys.Where(ps =>
ps.Format == info.Format && (ps.NormalizedName.Equals(normalizedSeries)
|| ps.NormalizedName.Equals(normalizedLocalizedSeries)
|| ps.NormalizedName.Equals(normalizedSortSeries))))
{
_logger.LogCritical("Matches: {SeriesName} matches on {SeriesKey}", info.Series, seriesKey.Name);
_logger.LogCritical("[ScannerService] Matches: {SeriesName} matches on {SeriesKey}", info.Series, seriesKey.Name);
}
}
}
@@ -240,14 +240,14 @@ public class ParseScannedFiles
}
catch (Exception ex)
{
_logger.LogCritical(ex, "Multiple series detected for {SeriesName} ({File})! This is critical to fix! There should only be 1", info.Series, info.FullFilePath);
_logger.LogCritical(ex, "[ScannerService] Multiple series detected for {SeriesName} ({File})! This is critical to fix! There should only be 1", info.Series, info.FullFilePath);
var values = scannedSeries.Where(p =>
(Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedSeries ||
Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedLocalSeries) &&
p.Key.Format == info.Format);
foreach (var pair in values)
{
_logger.LogCritical("Duplicate Series in DB matches with {SeriesName}: {DuplicateName}", info.Series, pair.Key.Name);
_logger.LogCritical("[ScannerService] Duplicate Series in DB matches with {SeriesName}: {DuplicateName}", info.Series, pair.Key.Name);
}
}
@@ -285,11 +285,11 @@ public class ParseScannedFiles
Format = fp.Format,
}).ToList();
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
_logger.LogDebug("Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
_logger.LogDebug("[ScannerService] Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
return;
}
_logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder);
_logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent(folder, libraryName, ProgressEventType.Updated));
if (files.Count == 0)
@@ -316,7 +316,7 @@ public class ParseScannedFiles
catch (Exception ex)
{
_logger.LogError(ex,
"There was an exception that occurred during tracking {FilePath}. Skipping this file",
"[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
info.FullFilePath);
}
}
@@ -339,7 +339,7 @@ public class ParseScannedFiles
}
catch (ArgumentException ex)
{
_logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath);
_logger.LogError(ex, "[ScannerService] The directory '{FolderPath}' does not exist", folderPath);
}
}

View file

@@ -29,12 +29,12 @@ public interface IScannerService
/// <param name="forceUpdate">Don't perform optimization checks, defaults to false</param>
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
Task ScanLibrary(int libraryId, bool forceUpdate = false);
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
Task ScanLibraries();
[Queue(TaskScheduler.ScanQueue)]
@@ -407,7 +407,7 @@ public class ScannerService : IScannerService
[Queue(TaskScheduler.ScanQueue)]
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ScanLibraries()
{
_logger.LogInformation("Starting Scan of All Libraries");