Time Estimation Cleanup (#1301)
* Moved the time-to-read calculation to the backend. Tweaked some logic around showing the estimated time to complete.
* Added debug logging to help pinpoint a duplicate-series issue in Kavita.
* Series-combination logic is now error-checked in a special way so Robbie can reproduce an issue.
* Migrated the chapter detail card to use the backend for time calculation. Ensure all chapters are taken into account for volume time calculations.
* Tweaked messaging for some critical logs to include the file.
* Ensure the page count is shown as a comma-separated number.
* Moved Hangfire annotations to the interface level. Adjusted the word count service to always recalculate when the user requests it via analyze series files.
parent 85b4ad0c58
commit 8e69b6cfc0
12 changed files with 161 additions and 62 deletions
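The Hangfire annotation move matters because Kavita enqueues these jobs through their service interfaces, and Hangfire resolves job filter attributes from the method referenced in the enqueue expression; attributes declared only on the implementation class are therefore never applied. Below is a minimal sketch of the pattern, using a hypothetical ISampleJob/SampleJob pair rather than Kavita's own services.

// Minimal sketch (not Kavita code): Hangfire filter attributes on an interface method.
// ISampleJob / SampleJob are illustrative names; the attribute usage mirrors the hunks below.
using System;
using System.Threading.Tasks;
using Hangfire;

public interface ISampleJob
{
    // Declared here so the filters are found when the job is enqueued via the interface.
    [DisableConcurrentExecution(timeoutInSeconds: 60 * 60)]
    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
    Task RunAsync(int libraryId);
}

public class SampleJob : ISampleJob
{
    public Task RunAsync(int libraryId)
    {
        Console.WriteLine($"Processing library {libraryId}");
        return Task.CompletedTask;
    }
}

public static class SampleJobScheduler
{
    public static void Enqueue(int libraryId)
    {
        // The job is stored against ISampleJob.RunAsync, so the interface-level
        // filters (no concurrent execution, no retries) are applied when it runs.
        BackgroundJob.Enqueue<ISampleJob>(job => job.RunAsync(libraryId));
    }
}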
@@ -19,6 +19,7 @@ public interface IBookmarkService
     Task<bool> BookmarkPage(AppUser userWithBookmarks, BookmarkDto bookmarkDto, string imageToBookmark);
     Task<bool> RemoveBookmarkPage(AppUser userWithBookmarks, BookmarkDto bookmarkDto);
     Task<IEnumerable<string>> GetBookmarkFilesById(IEnumerable<int> bookmarkIds);
+    [DisableConcurrentExecution(timeoutInSeconds: 2 * 60 * 60), AutomaticRetry(Attempts = 0)]
     Task ConvertAllBookmarkToWebP();

 }
@@ -173,7 +174,6 @@ public class BookmarkService : IBookmarkService
     /// <summary>
     /// This is a long-running job that will convert all bookmarks into WebP. Do not invoke anyway except via Hangfire.
     /// </summary>
-    [DisableConcurrentExecution(timeoutInSeconds: 2 * 60 * 60), AutomaticRetry(Attempts = 0)]
     public async Task ConvertAllBookmarkToWebP()
     {
         var bookmarkDirectory =
@@ -27,6 +27,8 @@ public interface IMetadataService
     /// </summary>
     /// <param name="libraryId"></param>
     /// <param name="forceUpdate"></param>
+    [DisableConcurrentExecution(timeoutInSeconds: 60 * 60 * 60)]
+    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
     Task RefreshMetadata(int libraryId, bool forceUpdate = false);
     /// <summary>
     /// Performs a forced refresh of metadata just for a series and it's nested entities
@@ -196,8 +198,6 @@ public class MetadataService : IMetadataService
     /// <remarks>This can be heavy on memory first run</remarks>
     /// <param name="libraryId"></param>
     /// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
-    [DisableConcurrentExecution(timeoutInSeconds: 60 * 60 * 60)]
-    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
     public async Task RefreshMetadata(int libraryId, bool forceUpdate = false)
     {
         var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
@@ -17,8 +17,10 @@ namespace API.Services.Tasks.Metadata;

 public interface IWordCountAnalyzerService
 {
+    [DisableConcurrentExecution(timeoutInSeconds: 60 * 60 * 60)]
+    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
     Task ScanLibrary(int libraryId, bool forceUpdate = false);
-    Task ScanSeries(int libraryId, int seriesId, bool forceUpdate = false);
+    Task ScanSeries(int libraryId, int seriesId, bool forceUpdate = true);
 }

 /// <summary>
@@ -40,8 +42,7 @@ public class WordCountAnalyzerService : IWordCountAnalyzerService
         _cacheHelper = cacheHelper;
     }

-    [DisableConcurrentExecution(timeoutInSeconds: 60 * 60 * 60)]
-    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
+
     public async Task ScanLibrary(int libraryId, bool forceUpdate = false)
     {
         var sw = Stopwatch.StartNew();
@@ -113,7 +114,7 @@ public class WordCountAnalyzerService : IWordCountAnalyzerService

     }

-    public async Task ScanSeries(int libraryId, int seriesId, bool forceUpdate = false)
+    public async Task ScanSeries(int libraryId, int seriesId, bool forceUpdate = true)
     {
         var sw = Stopwatch.StartNew();
         var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
@@ -126,7 +127,7 @@ public class WordCountAnalyzerService : IWordCountAnalyzerService
         await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
             MessageFactory.WordCountAnalyzerProgressEvent(libraryId, 0F, ProgressEventType.Started, series.Name));

-        await ProcessSeries(series);
+        await ProcessSeries(series, forceUpdate);

         if (_unitOfWork.HasChanges())
         {
@@ -164,27 +164,44 @@ namespace API.Services.Tasks.Scanner
             info.Series = MergeName(info);

             var normalizedSeries = Parser.Parser.Normalize(info.Series);
+            var normalizedSortSeries = Parser.Parser.Normalize(info.SeriesSort);
             var normalizedLocalizedSeries = Parser.Parser.Normalize(info.LocalizedSeries);
-            var existingKey = _scannedSeries.Keys.FirstOrDefault(ps =>
-                ps.Format == info.Format && (ps.NormalizedName == normalizedSeries
-                                             || ps.NormalizedName == normalizedLocalizedSeries));
-            existingKey ??= new ParsedSeries()
-            {
-                Format = info.Format,
-                Name = info.Series,
-                NormalizedName = normalizedSeries
-            };
-
-            _scannedSeries.AddOrUpdate(existingKey, new List<ParserInfo>() {info}, (_, oldValue) =>
-            {
-                oldValue ??= new List<ParserInfo>();
-                if (!oldValue.Contains(info))
-                {
-                    oldValue.Add(info);
-                }
-
-                return oldValue;
-            });
+
+            try
+            {
+                var existingKey = _scannedSeries.Keys.SingleOrDefault(ps =>
+                    ps.Format == info.Format && (ps.NormalizedName.Equals(normalizedSeries)
+                                                 || ps.NormalizedName.Equals(normalizedLocalizedSeries)
+                                                 || ps.NormalizedName.Equals(normalizedSortSeries)));
+                existingKey ??= new ParsedSeries()
+                {
+                    Format = info.Format,
+                    Name = info.Series,
+                    NormalizedName = normalizedSeries
+                };
+
+                _scannedSeries.AddOrUpdate(existingKey, new List<ParserInfo>() {info}, (_, oldValue) =>
+                {
+                    oldValue ??= new List<ParserInfo>();
+                    if (!oldValue.Contains(info))
+                    {
+                        oldValue.Add(info);
+                    }
+
+                    return oldValue;
+                });
+            }
+            catch (Exception ex)
+            {
+                _logger.LogCritical(ex, "{SeriesName} matches against multiple series in the parsed series. This indicates a critical kavita issue. Key will be skipped", info.Series);
+                foreach (var seriesKey in _scannedSeries.Keys.Where(ps =>
+                             ps.Format == info.Format && (ps.NormalizedName.Equals(normalizedSeries)
+                                                          || ps.NormalizedName.Equals(normalizedLocalizedSeries)
+                                                          || ps.NormalizedName.Equals(normalizedSortSeries))))
+                {
+                    _logger.LogCritical("Matches: {SeriesName} matches on {SeriesKey}", info.Series, seriesKey.Name);
+                }
+            }
         }

         /// <summary>
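The hunk above is the "error checked in a special way" item from the commit message: FirstOrDefault is swapped for SingleOrDefault, which throws an InvalidOperationException when more than one series key matches, and the new catch block then logs every colliding key so the duplicate can be reproduced. A small standalone sketch of that LINQ behavior follows; the sample data is made up, not Kavita's.

// Minimal sketch (not Kavita code) of why SingleOrDefault is used for duplicate detection.
using System;
using System.Linq;

public static class DuplicateDetectionDemo
{
    public static void Main()
    {
        // Two normalized keys collide, which is the situation the new logging targets.
        var keys = new[] { "series a", "series a", "series b" };

        // FirstOrDefault silently returns the first match and hides the duplicate.
        Console.WriteLine(keys.FirstOrDefault(k => k == "series a"));

        try
        {
            // SingleOrDefault throws when more than one element matches,
            // which is what lets the catch block above log each colliding key.
            keys.SingleOrDefault(k => k == "series a");
        }
        catch (InvalidOperationException ex)
        {
            Console.WriteLine($"Duplicate detected: {ex.Message}");
        }
    }
}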
@@ -198,14 +215,32 @@ namespace API.Services.Tasks.Scanner
             var normalizedSeries = Parser.Parser.Normalize(info.Series);
             var normalizedLocalSeries = Parser.Parser.Normalize(info.LocalizedSeries);
-            // We use FirstOrDefault because this was introduced late in development and users might have 2 series with both names
-            var existingName =
-                _scannedSeries.FirstOrDefault(p =>
-                    (Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedSeries ||
-                     Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedLocalSeries) && p.Key.Format == info.Format)
-                .Key;
-            if (existingName != null && !string.IsNullOrEmpty(existingName.Name))
-            {
-                return existingName.Name;
-            }
+
+            try
+            {
+                var existingName =
+                    _scannedSeries.SingleOrDefault(p =>
+                        (Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedSeries ||
+                         Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedLocalSeries) &&
+                        p.Key.Format == info.Format)
+                    .Key;
+
+                if (existingName != null && !string.IsNullOrEmpty(existingName.Name))
+                {
+                    return existingName.Name;
+                }
+            }
+            catch (Exception ex)
+            {
+                _logger.LogCritical(ex, "Multiple series detected for {SeriesName} ({File})! This is critical to fix! There should only be 1", info.Series, info.FullFilePath);
+                var values = _scannedSeries.Where(p =>
+                    (Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedSeries ||
+                     Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedLocalSeries) &&
+                    p.Key.Format == info.Format);
+                foreach (var pair in values)
+                {
+                    _logger.LogCritical("Duplicate Series in DB matches with {SeriesName}: {DuplicateName}", info.Series, pair.Key.Name);
+                }
+
+            }

             return info.Series;
@@ -28,8 +28,14 @@ public interface IScannerService
     /// cover images if forceUpdate is true.
     /// </summary>
     /// <param name="libraryId">Library to scan against</param>
+    [DisableConcurrentExecution(60 * 60 * 60)]
+    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
     Task ScanLibrary(int libraryId);
+    [DisableConcurrentExecution(60 * 60 * 60)]
+    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
     Task ScanLibraries();
+    [DisableConcurrentExecution(60 * 60 * 60)]
+    [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
     Task ScanSeries(int libraryId, int seriesId, CancellationToken token);
 }

@@ -63,8 +69,6 @@ public class ScannerService : IScannerService
         _wordCountAnalyzerService = wordCountAnalyzerService;
     }

-    [DisableConcurrentExecution(timeoutInSeconds: 60 * 60 * 60)]
-    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
     public async Task ScanSeries(int libraryId, int seriesId, CancellationToken token)
     {
         var sw = new Stopwatch();
@@ -247,8 +251,6 @@ public class ScannerService : IScannerService
     }


-    [DisableConcurrentExecution(timeoutInSeconds: 60 * 60 * 60 * 4)]
-    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
     public async Task ScanLibraries()
     {
         _logger.LogInformation("Starting Scan of All Libraries");
@@ -267,8 +269,7 @@ public class ScannerService : IScannerService
     /// ie) all entities will be rechecked for new cover images and comicInfo.xml changes
     /// </summary>
     /// <param name="libraryId"></param>
-    [DisableConcurrentExecution(60 * 60 * 60)]
-    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
+
     public async Task ScanLibrary(int libraryId)
     {
         Library library;