Basic Stats (#1673)
* Refactored ResponseCache profiles into consts (a hedged sketch of the pattern follows this list).
* Refactored code to use an extension method for getting user library IDs (see the second sketch below).
* Started server statistics, added a charting library, and added a table sort column (not finished).
* Refactored code and have a fully working example of sortable headers. Still doesn't work with the default sorting state; will work on that later.
* Implemented file size, but it's too expensive, so it is commented out.
* Added a migration to provide extension and length/size information in the DB to allow for faster stat APIs (see the migration sketch below).
* Added the ability to force a library scan from library settings.
* Refactored some APIs to provide more of a file breakdown rather than just file size.
* Working on visualization of the file breakdown.
* Fixed the file breakdown visual.
* Fixed up two visualizations.
* Added back an API for member names; started work on top reads.
* Hooked up the other library types and username/days.
* Preparing to remove top reads and refactor into top users.
* Added LibraryId to AppUserProgress to help with complex lookups.
* Added the new LibraryId hook into some stats methods.
* Updated API methods to use LibraryId for progress.
* More places where LibraryId is needed.
* Added some high-level server stats.
* Got a ton done on server stats.
* Updated the default theme (dark) to be the default root variables. This allows user themes to override just what they want rather than maintain their own CSS variables.
* Implemented a monster query for top users by reading time. It's very slow and can likely be cleaned up (a simplified sketch follows this list).
* Hooked up top reads. The code needs a big refactor. Handing off for the Robbie treatment; I'll switch to user stats.
* Implemented the last five recently read series (broken) and added some basic CSS.
* Fixed the recently read query.
* Cleaned up the CSS a bit; Robbie, we need you.
* More CSS love.
* Cleaned up DTOs that aren't needed anymore.
* Fixed the top readers query.
* When calculating top readers, don't include read events where nothing is read (0 pages).
* Hooked the date into GetTopUsers.
* Hooked top readers up with days; refactored and cleaned up unused components.
* Fixed up a query.
* Started on a day-by-day breakdown, but going to take a break from stats.
* Added a temp task to run a migration manually for stats to work.
* Ensure OPDS-PS uses the new LibraryId for progress reporting.
* Fixed a code smell.
* Added some styling.
* Added more styles.
* Removed some debug stuff from user stats.
* Bump qs from 6.5.2 to 6.5.3 in /UI/Web. Bumps [qs](https://github.com/ljharb/qs) from 6.5.2 to 6.5.3.
  - [Release notes](https://github.com/ljharb/qs/releases)
  - [Changelog](https://github.com/ljharb/qs/blob/main/CHANGELOG.md)
  - [Commits](https://github.com/ljharb/qs/compare/v6.5.2...v6.5.3)
  ---
  updated-dependencies:
  - dependency-name: qs
    dependency-type: indirect
  ...
  Signed-off-by: dependabot[bot] <support@github.com>
* Tweaked some code for bad-data cases.
* Refactored a chapter lookup to remove an unneeded Volume join in five places across the code.
* API push.

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: Robbie Davis <robbie@therobbiedavis.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
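The ResponseCache refactor itself isn't visible in the diff shown on this page. As a rough illustration of the pattern, here is a minimal sketch, assuming a hypothetical ResponseCacheProfiles const holder, profile name, and duration (the PR's actual names and values aren't shown here); the attribute and CacheProfiles registration are standard ASP.NET Core:

```csharp
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.DependencyInjection;

// Hypothetical const holder; the PR's actual profile names/durations are not shown on this page.
public static class ResponseCacheProfiles
{
    public const string Statistics = "Statistics";
}

public static class CacheProfileSetup
{
    public static void Register(IServiceCollection services)
    {
        services.AddControllers(options =>
        {
            // Register the profile once; controllers then reference it by const
            // instead of repeating magic strings and durations per endpoint.
            options.CacheProfiles.Add(ResponseCacheProfiles.Statistics,
                new CacheProfile { Duration = 600, Location = ResponseCacheLocation.Client });
        });
    }
}

[ApiController]
[Route("api/stats")]
public class StatsController : ControllerBase
{
    [HttpGet("server")]
    [ResponseCache(CacheProfileName = ResponseCacheProfiles.Statistics)]
    public ActionResult GetServerStats() => Ok();
}
```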
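Likewise, the extension method for user library IDs isn't in the hunks below. A minimal sketch, assuming simplified AppUser/Library shapes (Kavita's real entities carry far more state):

```csharp
using System.Collections.Generic;
using System.Linq;

// Hypothetical entity shapes for illustration only.
public class Library { public int Id { get; set; } }
public class AppUser { public ICollection<Library> Libraries { get; set; } = new List<Library>(); }

public static class AppUserExtensions
{
    // Centralizes the "which libraries can this user see" lookup instead of
    // repeating the Select projection at every call site.
    public static IList<int> GetLibraryIds(this AppUser user)
    {
        return user.Libraries.Select(l => l.Id).ToList();
    }
}
```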
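The migration adding extension and size data is also outside the hunks below, but the diff's new writes to MangaFile.Extension and MangaFile.Bytes suggest its shape. A sketch with an assumed migration class name and SQLite column types:

```csharp
using Microsoft.EntityFrameworkCore.Migrations;

// Hypothetical migration name; the column names match the Extension/Bytes
// properties the diff below writes onto MangaFile.
public partial class MangaFileExtensionAndBytes : Migration
{
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.AddColumn<string>(
            name: "Extension",
            table: "MangaFile",
            type: "TEXT",
            nullable: true);

        migrationBuilder.AddColumn<long>(
            name: "Bytes",
            table: "MangaFile",
            type: "INTEGER",
            nullable: false,
            defaultValue: 0L);
    }

    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.DropColumn(name: "Extension", table: "MangaFile");
        migrationBuilder.DropColumn(name: "Bytes", table: "MangaFile");
    }
}
```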
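Finally, the top-readers query lives elsewhere in the PR. A simplified in-memory sketch of the zero-page filter and days window described above, using hypothetical entity shapes rather than the PR's actual EF Core query:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical shape; Kavita's AppUserProgress tracks more than this.
public class AppUserProgress
{
    public int AppUserId { get; set; }
    public int LibraryId { get; set; }
    public int PagesRead { get; set; }
    public DateTime LastModified { get; set; }
}

public static class TopReaders
{
    // Rank users by pages read within the last `days` days, skipping read
    // events where nothing was actually read (PagesRead == 0).
    public static IEnumerable<(int UserId, int Pages)> GetTopUsers(
        IEnumerable<AppUserProgress> progress, int days)
    {
        var cutoff = days > 0 ? DateTime.UtcNow.AddDays(-days) : DateTime.MinValue;
        return progress
            .Where(p => p.PagesRead > 0 && p.LastModified >= cutoff)
            .GroupBy(p => p.AppUserId)
            .Select(g => (UserId: g.Key, Pages: g.Sum(p => p.PagesRead)))
            .OrderByDescending(t => t.Pages);
    }
}
```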
This commit is contained in:
parent
4724dc5a76
commit
c361e66b35
106 changed files with 6898 additions and 170 deletions
@@ -27,7 +27,7 @@ public interface IProcessSeries
     /// </summary>
     /// <returns></returns>
     Task Prime();
-    Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library);
+    Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library, bool forceUpdate = false);
     void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false);
 }
 
@@ -75,7 +75,7 @@ public class ProcessSeries : IProcessSeries
         _tags = await _unitOfWork.TagRepository.GetAllTagsAsync();
     }
 
-    public async Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library)
+    public async Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library, bool forceUpdate = false)
     {
         if (!parsedInfos.Any()) return;
 
@@ -120,7 +120,7 @@ public class ProcessSeries : IProcessSeries
         // parsedInfos[0] is not the first volume or chapter. We need to find it using a ComicInfo check (as it uses firstParsedInfo for series sort)
         var firstParsedInfo = parsedInfos.FirstOrDefault(p => p.ComicInfo != null, firstInfo);
 
-        UpdateVolumes(series, parsedInfos);
+        UpdateVolumes(series, parsedInfos, forceUpdate);
         series.Pages = series.Volumes.Sum(v => v.Pages);
 
         series.NormalizedName = Parser.Parser.Normalize(series.Name);
@@ -430,7 +430,7 @@ public class ProcessSeries : IProcessSeries
         });
     }
 
-    private void UpdateVolumes(Series series, IList<ParserInfo> parsedInfos)
+    private void UpdateVolumes(Series series, IList<ParserInfo> parsedInfos, bool forceUpdate = false)
     {
         var startingVolumeCount = series.Volumes.Count;
         // Add new volumes and update chapters per volume
 
@@ -465,7 +465,7 @@ public class ProcessSeries : IProcessSeries
 
             _logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
             var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
-            UpdateChapters(series, volume, infos);
+            UpdateChapters(series, volume, infos, forceUpdate);
             volume.Pages = volume.Chapters.Sum(c => c.Pages);
 
             // Update all the metadata on the Chapters
@@ -512,7 +512,7 @@ public class ProcessSeries : IProcessSeries
             series.Name, startingVolumeCount, series.Volumes.Count);
     }
 
-    private void UpdateChapters(Series series, Volume volume, IList<ParserInfo> parsedInfos)
+    private void UpdateChapters(Series series, Volume volume, IList<ParserInfo> parsedInfos, bool forceUpdate = false)
     {
         // Add new chapters
         foreach (var info in parsedInfos)
 
@@ -546,7 +546,7 @@ public class ProcessSeries : IProcessSeries
             if (chapter == null) continue;
             // Add files
             var specialTreatment = info.IsSpecialInfo();
-            AddOrUpdateFileForChapter(chapter, info);
+            AddOrUpdateFileForChapter(chapter, info, forceUpdate);
             chapter.Number = Parser.Parser.MinNumberFromRange(info.Chapters) + string.Empty;
             chapter.Range = specialTreatment ? info.Filename : info.Chapters;
         }
 
@@ -572,22 +572,26 @@ public class ProcessSeries : IProcessSeries
         }
     }
 
-    private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info)
+    private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info, bool forceUpdate = false)
     {
         chapter.Files ??= new List<MangaFile>();
         var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
+        var fileInfo = _directoryService.FileSystem.FileInfo.FromFileName(info.FullFilePath);
         if (existingFile != null)
         {
             existingFile.Format = info.Format;
-            if (!_fileService.HasFileBeenModifiedSince(existingFile.FilePath, existingFile.LastModified) && existingFile.Pages != 0) return;
+            if (!forceUpdate && !_fileService.HasFileBeenModifiedSince(existingFile.FilePath, existingFile.LastModified) && existingFile.Pages != 0) return;
             existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format);
+            existingFile.Extension = fileInfo.Extension.ToLowerInvariant();
+            existingFile.Bytes = fileInfo.Length;
             // We skip updating DB here with last modified time so that metadata refresh can do it
         }
         else
         {
             var file = DbFactory.MangaFile(info.FullFilePath, info.Format, _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format));
             if (file == null) return;
 
+            file.Extension = fileInfo.Extension.ToLowerInvariant();
+            file.Bytes = fileInfo.Length;
             chapter.Files.Add(file);
         }
     }
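Taken together, the hunks above thread an optional forceUpdate flag from ProcessSeriesAsync down through UpdateVolumes and UpdateChapters to the per-file guard, so a forced scan can refresh page counts and the new Extension/Bytes fields even when a file's timestamp hasn't changed. A hypothetical call site for the new force-scan action (the real scanner plumbing isn't shown on this page):

```csharp
using System.Collections.Generic;
using System.Threading.Tasks;

// Hypothetical wiring for the "force a library scan from library settings" feature.
public class ForcedScanExample
{
    private readonly IProcessSeries _processSeries;

    public ForcedScanExample(IProcessSeries processSeries) => _processSeries = processSeries;

    // forceUpdate: true bypasses the HasFileBeenModifiedSince short-circuit in
    // AddOrUpdateFileForChapter, re-reading pages plus Extension/Bytes per file.
    public Task ScanAsync(IList<ParserInfo> parsedInfos, Library library)
        => _processSeries.ProcessSeriesAsync(parsedInfos, library, forceUpdate: true);
}
```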