Foundational Rework (#2745)
This commit is contained in:
parent 42cd6e9b3a
commit 4fa21fe1ca

92 changed files with 13330 additions and 650 deletions
@@ -365,12 +365,68 @@ public class ParseScannedFiles
             foreach (var series in scannedSeries.Keys)
             {
-                if (scannedSeries[series].Count > 0 && processSeriesInfos != null)
+                if (scannedSeries[series].Count <= 0 || processSeriesInfos == null) continue;
+
+                UpdateSortOrder(scannedSeries, series);
                 await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(false, scannedSeries[series]));
             }
         }
     }
 
+    private void UpdateSortOrder(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParsedSeries series)
+    {
+        try
+        {
+            // Set the Sort order per Volume
+            var volumes = scannedSeries[series].GroupBy(info => info.Volumes);
+            foreach (var volume in volumes)
+            {
+                var infos = scannedSeries[series].Where(info => info.Volumes == volume.Key).ToList();
+                IList<ParserInfo> chapters;
+                var specialTreatment = infos.TrueForAll(info => info.IsSpecial);
+
+                if (specialTreatment)
+                {
+                    chapters = infos
+                        .OrderBy(info => info.SpecialIndex)
+                        .ToList();
+                }
+                else
+                {
+                    chapters = infos
+                        .OrderByNatural(info => info.Chapters)
+                        .ToList();
+                }
+
+                var counter = 0f;
+                var prevIssue = string.Empty;
+                foreach (var chapter in chapters)
+                {
+                    if (float.TryParse(chapter.Chapters, out var parsedChapter))
+                    {
+                        counter = parsedChapter;
+                        if (!string.IsNullOrEmpty(prevIssue) && parsedChapter.Is(float.Parse(prevIssue)))
+                        {
+                            // Bump by 0.1
+                            counter += 0.1f;
+                        }
+                        chapter.IssueOrder = counter;
+                        prevIssue = $"{parsedChapter}";
+                    }
+                    else
+                    {
+                        chapter.IssueOrder = counter;
+                        counter++;
+                        prevIssue = chapter.Chapters;
+                    }
+                }
+            }
+        }
+        catch (Exception ex)
+        {
+            _logger.LogError(ex, "There was an issue setting IssueOrder");
+        }
+    }
+
     /// <summary>
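
Note (illustration, not part of the diff): a minimal standalone sketch of the IssueOrder loop above, showing how a duplicate numeric chapter is bumped by 0.1 and how non-numeric entries inherit the current counter. Kavita's float .Is() extension is approximated with a tolerance comparison and OrderByNatural is assumed to have already sorted the input; all names here are illustrative only.

    using System;
    using System.Collections.Generic;
    using System.Globalization;

    public static class IssueOrderSketch
    {
        // Mirrors UpdateSortOrder's inner loop: returns the computed order for each chapter string.
        public static IReadOnlyList<(string Chapter, float Order)> Compute(IEnumerable<string> chapters)
        {
            var result = new List<(string, float)>();
            var counter = 0f;
            var prevIssue = string.Empty;
            foreach (var chapter in chapters)
            {
                if (float.TryParse(chapter, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
                {
                    counter = parsed;
                    // Approximation of Kavita's float.Is() equality extension
                    if (float.TryParse(prevIssue, NumberStyles.Float, CultureInfo.InvariantCulture, out var prev)
                        && Math.Abs(parsed - prev) < 0.0001f)
                    {
                        counter += 0.1f; // duplicate issue number gets bumped by 0.1
                    }
                    result.Add((chapter, counter));
                    prevIssue = $"{parsed}";
                }
                else
                {
                    result.Add((chapter, counter)); // non-numeric entries share the current counter value
                    counter++;
                    prevIssue = chapter;
                }
            }
            return result;
        }
    }

    // Example: Compute(new[] {"1", "1", "2", "Omake"}) yields orders 1, 1.1, 2, 2.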
@@ -96,6 +96,7 @@ public class DefaultParser : IDefaultParser
         if (Parser.HasSpecialMarker(fileName))
         {
             ret.IsSpecial = true;
+            ret.SpecialIndex = Parser.ParseSpecialIndex(fileName);
             ret.Chapters = Parser.DefaultChapter;
             ret.Volumes = Parser.LooseLeafVolume;
@@ -113,6 +114,12 @@ public class DefaultParser : IDefaultParser
             ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
         }
 
+        // v0.8.x: Introducing a change where Specials will go in a separate Volume with a reserved number
+        if (ret.IsSpecial)
+        {
+            ret.Volumes = $"{Parser.SpecialVolumeNumber}";
+        }
+
         return ret.Series == string.Empty ? null : ret;
     }
@@ -1,4 +1,5 @@
 using System;
 using System.Collections.Generic;
+using System.Collections.Immutable;
 using System.IO;
 using System.Linq;
@@ -12,10 +13,16 @@ namespace API.Services.Tasks.Scanner.Parser;
 public static class Parser
 {
     // NOTE: If you change this, don't forget to change in the UI (see Series Detail)
-    public const string DefaultChapter = "0"; // -2147483648
-    public const string LooseLeafVolume = "0";
-    public const int DefaultChapterNumber = 0;
-    public const int LooseLeafVolumeNumber = 0;
+    public const string DefaultChapter = "-100000"; // -2147483648
+    public const string LooseLeafVolume = "-100000";
+    public const int DefaultChapterNumber = -100_000;
+    public const int LooseLeafVolumeNumber = -100_000;
+    /// <summary>
+    /// The Volume Number of Specials to reside in
+    /// </summary>
+    public const int SpecialVolumeNumber = 100_000;
+    public const string SpecialVolume = "100000";
 
     public static readonly TimeSpan RegexTimeout = TimeSpan.FromMilliseconds(500);
 
     public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg|\.webp|\.gif|\.avif)"; // Don't forget to update CoverChooser
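
Note (illustration, not part of the diff): with the rework, volume/chapter numbers fall into three bands - the reserved loose-leaf value (-100,000), real numbers, and the reserved Specials volume (100,000). The helper below is a hypothetical sketch of that partitioning, not a Kavita API; the constant values mirror the hunk above.

    public static class VolumeKindSketch
    {
        private const int SpecialVolumeNumber = 100_000;    // Parser.SpecialVolumeNumber
        private const int LooseLeafVolumeNumber = -100_000; // Parser.LooseLeafVolumeNumber

        public static string Describe(int volumeNumber) => volumeNumber switch
        {
            SpecialVolumeNumber => "Specials",
            LooseLeafVolumeNumber => "Loose-leaf chapters",
            _ => $"Volume {volumeNumber}"
        };
    }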
@@ -678,6 +685,13 @@ public static class Parser
         return SpecialMarkerRegex.IsMatch(filePath);
     }
 
+    public static int ParseSpecialIndex(string filePath)
+    {
+        var match = SpecialMarkerRegex.Match(filePath).Value.Replace("SP", string.Empty);
+        if (string.IsNullOrEmpty(match)) return 0;
+        return int.Parse(match);
+    }
+
     public static bool IsMangaSpecial(string filePath)
     {
         filePath = ReplaceUnderscores(filePath);
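
Note (illustration, not part of the diff): a usage sketch of ParseSpecialIndex; the file names are made up and this assumes SpecialMarkerRegex matches markers of the form "SP" + digits (the regex itself is outside this hunk).

    var withMarker = Parser.ParseSpecialIndex("My Series SP02 Epilogue.cbz");  // => 2
    var withoutMarker = Parser.ParseSpecialIndex("My Series Ch. 12.cbz");      // => 0 when no marker is found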
@@ -944,35 +958,52 @@ public static class Parser
     {
         try
         {
-            if (!Regex.IsMatch(range, @"^[\d\-.]+$", MatchOptions, RegexTimeout))
+            // Check if the range string is not null or empty
+            if (string.IsNullOrEmpty(range) || !Regex.IsMatch(range, @"^[\d\-.]+$", MatchOptions, RegexTimeout))
             {
-                return (float) 0.0;
+                return 0.0f;
             }
 
-            var tokens = range.Replace("_", string.Empty).Split("-");
-            return tokens.Min(t => t.AsFloat());
+            // Check if there is a range or not
+            if (Regex.IsMatch(range, @"\d-{1}\d"))
+            {
+
+                var tokens = range.Replace("_", string.Empty).Split("-", StringSplitOptions.RemoveEmptyEntries);
+                return tokens.Min(t => t.AsFloat());
+            }
+
+            return float.Parse(range);
         }
-        catch
+        catch (Exception)
         {
-            return (float) 0.0;
+            return 0.0f;
         }
     }
 
 
     public static float MaxNumberFromRange(string range)
     {
         try
         {
-            if (!Regex.IsMatch(range, @"^[\d\-.]+$", MatchOptions, RegexTimeout))
+            // Check if the range string is not null or empty
+            if (string.IsNullOrEmpty(range) || !Regex.IsMatch(range, @"^[\d\-.]+$", MatchOptions, RegexTimeout))
             {
-                return (float) 0.0;
+                return 0.0f;
             }
 
-            var tokens = range.Replace("_", string.Empty).Split("-");
-            return tokens.Max(t => t.AsFloat());
+            // Check if there is a range or not
+            if (Regex.IsMatch(range, @"\d-{1}\d"))
+            {
+
+                var tokens = range.Replace("_", string.Empty).Split("-", StringSplitOptions.RemoveEmptyEntries);
+                return tokens.Max(t => t.AsFloat());
+            }
+
+            return float.Parse(range);
         }
-        catch
+        catch (Exception)
         {
-            return (float) 0.0;
+            return 0.0f;
         }
     }
 
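
Note (illustration, not part of the diff): a few quick sanity checks on the reworked range parsing; the expected values follow directly from the code above, and the snippet assumes the Parser class from this hunk is in scope.

    System.Diagnostics.Debug.Assert(Parser.MinNumberFromRange("1-3") == 1f);  // hyphen range: min token
    System.Diagnostics.Debug.Assert(Parser.MaxNumberFromRange("1-3") == 3f);  // hyphen range: max token
    System.Diagnostics.Debug.Assert(Parser.MinNumberFromRange("5") == 5f);    // no "-": falls through to float.Parse
    System.Diagnostics.Debug.Assert(Parser.MaxNumberFromRange("") == 0f);     // null/empty short-circuits to 0.0f
    System.Diagnostics.Debug.Assert(Parser.MaxNumberFromRange("abc") == 0f);  // fails the ^[\d\-.]+$ check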
@@ -60,6 +60,10 @@ public class ParserInfo
     /// If the file contains no volume/chapter information or contains Special Keywords <see cref="Parser.MangaSpecialRegex"/>
     /// </summary>
     public bool IsSpecial { get; set; }
+    /// <summary>
+    /// If the file has a Special Marker explicitly, this will contain the index
+    /// </summary>
+    public int SpecialIndex { get; set; } = 0;
 
     /// <summary>
     /// Used for specials or books, stores what the UI should show.
@@ -67,6 +71,12 @@ public class ParserInfo
     /// </summary>
     public string Title { get; set; } = string.Empty;
 
+    /// <summary>
+    /// This can be filled in from ComicInfo.xml during scanning. Will update the SortOrder field on <see cref="Entities.Chapter"/>.
+    /// Falls back to Parsed Chapter number
+    /// </summary>
+    public float IssueOrder { get; set; }
+
     /// <summary>
     /// If the ParserInfo has the IsSpecial tag or both volumes and chapters are default aka 0
     /// </summary>
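
Note (illustration, not part of the diff): a ParserInfo as the reworked DefaultParser might populate it for a special file. The series and marker value are made up; the field values follow the hunks above.

    var info = new ParserInfo
    {
        Series = "My Series",
        IsSpecial = true,
        SpecialIndex = 2,                           // Parser.ParseSpecialIndex found an "SP02"-style marker
        Chapters = Parser.DefaultChapter,           // "-100000"
        Volumes = $"{Parser.SpecialVolumeNumber}",  // "100000": specials now live in a reserved volume
    };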
@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.Collections.Immutable;
+using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using System.Linq;
 using System.Threading.Tasks;
 using API.Data;
@@ -219,14 +220,6 @@ public class ProcessSeries : IProcessSeries
             _logger.LogCritical(ex,
                 "[ScannerService] There was an issue writing to the database for series {SeriesName}",
                 series.Name);
-            _logger.LogTrace("[ScannerService] Series Metadata Dump: {@Series}", series.Metadata);
-            _logger.LogTrace("[ScannerService] People Dump: {@People}", _people
-                .Select(p =>
-                    new {p.Id, p.Name, SeriesMetadataIds =
-                        p.SeriesMetadatas?.Select(m => m.Id),
-                        ChapterMetadataIds =
-                            p.ChapterMetadatas?.Select(m => m.Id)
-                                .ToList()}));
 
             await _eventHub.SendMessageAsync(MessageFactory.Error,
                 MessageFactory.ErrorEvent($"There was an issue writing to the DB for Series {series.OriginalName}",
@@ -314,8 +307,8 @@ public class ProcessSeries : IProcessSeries
         // The actual number of count's defined across all chapter's metadata
         series.Metadata.MaxCount = chapters.Max(chapter => chapter.Count);
 
-        var maxVolume = series.Volumes.Max(v => (int) Parser.Parser.MaxNumberFromRange(v.Name));
-        var maxChapter = chapters.Max(c => (int) Parser.Parser.MaxNumberFromRange(c.Range));
+        var maxVolume = (int) series.Volumes.Max(v => v.MaxNumber);
+        var maxChapter = (int) chapters.Max(c => c.MaxNumber);
 
         // Single books usually don't have a number in their Range (filename)
         if (series.Format == MangaFormat.Epub || series.Format == MangaFormat.Pdf && chapters.Count == 1)
@@ -544,10 +537,12 @@ public class ProcessSeries : IProcessSeries
         Volume? volume;
         try
         {
-            volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
+            // With the Name change to be formatted, Name no longer working because Name returns "1" and volumeNumber is "1.0", so we use LookupName as the original
+            volume = series.Volumes.SingleOrDefault(s => s.LookupName == volumeNumber);
         }
         catch (Exception ex)
         {
+            // TODO: Push this to UI in some way
             if (!ex.Message.Equals("Sequence contains more than one matching element")) throw;
             _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
             throw new KavitaException(
@@ -561,7 +556,8 @@ public class ProcessSeries : IProcessSeries
             series.Volumes.Add(volume);
         }
 
-        volume.Name = volumeNumber;
+        volume.LookupName = volumeNumber;
+        volume.Name = volume.GetNumberTitle();
 
         _logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
         var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
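
Note (illustration, not part of the diff): the LookupName/Name split above keeps the raw parsed volume string available for matching while Name becomes a formatted title. A hypothetical example, since GetNumberTitle()'s exact output is not shown in this hunk:

    var volumeNumber = "1.0";               // raw value parsed from the files (ParserInfo.Volumes)
    volume.LookupName = volumeNumber;       // used to match parsed infos back to this volume
    volume.Name = volume.GetNumberTitle();  // formatted display value, e.g. "1" rather than "1.0"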
@@ -586,7 +582,9 @@ public class ProcessSeries : IProcessSeries
         }
 
         // Remove existing volumes that aren't in parsedInfos
-        var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList();
+        var nonDeletedVolumes = series.Volumes
+            .Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.LookupName))
+            .ToList();
         if (series.Volumes.Count != nonDeletedVolumes.Count)
         {
             _logger.LogDebug("[ScannerService] Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",
@@ -597,8 +595,9 @@ public class ProcessSeries : IProcessSeries
             var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? string.Empty;
             if (!string.IsNullOrEmpty(file) && _directoryService.FileSystem.File.Exists(file))
             {
+                // This can happen when file is renamed and volume is removed
                 _logger.LogInformation(
-                    "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}",
+                    "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk (usually volume marker removed) File: {File}",
                     file);
             }
@@ -640,12 +639,19 @@ public class ProcessSeries : IProcessSeries
                 chapter.UpdateFrom(info);
             }
 
-            if (chapter == null) continue;
+            if (chapter == null)
+            {
+                continue;
+            }
             // Add files
             var specialTreatment = info.IsSpecialInfo();
             AddOrUpdateFileForChapter(chapter, info, forceUpdate);
 
+            // TODO: Investigate using the ChapterBuilder here
             chapter.Number = Parser.Parser.MinNumberFromRange(info.Chapters).ToString(CultureInfo.InvariantCulture);
-            chapter.Range = specialTreatment ? info.Filename : info.Chapters;
+            chapter.MinNumber = Parser.Parser.MinNumberFromRange(info.Chapters);
+            chapter.MaxNumber = Parser.Parser.MaxNumberFromRange(info.Chapters);
+            chapter.SortOrder = info.IssueOrder;
+            chapter.Range = chapter.GetNumberTitle();
         }
 
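
Note (illustration, not part of the diff): how a parsed info maps onto the new chapter number fields, following the assignments above; GetNumberTitle()'s exact formatting is assumed.

    // Given info.Chapters == "10-12" and info.IssueOrder == 10f:
    // chapter.MinNumber => 10f  (Parser.Parser.MinNumberFromRange("10-12"))
    // chapter.MaxNumber => 12f  (Parser.Parser.MaxNumberFromRange("10-12"))
    // chapter.SortOrder => 10f  (copied from info.IssueOrder, computed during scanning)
    // chapter.Range     => chapter.GetNumberTitle(), a display title derived from the numbers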
@@ -655,7 +661,7 @@ public class ProcessSeries : IProcessSeries
         {
             if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter))
             {
-                _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
+                _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.GetNumberTitle(), volume.Name, parsedInfos[0].Series);
                 volume.Chapters.Remove(existingChapter);
             }
             else
@@ -680,6 +686,7 @@ public class ProcessSeries : IProcessSeries
         if (!forceUpdate && !_fileService.HasFileBeenModifiedSince(existingFile.FilePath, existingFile.LastModified) && existingFile.Pages != 0) return;
         existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format);
         existingFile.Extension = fileInfo.Extension.ToLowerInvariant();
+        existingFile.FileName = Path.GetFileNameWithoutExtension(existingFile.FilePath);
         existingFile.Bytes = fileInfo.Length;
         // We skip updating DB here with last modified time so that metadata refresh can do it
     }