Comic Rework, New Scanner, Foundation Overhaul (is this a full release?) (#2780)
This commit is contained in: parent d7e9e7c832, commit 7552c3f5fa
182 changed files with 27630 additions and 3046 deletions
@@ -53,30 +53,30 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="CsvHelper" Version="30.1.0" />
<PackageReference Include="MailKit" Version="4.3.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="8.0.1">
<PackageReference Include="CsvHelper" Version="31.0.2" />
<PackageReference Include="MailKit" Version="4.4.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="8.0.3">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="AutoMapper.Extensions.Microsoft.DependencyInjection" Version="12.0.1" />
<PackageReference Include="Docnet.Core" Version="2.6.0" />
<PackageReference Include="EasyCaching.InMemory" Version="1.9.2" />
<PackageReference Include="ExCSS" Version="4.2.4" />
<PackageReference Include="ExCSS" Version="4.2.5" />
<PackageReference Include="Flurl" Version="3.0.7" />
<PackageReference Include="Flurl.Http" Version="3.2.4" />
<PackageReference Include="Hangfire" Version="1.8.9" />
<PackageReference Include="Hangfire.InMemory" Version="0.7.0" />
<PackageReference Include="Hangfire" Version="1.8.11" />
<PackageReference Include="Hangfire.InMemory" Version="0.8.0" />
<PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" />
<PackageReference Include="Hangfire.Storage.SQLite" Version="0.4.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.58" />
<PackageReference Include="Hangfire.Storage.SQLite" Version="0.4.1" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.59" />
<PackageReference Include="MarkdownDeep.NET.Core" Version="1.5.0.4" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.8.9" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.8.11" />
<PackageReference Include="Microsoft.AspNetCore.SignalR" Version="1.1.0" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="8.0.1" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="8.0.1" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="8.0.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="8.0.1" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="8.0.3" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="8.0.3" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="8.0.3" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="8.0.3" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="8.0.0" />
<PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="3.0.0" />
<PackageReference Include="MimeTypeMapOfficial" Version="1.0.17" />
@@ -94,16 +94,16 @@
<PackageReference Include="Serilog.Sinks.File" Version="5.0.0" />
<PackageReference Include="Serilog.Sinks.SignalR.Core" Version="0.1.2" />
<PackageReference Include="SharpCompress" Version="0.36.0" />
<PackageReference Include="SixLabors.ImageSharp" Version="3.1.2" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="9.19.0.84025">
<PackageReference Include="SixLabors.ImageSharp" Version="3.1.3" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="9.21.0.86780">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
<PackageReference Include="Swashbuckle.AspNetCore.Filters" Version="8.0.0" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="7.3.1" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.15" />
<PackageReference Include="System.Drawing.Common" Version="8.0.1" />
<PackageReference Include="Swashbuckle.AspNetCore.Filters" Version="8.0.1" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="7.4.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
<PackageReference Include="System.Drawing.Common" Version="8.0.3" />
<PackageReference Include="VersOne.Epub" Version="3.3.1" />
</ItemGroup>
@@ -1,4 +1,5 @@
using System.Collections.Generic;
using API.Extensions;
using API.Services.Tasks.Scanner.Parser;
namespace API.Comparators;
@@ -6,28 +7,28 @@ namespace API.Comparators;
#nullable enable
/// <summary>
/// Sorts chapters based on their Number. Uses natural ordering of doubles.
/// Sorts chapters based on their Number. Uses natural ordering of doubles. Specials always LAST.
/// </summary>
public class ChapterSortComparer : IComparer<double>
public class ChapterSortComparerDefaultLast : IComparer<float>
{
/// <summary>
/// Normal sort for 2 doubles. 0 always comes last
/// Normal sort for 2 doubles. DefaultChapterNumber always comes last
/// </summary>
/// <param name="x"></param>
/// <param name="y"></param>
/// <returns></returns>
public int Compare(double x, double y)
public int Compare(float x, float y)
{
if (x == Parser.DefaultChapterNumber && y == Parser.DefaultChapterNumber) return 0;
if (x.Is(Parser.DefaultChapterNumber) && y.Is(Parser.DefaultChapterNumber)) return 0;
// if x is 0, it comes second
if (x == Parser.DefaultChapterNumber) return 1;
if (x.Is(Parser.DefaultChapterNumber)) return 1;
// if y is 0, it comes second
if (y == Parser.DefaultChapterNumber) return -1;
if (y.Is(Parser.DefaultChapterNumber)) return -1;
return x.CompareTo(y);
}
public static readonly ChapterSortComparer Default = new ChapterSortComparer();
public static readonly ChapterSortComparerDefaultLast Default = new ChapterSortComparerDefaultLast();
}
/// <summary>
@@ -37,33 +38,43 @@ public class ChapterSortComparer : IComparer<double>
/// This is represented by Chapter 0, Chapter 81.
/// </example>
/// </summary>
public class ChapterSortComparerZeroFirst : IComparer<double>
public class ChapterSortComparerDefaultFirst : IComparer<float>
{
public int Compare(double x, double y)
public int Compare(float x, float y)
{
if (x == Parser.DefaultChapterNumber && y == Parser.DefaultChapterNumber) return 0;
if (x.Is(Parser.DefaultChapterNumber) && y.Is(Parser.DefaultChapterNumber)) return 0;
// if x is 0, it comes first
if (x == Parser.DefaultChapterNumber) return -1;
if (x.Is(Parser.DefaultChapterNumber)) return -1;
// if y is 0, it comes first
if (y == Parser.DefaultChapterNumber) return 1;
if (y.Is(Parser.DefaultChapterNumber)) return 1;
return x.CompareTo(y);
}
public static readonly ChapterSortComparerZeroFirst Default = new ChapterSortComparerZeroFirst();
public static readonly ChapterSortComparerDefaultFirst Default = new ChapterSortComparerDefaultFirst();
}
public class SortComparerZeroLast : IComparer<double>
/// <summary>
/// Sorts chapters based on their Number. Uses natural ordering of doubles. Specials always LAST.
/// </summary>
public class ChapterSortComparerSpecialsLast : IComparer<float>
{
public int Compare(double x, double y)
/// <summary>
/// Normal sort for 2 doubles. DefaultSpecialNumber always comes last
/// </summary>
/// <param name="x"></param>
/// <param name="y"></param>
/// <returns></returns>
public int Compare(float x, float y)
{
if (x == Parser.DefaultChapterNumber && y == Parser.DefaultChapterNumber) return 0;
// if x is 0, it comes last
if (x == Parser.DefaultChapterNumber) return 1;
// if y is 0, it comes last
if (y == Parser.DefaultChapterNumber) return -1;
if (x.Is(Parser.SpecialVolumeNumber) && y.Is(Parser.SpecialVolumeNumber)) return 0;
// if x is 0, it comes second
if (x.Is(Parser.SpecialVolumeNumber)) return 1;
// if y is 0, it comes second
if (y.Is(Parser.SpecialVolumeNumber)) return -1;
return x.CompareTo(y);
}
public static readonly SortComparerZeroLast Default = new SortComparerZeroLast();
public static readonly ChapterSortComparerSpecialsLast Default = new ChapterSortComparerSpecialsLast();
}
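Editor's note (not part of the commit): a minimal sketch of how the renamed float-based comparers above order chapter numbers, assuming Parser.DefaultChapterNumber is the sentinel the diff uses for "no chapter number".

// Editor's sketch; illustrative only, not shipped code.
var numbers = new List<float> { 3f, Parser.DefaultChapterNumber, 1.5f, 2f };
numbers.Sort(ChapterSortComparerDefaultLast.Default);  // 1.5, 2, 3, then the sentinel last
numbers.Sort(ChapterSortComparerDefaultFirst.Default); // sentinel first, then 1.5, 2, 3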
@@ -33,13 +33,14 @@ public class CblController : BaseApiController
/// <param name="file">FormBody with parameter name of cbl</param>
/// <returns></returns>
[HttpPost("validate")]
public async Task<ActionResult<CblImportSummaryDto>> ValidateCbl([FromForm(Name = "cbl")] IFormFile file)
public async Task<ActionResult<CblImportSummaryDto>> ValidateCbl([FromForm(Name = "cbl")] IFormFile file,
[FromForm(Name = "comicVineMatching")] bool comicVineMatching = false)
{
var userId = User.GetUserId();
try
{
var cbl = await SaveAndLoadCblFile(file);
var importSummary = await _readingListService.ValidateCblFile(userId, cbl);
var importSummary = await _readingListService.ValidateCblFile(userId, cbl, comicVineMatching);
importSummary.FileName = file.FileName;
return Ok(importSummary);
}
@@ -83,13 +84,14 @@ public class CblController : BaseApiController
/// <param name="dryRun">If true, will only emulate the import but not perform. This should be done to preview what will happen</param>
/// <returns></returns>
[HttpPost("import")]
public async Task<ActionResult<CblImportSummaryDto>> ImportCbl([FromForm(Name = "cbl")] IFormFile file, [FromForm(Name = "dryRun")] bool dryRun = false)
public async Task<ActionResult<CblImportSummaryDto>> ImportCbl([FromForm(Name = "cbl")] IFormFile file,
[FromForm(Name = "dryRun")] bool dryRun = false, [FromForm(Name = "comicVineMatching")] bool comicVineMatching = false)
{
try
{
var userId = User.GetUserId();
var cbl = await SaveAndLoadCblFile(file);
var importSummary = await _readingListService.CreateReadingListFromCbl(userId, cbl, dryRun);
var importSummary = await _readingListService.CreateReadingListFromCbl(userId, cbl, dryRun, comicVineMatching);
importSummary.FileName = file.FileName;
return Ok(importSummary);
} catch (ArgumentNullException)
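Editor's note: both CBL endpoints above now take an optional comicVineMatching form field alongside the cbl file. A hypothetical client-side call could look like the sketch below; the base address and the "api/cbl/import" route are assumptions inferred from the [HttpPost("import")] attribute, not taken from the commit.

// Hypothetical client call; host and route are assumptions.
using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000/") };
using var form = new MultipartFormDataContent
{
    { new ByteArrayContent(await File.ReadAllBytesAsync("my-list.cbl")), "cbl", "my-list.cbl" },
    { new StringContent("true"), "dryRun" },            // preview only
    { new StringContent("true"), "comicVineMatching" }  // new flag in this commit
};
var response = await client.PostAsync("api/cbl/import", form);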
@@ -140,7 +140,7 @@ public class DownloadController : BaseApiController
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume!.SeriesId);
try
{
return await DownloadFiles(files, $"download_{User.GetUsername()}_c{chapterId}", $"{series!.Name} - Chapter {chapter.Number}.zip");
return await DownloadFiles(files, $"download_{User.GetUsername()}_c{chapterId}", $"{series!.Name} - Chapter {chapter.GetNumberTitle()}.zip");
}
catch (KavitaException ex)
{
@@ -32,7 +32,11 @@ public class LicenseController(
public async Task<ActionResult<bool>> HasValidLicense(bool forceCheck = false)
{
var result = await licenseService.HasActiveLicense(forceCheck);
await taskScheduler.ScheduleKavitaPlusTasks();
if (result)
{
await taskScheduler.ScheduleKavitaPlusTasks();
}
return Ok(result);
}
@@ -70,7 +70,7 @@ public class OpdsController : BaseApiController
};
private readonly FilterV2Dto _filterV2Dto = new FilterV2Dto();
private readonly ChapterSortComparer _chapterSortComparer = ChapterSortComparer.Default;
private readonly ChapterSortComparerDefaultLast _chapterSortComparerDefaultLast = ChapterSortComparerDefaultLast.Default;
private const int PageSize = 20;
public OpdsController(IUnitOfWork unitOfWork, IDownloadService downloadService,
@@ -857,8 +857,8 @@ public class OpdsController : BaseApiController
var seriesDetail = await _seriesService.GetSeriesDetail(seriesId, userId);
foreach (var volume in seriesDetail.Volumes)
{
var chapters = (await _unitOfWork.ChapterRepository.GetChaptersAsync(volume.Id)).OrderBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture),
_chapterSortComparer);
var chapters = (await _unitOfWork.ChapterRepository.GetChaptersAsync(volume.Id))
.OrderBy(x => x.MinNumber, _chapterSortComparerDefaultLast);
foreach (var chapterId in chapters.Select(c => c.Id))
{
@@ -907,8 +907,8 @@ public class OpdsController : BaseApiController
var libraryType = await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(series.LibraryId);
var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
var chapters =
(await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId)).OrderBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture),
_chapterSortComparer);
(await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId))
.OrderBy(x => x.MinNumber, _chapterSortComparerDefaultLast);
var feed = CreateFeed(series.Name + " - Volume " + volume!.Name + $" - {_seriesService.FormatChapterName(userId, libraryType)}s ",
$"{prefix}{apiKey}/series/{seriesId}/volume/{volumeId}", apiKey, prefix);
SetFeedId(feed, $"series-{series.Id}-volume-{volume.Id}-{_seriesService.FormatChapterName(userId, libraryType)}s");
@@ -1101,18 +1101,18 @@ public class OpdsController : BaseApiController
var title = $"{series.Name}";
if (volume!.Chapters.Count == 1)
if (volume!.Chapters.Count == 1 && !volume.IsSpecial())
{
var volumeLabel = await _localizationService.Translate(userId, "volume-num", string.Empty);
SeriesService.RenameVolumeName(volume.Chapters.First(), volume, libraryType, volumeLabel);
if (volume.Name != Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
SeriesService.RenameVolumeName(volume, libraryType, volumeLabel);
if (!volume.IsLooseLeaf())
{
title += $" - {volume.Name}";
}
}
else if (!volume.IsLooseLeaf())
else if (!volume.IsLooseLeaf() && !volume.IsSpecial())
{
title = $"{series.Name} - Volume {volume.Name} - {await _seriesService.FormatChapterTitle(userId, chapter, libraryType)}";
title = $"{series.Name} - Volume {volume.Name} - {await _seriesService.FormatChapterTitle(userId, chapter, libraryType)}";
}
else
{
@@ -13,14 +13,25 @@ public class ChapterDto : IHasReadTimeEstimate
{
public int Id { get; init; }
/// <summary>
/// Range of chapters. Chapter 2-4 -> "2-4". Chapter 2 -> "2".
/// Range of chapters. Chapter 2-4 -> "2-4". Chapter 2 -> "2". If special, will be special name.
/// </summary>
/// <remarks>This can be something like 19.HU or Alpha as some comics are like this</remarks>
public string Range { get; init; } = default!;
/// <summary>
/// Smallest number of the Range.
/// </summary>
[Obsolete("Use MinNumber and MaxNumber instead")]
public string Number { get; init; } = default!;
/// <summary>
/// This may be 0 under the circumstance that the Issue is "Alpha" or other non-standard numbers.
/// </summary>
public float MinNumber { get; init; }
public float MaxNumber { get; init; }
/// <summary>
/// The sorting order of the Chapter. Inherits from MinNumber, but can be overridden.
/// </summary>
public float SortOrder { get; set; }
/// <summary>
/// Total number of pages in all MangaFiles
/// </summary>
public int Pages { get; init; }
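Editor's illustration (hypothetical values, not from the commit): based on the doc comments above, a file parsed as chapters 2-4 would surface through the new numeric fields roughly as follows, with SortOrder inheriting from MinNumber unless overridden.

// Editor's sketch of the new ChapterDto fields; values are illustrative only.
var dto = new ChapterDto
{
    Range = "2-4",
    MinNumber = 2f,
    MaxNumber = 4f,
    SortOrder = 2f // inherits from MinNumber unless explicitly overridden
};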
@@ -48,6 +48,9 @@ public enum FilterField
/// <summary>
/// Average rating from Kavita+ - Not usable for non-licensed users
/// </summary>
AverageRating = 28
AverageRating = 28,
Imprint = 29,
Team = 30,
Location = 31
}
@@ -18,10 +18,13 @@ public class ChapterMetadataDto
public ICollection<PersonDto> Characters { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Pencillers { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Inkers { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Imprints { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Colorists { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Letterers { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Editors { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Translators { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Teams { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Locations { get; set; } = new List<PersonDto>();
public ICollection<GenreTagDto> Genres { get; set; } = new List<GenreTagDto>();
@@ -1,4 +1,5 @@
using System.Xml.Serialization;
using API.Data.Metadata;
namespace API.DTOs.ReadingLists.CBL;
@@ -21,6 +22,12 @@ public class CblBook
[XmlAttribute("Year")]
public string Year { get; set; }
/// <summary>
/// Main Series, Annual, Limited Series
/// </summary>
/// <remarks>This maps to <see cref="ComicInfo">Format</see> tag</remarks>
[XmlAttribute("Format")]
public string Format { get; set; }
/// <summary>
/// The underlying filetype
/// </summary>
/// <remarks>This is not part of the standard and explicitly for Kavita to support non cbz/cbr files</remarks>
@@ -22,4 +22,5 @@ public class RelatedSeriesDto
public IEnumerable<SeriesDto> Doujinshis { get; set; } = default!;
public IEnumerable<SeriesDto> Parent { get; set; } = default!;
public IEnumerable<SeriesDto> Editions { get; set; } = default!;
public IEnumerable<SeriesDto> Annuals { get; set; } = default!;
}
@@ -17,4 +17,5 @@ public class UpdateRelatedSeriesDto
public IList<int> AlternativeVersions { get; set; } = default!;
public IList<int> Doujinshis { get; set; } = default!;
public IList<int> Editions { get; set; } = default!;
public IList<int> Annuals { get; set; } = default!;
}
@@ -30,10 +30,14 @@ public class SeriesMetadataDto
public ICollection<PersonDto> Characters { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Pencillers { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Inkers { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Imprints { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Colorists { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Letterers { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Editors { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Translators { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Teams { get; set; } = new List<PersonDto>();
public ICollection<PersonDto> Locations { get; set; } = new List<PersonDto>();
/// <summary>
/// Highest Age Rating from all Chapters
/// </summary>
@@ -80,10 +84,13 @@ public class SeriesMetadataDto
public bool ColoristLocked { get; set; }
public bool EditorLocked { get; set; }
public bool InkerLocked { get; set; }
public bool ImprintLocked { get; set; }
public bool LettererLocked { get; set; }
public bool PencillerLocked { get; set; }
public bool PublisherLocked { get; set; }
public bool TranslatorLocked { get; set; }
public bool TeamLocked { get; set; }
public bool LocationLocked { get; set; }
public bool CoverArtistLocked { get; set; }
public bool ReleaseYearLocked { get; set; }
@@ -14,5 +14,5 @@ public class ReadHistoryEvent
public required string SeriesName { get; set; } = default!;
public DateTime ReadDate { get; set; }
public int ChapterId { get; set; }
public required string ChapterNumber { get; set; } = default!;
public required float ChapterNumber { get; set; } = default!;
}
API/DTOs/TachiyomiChapterDto.cs (new file, 12 lines)
@@ -0,0 +1,12 @@
namespace API.DTOs;
/// <summary>
/// This is explicitly for Tachiyomi. Number field was removed in v0.8.0, but Tachiyomi needs it for the hacks.
/// </summary>
public class TachiyomiChapterDto : ChapterDto
{
/// <summary>
/// Smallest number of the Range.
/// </summary>
public string Number { get; init; } = default!;
}
@@ -3,6 +3,7 @@ using System;
using System.Collections.Generic;
using API.Entities;
using API.Entities.Interfaces;
using API.Extensions;
using API.Services.Tasks.Scanner.Parser;
namespace API.DTOs;
@@ -20,7 +21,7 @@ public class VolumeDto : IHasReadTimeEstimate
/// This will map to MinNumber. Number was removed in v0.7.13.8/v0.7.14
/// </summary>
[Obsolete("Use MinNumber")]
public float Number { get; set; }
public int Number { get; set; }
public int Pages { get; set; }
public int PagesRead { get; set; }
public DateTime LastModifiedUtc { get; set; }
@@ -50,6 +51,15 @@ public class VolumeDto : IHasReadTimeEstimate
/// <returns></returns>
public bool IsLooseLeaf()
{
return Math.Abs(this.MinNumber - Parser.LooseLeafVolumeNumber) < 0.001f;
return MinNumber.Is(Parser.LooseLeafVolumeNumber);
}
/// <summary>
/// Does this volume hold only specials?
/// </summary>
/// <returns></returns>
public bool IsSpecial()
{
return MinNumber.Is(Parser.SpecialVolumeNumber);
}
}
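Editor's note: throughout this diff, explicit epsilon checks such as the removed Math.Abs(this.MinNumber - Parser.LooseLeafVolumeNumber) < 0.001f are swapped for an Is() extension from API.Extensions. The extension's body is not shown in this commit, so the following is an assumed equivalent for orientation, not the shipped implementation.

// Assumed equivalent of the Is() float extension used above; not the shipped code.
public static class FloatExtensions
{
    public static bool Is(this float value, float other) => Math.Abs(value - other) < 0.001f;
}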
@@ -156,10 +156,15 @@ public sealed class DataContext : IdentityDbContext<AppUser, AppRole, int,
{
if (e.FromQuery || e.Entry.State != EntityState.Added || e.Entry.Entity is not IEntityDate entity) return;
entity.Created = DateTime.Now;
entity.LastModified = DateTime.Now;
entity.CreatedUtc = DateTime.UtcNow;
entity.LastModifiedUtc = DateTime.UtcNow;
// This allows for mocking
if (entity.Created == DateTime.MinValue)
{
entity.Created = DateTime.Now;
entity.CreatedUtc = DateTime.UtcNow;
}
}
private static void OnEntityStateChanged(object? sender, EntityStateChangedEventArgs e)
API/Data/ManualMigrations/ManualMigrateMixedSpecials.cs (new file, 140 lines)
@@ -0,0 +1,140 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Helpers.Builders;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
public class UserProgressCsvRecord
{
public bool IsSpecial { get; set; }
public int AppUserId { get; set; }
public int PagesRead { get; set; }
public string Range { get; set; }
public string Number { get; set; }
public float MinNumber { get; set; }
public int SeriesId { get; set; }
public int VolumeId { get; set; }
}
/// <summary>
/// v0.8.0 migration to move Specials into their own volume and retain user progress.
/// </summary>
public static class MigrateMixedSpecials
{
public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "ManualMigrateMixedSpecials"))
{
return;
}
logger.LogCritical(
"Running ManualMigrateMixedSpecials migration - Please be patient, this may take some time. This is not an error");
// First, group all the progresses into different series
// Get each series and move the specials from old volume to the new Volume()
// Create a new progress event from existing and store the Id of existing progress event to delete it
// Save per series
var progress = await dataContext.AppUserProgresses
.Join(dataContext.Chapter, p => p.ChapterId, c => c.Id, (p, c) => new UserProgressCsvRecord
{
IsSpecial = c.IsSpecial,
AppUserId = p.AppUserId,
PagesRead = p.PagesRead,
Range = c.Range,
Number = c.Number,
MinNumber = c.MinNumber,
SeriesId = p.SeriesId,
VolumeId = p.VolumeId
})
.Where(d => d.IsSpecial || d.Number == "0")
.Join(dataContext.Volume, d => d.VolumeId, v => v.Id, (d, v) => new
{
ProgressRecord = d,
Volume = v
})
.Where(d => d.Volume.Name == "0")
.ToListAsync();
// First, group all the progresses into different series
logger.LogCritical("Migrating {Count} progress events to new Volume structure - This may take over 10 minutes depending on size of DB. Please wait", progress.Count);
var progressesGroupedBySeries = progress.GroupBy(p => p.ProgressRecord.SeriesId);
foreach (var seriesGroup in progressesGroupedBySeries)
{
// Get each series and move the specials from the old volume to the new Volume
var seriesId = seriesGroup.Key;
var specialsInSeries = seriesGroup
.Where(p => p.ProgressRecord.IsSpecial)
.ToList();
// Get distinct Volumes by Id. For each one, create it then create the progress events
var distinctVolumes = specialsInSeries.DistinctBy(d => d.Volume.Id);
foreach (var distinctVolume in distinctVolumes)
{
// Create a new volume for each series with the appropriate number (-100000)
var chapters = await dataContext.Chapter
.Where(c => c.VolumeId == distinctVolume.Volume.Id && c.IsSpecial).ToListAsync();
var newVolume = new VolumeBuilder(Parser.SpecialVolume)
.WithSeriesId(seriesId)
.WithChapters(chapters)
.Build();
dataContext.Volume.Add(newVolume);
await dataContext.SaveChangesAsync(); // Save changes to generate the newVolumeId
// Migrate the progress event to the new volume
distinctVolume.ProgressRecord.VolumeId = newVolume.Id;
logger.LogInformation("Moving {Count} chapters from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
chapters.Count, distinctVolume.Volume.Id, newVolume.Id);
// Move the special chapters from the old volume to the new Volume
var specialChapters = await dataContext.Chapter
.Where(c => c.VolumeId == distinctVolume.ProgressRecord.VolumeId && c.IsSpecial)
.ToListAsync();
foreach (var specialChapter in specialChapters)
{
// Update the VolumeId on the existing progress event
specialChapter.VolumeId = newVolume.Id;
}
await dataContext.SaveChangesAsync();
}
}
// Save changes after processing all series
if (dataContext.ChangeTracker.HasChanges())
{
await dataContext.SaveChangesAsync();
}
// Update all Volumes with Name as "0" -> Special
logger.LogCritical("Updating all Volumes with Name 0 to SpecialNumber");
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "ManualMigrateMixedSpecials",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
logger.LogCritical(
"Running ManualMigrateMixedSpecials migration - Completed. This is not an error");
}
}
API/Data/ManualMigrations/MigrateChapterFields.cs (new file, 89 lines)
@@ -0,0 +1,89 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// Introduced in v0.8.0, this migrates the existing Chapter and Volume 0 -> Parser defined, MangaFile.FileName
/// </summary>
public static class MigrateChapterFields
{
public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateChapterFields"))
{
return;
}
logger.LogCritical(
"Running MigrateChapterFields migration - Please be patient, this may take some time. This is not an error");
// Update all volumes only have specials in them (rare)
var volumesWithJustSpecials = dataContext.Volume
.Include(v => v.Chapters)
.Where(v => v.Name == "0" && v.Chapters.All(c => c.IsSpecial))
.ToList();
logger.LogCritical(
"Running MigrateChapterFields migration - Updating {Count} volumes that only have specials in them", volumesWithJustSpecials.Count);
foreach (var volume in volumesWithJustSpecials)
{
volume.Name = $"{Parser.SpecialVolumeNumber}";
volume.MinNumber = Parser.SpecialVolumeNumber;
volume.MaxNumber = Parser.SpecialVolumeNumber;
}
// Update all volumes that only have loose leafs in them
var looseLeafVolumes = dataContext.Volume
.Include(v => v.Chapters)
.Where(v => v.Name == "0" && v.Chapters.All(c => !c.IsSpecial))
.ToList();
logger.LogCritical(
"Running MigrateChapterFields migration - Updating {Count} volumes that only have loose leaf chapters in them", looseLeafVolumes.Count);
foreach (var volume in looseLeafVolumes)
{
volume.Name = $"{Parser.DefaultChapterNumber}";
volume.MinNumber = Parser.DefaultChapterNumber;
volume.MaxNumber = Parser.DefaultChapterNumber;
}
// Update all MangaFile
logger.LogCritical(
"Running MigrateChapterFields migration - Updating all MangaFiles");
foreach (var mangaFile in dataContext.MangaFile)
{
mangaFile.FileName = Parser.RemoveExtensionIfSupported(mangaFile.FilePath);
}
var looseLeafChapters = await dataContext.Chapter.Where(c => c.Number == "0").ToListAsync();
logger.LogCritical(
"Running MigrateChapterFields migration - Updating {Count} loose leaf chapters", looseLeafChapters.Count);
foreach (var chapter in looseLeafChapters)
{
chapter.Number = Parser.DefaultChapter;
chapter.MinNumber = Parser.DefaultChapterNumber;
chapter.MaxNumber = Parser.DefaultChapterNumber;
}
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "MigrateChapterFields",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
logger.LogCritical(
"Running MigrateChapterFields migration - Completed. This is not an error");
}
}
API/Data/ManualMigrations/MigrateChapterNumber.cs (new file, 50 lines)
@@ -0,0 +1,50 @@
using System;
using System.Threading.Tasks;
using API.Entities;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// Introduced in v0.8.0, this migrates the existing Chapter Range -> Chapter Min/Max Number
/// </summary>
public static class MigrateChapterNumber
{
public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateChapterNumber"))
{
return;
}
logger.LogCritical(
"Running MigrateChapterNumber migration - Please be patient, this may take some time. This is not an error");
// Get all volumes
foreach (var chapter in dataContext.Chapter)
{
if (chapter.IsSpecial)
{
chapter.MinNumber = Parser.DefaultChapterNumber;
chapter.MaxNumber = Parser.DefaultChapterNumber;
continue;
}
chapter.MinNumber = Parser.MinNumberFromRange(chapter.Range);
chapter.MaxNumber = Parser.MaxNumberFromRange(chapter.Range);
}
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "MigrateChapterNumber",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
logger.LogCritical(
"Running MigrateChapterNumber migration - Completed. This is not an error");
}
}
API/Data/ManualMigrations/MigrateChapterRange.cs (new file, 55 lines)
@@ -0,0 +1,55 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Helpers.Builders;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.8.0 changed the range to that it doesn't have filename by default
/// </summary>
public static class MigrateChapterRange
{
public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateChapterRange"))
{
return;
}
logger.LogCritical(
"Running MigrateChapterRange migration - Please be patient, this may take some time. This is not an error");
var chapters = await dataContext.Chapter.ToListAsync();
foreach (var chapter in chapters)
{
if (Parser.MinNumberFromRange(chapter.Range) == 0.0f)
{
chapter.Range = chapter.GetNumberTitle();
}
}
// Save changes after processing all series
if (dataContext.ChangeTracker.HasChanges())
{
await dataContext.SaveChangesAsync();
}
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "MigrateChapterRange",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
logger.LogCritical(
"Running MigrateChapterRange migration - Completed. This is not an error");
}
}
@@ -15,9 +15,8 @@ public static class MigrateLibrariesToHaveAllFileTypes
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
if (await dataContext.Library.AnyAsync(l => l.LibraryFileTypes.Count == 0))
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateLibrariesToHaveAllFileTypes"))
{
logger.LogCritical("Running MigrateLibrariesToHaveAllFileTypes migration - Completed. This is not an error");
return;
}
@@ -16,8 +16,6 @@ public static class MigrateManualHistory
{
if (await dataContext.ManualMigrationHistory.AnyAsync())
{
logger.LogCritical(
"Running MigrateManualHistory migration - Completed. This is not an error");
return;
}
@@ -4,6 +4,7 @@ using System.Text.RegularExpressions;
using System.Threading.Tasks;
using API.DTOs.Filtering.v2;
using API.Helpers;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
@@ -21,8 +22,12 @@ public static class MigrateSmartFilterEncoding
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
logger.LogCritical("Running MigrateSmartFilterEncoding migration - Please be patient, this may take some time. This is not an error");
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateSmartFilterEncoding"))
{
return;
}
logger.LogCritical("Running MigrateSmartFilterEncoding migration - Please be patient, this may take some time. This is not an error");
var smartFilters = dataContext.AppUserSmartFilter.ToList();
foreach (var filter in smartFilters)
@@ -14,6 +14,10 @@ public static class MigrateUserLibrarySideNavStream
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateUserLibrarySideNavStream"))
{
return;
}
var usersWithLibraryStreams = await dataContext.AppUser
.Include(u => u.SideNavStreams)
API/Data/ManualMigrations/MigrateVolumeLookupName.cs (new file, 41 lines)
@@ -0,0 +1,41 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
public static class MigrateVolumeLookupName
{
public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateVolumeLookupName"))
{
return;
}
logger.LogCritical(
"Running MigrateVolumeLookupName migration - Please be patient, this may take some time. This is not an error");
// Update all volumes to have LookupName as after this migration, name isn't used for lookup
var volumes = dataContext.Volume.ToList();
foreach (var volume in volumes)
{
volume.LookupName = volume.Name;
}
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "MigrateVolumeLookupName",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
logger.LogCritical(
"Running MigrateVolumeLookupName migration - Completed. This is not an error");
}
}
@@ -13,8 +13,13 @@ namespace API.Data.ManualMigrations;
/// </summary>
public static class MigrateVolumeNumber
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateVolumeNumber"))
{
return;
}
if (await dataContext.Volume.AnyAsync(v => v.MaxNumber > 0))
{
logger.LogCritical(
@@ -20,6 +20,11 @@ public static class MigrateWantToReadExport
{
try
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateWantToReadExport"))
{
return;
}
var importFile = Path.Join(directoryService.ConfigDirectory, "want-to-read-migration.csv");
if (File.Exists(importFile))
{
@@ -6,6 +6,7 @@ using API.Data.Repositories;
using API.Entities;
using API.Services;
using CsvHelper;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
@@ -15,8 +16,14 @@ namespace API.Data.ManualMigrations;
/// </summary>
public static class MigrateWantToReadImport
{
public static async Task Migrate(IUnitOfWork unitOfWork, IDirectoryService directoryService, ILogger<Program> logger)
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, IDirectoryService directoryService, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateWantToReadImport"))
{
return;
}
var importFile = Path.Join(directoryService.ConfigDirectory, "want-to-read-migration.csv");
var outputFile = Path.Join(directoryService.ConfigDirectory, "imported-want-to-read-migration.csv");
@@ -127,7 +127,11 @@ public class ComicInfo
public string CoverArtist { get; set; } = string.Empty;
public string Editor { get; set; } = string.Empty;
public string Publisher { get; set; } = string.Empty;
public string Imprint { get; set; } = string.Empty;
public string Characters { get; set; } = string.Empty;
public string Teams { get; set; } = string.Empty;
public string Locations { get; set; } = string.Empty;
public static AgeRating ConvertAgeRatingToEnum(string value)
{
@@ -151,9 +155,12 @@ public class ComicInfo
info.Letterer = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Letterer);
info.Penciller = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Penciller);
info.Publisher = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Publisher);
info.Imprint = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Imprint);
info.Characters = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Characters);
info.Translator = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Translator);
info.CoverArtist = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.CoverArtist);
info.Teams = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Teams);
info.Locations = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Locations);
// We need to convert GTIN to ISBN
if (!string.IsNullOrEmpty(info.GTIN))
@@ -174,7 +181,12 @@ public class ComicInfo
if (!string.IsNullOrEmpty(info.Number))
{
info.Number = info.Number.Replace(",", "."); // Corrective measure for non English OSes
info.Number = info.Number.Trim().Replace(",", "."); // Corrective measure for non English OSes
}
if (!string.IsNullOrEmpty(info.Volume))
{
info.Volume = info.Volume.Trim();
}
}
API/Data/Migrations/20240214232436_ChapterNumber.Designer.cs (generated, new file, 2877 lines; diff suppressed because it is too large)
API/Data/Migrations/20240214232436_ChapterNumber.cs (new file, 40 lines)
@@ -0,0 +1,40 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class ChapterNumber : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<float>(
name: "MaxNumber",
table: "Chapter",
type: "REAL",
nullable: false,
defaultValue: 0f);
migrationBuilder.AddColumn<float>(
name: "MinNumber",
table: "Chapter",
type: "REAL",
nullable: false,
defaultValue: 0f);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "MaxNumber",
table: "Chapter");
migrationBuilder.DropColumn(
name: "MinNumber",
table: "Chapter");
}
}
}
API/Data/Migrations/20240216000223_MangaFileNameTemp.Designer.cs (generated, new file, 2880 lines; diff suppressed because it is too large)
API/Data/Migrations/20240216000223_MangaFileNameTemp.cs (new file, 28 lines)
@@ -0,0 +1,28 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class MangaFileNameTemp : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "FileName",
table: "MangaFile",
type: "TEXT",
nullable: true);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "FileName",
table: "MangaFile");
}
}
}
API/Data/Migrations/20240222125420_ChapterIssueSort.Designer.cs (generated, new file, 2883 lines; diff suppressed because it is too large)
API/Data/Migrations/20240222125420_ChapterIssueSort.cs (new file, 29 lines)
@@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class ChapterIssueSort : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<float>(
name: "SortOrder",
table: "Chapter",
type: "REAL",
nullable: false,
defaultValue: 0f);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "SortOrder",
table: "Chapter");
}
}
}
API/Data/Migrations/20240225235816_VolumeLookupName.Designer.cs (generated, new file, 2886 lines; diff suppressed because it is too large)
API/Data/Migrations/20240225235816_VolumeLookupName.cs (new file, 28 lines)
@@ -0,0 +1,28 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class VolumeLookupName : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "LookupName",
table: "Volume",
type: "TEXT",
nullable: true);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "LookupName",
table: "Volume");
}
}
}
API/Data/Migrations/20240309140117_SeriesImprints.Designer.cs (generated, new file, 2889 lines; diff suppressed because it is too large)
API/Data/Migrations/20240309140117_SeriesImprints.cs (new file, 29 lines)
@@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class SeriesImprints : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "ImprintLocked",
table: "SeriesMetadata",
type: "INTEGER",
nullable: false,
defaultValue: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "ImprintLocked",
table: "SeriesMetadata");
}
}
}
API/Data/Migrations/20240313112552_SeriesLowestFolderPath.Designer.cs (generated, new file, 2892 lines; diff suppressed because it is too large)
API/Data/Migrations/20240313112552_SeriesLowestFolderPath.cs (new file, 28 lines)
@@ -0,0 +1,28 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class SeriesLowestFolderPath : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "LowestFolderPath",
table: "Series",
type: "TEXT",
nullable: true);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "LowestFolderPath",
table: "Series");
}
}
}
API/Data/Migrations/20240314194402_TeamsAndLocations.Designer.cs (generated, new file, 2898 lines; diff suppressed because it is too large)
API/Data/Migrations/20240314194402_TeamsAndLocations.cs (new file, 40 lines)
@@ -0,0 +1,40 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class TeamsAndLocations : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "LocationLocked",
table: "SeriesMetadata",
type: "INTEGER",
nullable: false,
defaultValue: false);
migrationBuilder.AddColumn<bool>(
name: "TeamLocked",
table: "SeriesMetadata",
type: "INTEGER",
nullable: false,
defaultValue: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "LocationLocked",
table: "SeriesMetadata");
migrationBuilder.DropColumn(
name: "TeamLocked",
table: "SeriesMetadata");
}
}
}
@@ -15,7 +15,7 @@ namespace API.Data.Migrations
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder.HasAnnotation("ProductVersion", "8.0.1");
modelBuilder.HasAnnotation("ProductVersion", "8.0.3");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
@@ -679,9 +679,15 @@ namespace API.Data.Migrations
b.Property<int>("MaxHoursToRead")
.HasColumnType("INTEGER");
b.Property<float>("MaxNumber")
.HasColumnType("REAL");
b.Property<int>("MinHoursToRead")
.HasColumnType("INTEGER");
b.Property<float>("MinNumber")
.HasColumnType("REAL");
b.Property<string>("Number")
.HasColumnType("TEXT");
@@ -697,6 +703,9 @@ namespace API.Data.Migrations
b.Property<string>("SeriesGroup")
.HasColumnType("TEXT");
b.Property<float>("SortOrder")
.HasColumnType("REAL");
b.Property<string>("StoryArc")
.HasColumnType("TEXT");
@@ -973,6 +982,9 @@ namespace API.Data.Migrations
b.Property<string>("Extension")
.HasColumnType("TEXT");
b.Property<string>("FileName")
.HasColumnType("TEXT");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
@@ -1241,6 +1253,9 @@ namespace API.Data.Migrations
b.Property<bool>("GenresLocked")
.HasColumnType("INTEGER");
b.Property<bool>("ImprintLocked")
.HasColumnType("INTEGER");
b.Property<bool>("InkerLocked")
.HasColumnType("INTEGER");
@@ -1253,6 +1268,9 @@ namespace API.Data.Migrations
b.Property<bool>("LettererLocked")
.HasColumnType("INTEGER");
b.Property<bool>("LocationLocked")
.HasColumnType("INTEGER");
b.Property<int>("MaxCount")
.HasColumnType("INTEGER");
@@ -1290,6 +1308,9 @@ namespace API.Data.Migrations
b.Property<bool>("TagsLocked")
.HasColumnType("INTEGER");
b.Property<bool>("TeamLocked")
.HasColumnType("INTEGER");
b.Property<int>("TotalCount")
.HasColumnType("INTEGER");
@@ -1665,6 +1686,9 @@ namespace API.Data.Migrations
b.Property<bool>("LocalizedNameLocked")
.HasColumnType("INTEGER");
b.Property<string>("LowestFolderPath")
.HasColumnType("TEXT");
b.Property<int>("MaxHoursToRead")
.HasColumnType("INTEGER");
@@ -1839,6 +1863,9 @@ namespace API.Data.Migrations
b.Property<DateTime>("LastModifiedUtc")
.HasColumnType("TEXT");
b.Property<string>("LookupName")
.HasColumnType("TEXT");
b.Property<int>("MaxHoursToRead")
.HasColumnType("INTEGER");
@ -167,9 +167,10 @@ public class AppUserProgressRepository : IAppUserProgressRepository
                (appUserProgresses, chapter) => new {appUserProgresses, chapter})
            .Where(p => p.appUserProgresses.SeriesId == seriesId && p.appUserProgresses.AppUserId == userId &&
                        p.appUserProgresses.PagesRead >= p.chapter.Pages)
            .Select(p => p.chapter.Range)
            .Where(p => p.chapter.MaxNumber != Parser.SpecialVolumeNumber)
            .Select(p => p.chapter.MaxNumber)
            .ToListAsync();
        return list.Count == 0 ? 0 : list.DefaultIfEmpty().Where(d => d != null).Max(d => (int) Math.Floor(Parser.MaxNumberFromRange(d)));
        return list.Count == 0 ? 0 : (int) list.DefaultIfEmpty().Max(d => d);
    }

    public async Task<float> GetHighestFullyReadVolumeForSeries(int seriesId, int userId)

@ -179,6 +180,7 @@ public class AppUserProgressRepository : IAppUserProgressRepository
                (appUserProgresses, chapter) => new {appUserProgresses, chapter})
            .Where(p => p.appUserProgresses.SeriesId == seriesId && p.appUserProgresses.AppUserId == userId &&
                        p.appUserProgresses.PagesRead >= p.chapter.Pages)
            .Where(p => p.chapter.MaxNumber != Parser.SpecialVolumeNumber)
            .Select(p => p.chapter.Volume.MaxNumber)
            .ToListAsync();
        return list.Count == 0 ? 0 : list.DefaultIfEmpty().Max();

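Illustrative sketch (not part of this commit): the reworked query above computes the highest fully-read chapter from the numeric MaxNumber column instead of re-parsing the string Range. A minimal stand-alone approximation of that aggregation, assuming the caller has already filtered down to fully-read, non-special chapter numbers:

using System.Collections.Generic;
using System.Linq;

public static class HighestReadExample
{
    // Largest fully-read MaxNumber, truncated to int, or 0 when nothing qualifies.
    public static int HighestFullyReadChapter(IReadOnlyCollection<float> fullyReadMaxNumbers)
    {
        return fullyReadMaxNumbers.Count == 0 ? 0 : (int) fullyReadMaxNumbers.Max();
    }
}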
@ -78,7 +78,7 @@ public class ChapterRepository : IChapterRepository
|
|||
.Where(c => c.Id == chapterId)
|
||||
.Join(_context.Volume, c => c.VolumeId, v => v.Id, (chapter, volume) => new
|
||||
{
|
||||
ChapterNumber = chapter.Range,
|
||||
ChapterNumber = chapter.MinNumber,
|
||||
VolumeNumber = volume.Name,
|
||||
VolumeId = volume.Id,
|
||||
chapter.IsSpecial,
|
||||
|
|
@ -102,8 +102,8 @@ public class ChapterRepository : IChapterRepository
|
|||
})
|
||||
.Select(data => new ChapterInfoDto()
|
||||
{
|
||||
ChapterNumber = data.ChapterNumber,
|
||||
VolumeNumber = data.VolumeNumber + string.Empty,
|
||||
ChapterNumber = data.ChapterNumber + string.Empty, // TODO: Fix this
|
||||
VolumeNumber = data.VolumeNumber + string.Empty, // TODO: Fix this
|
||||
VolumeId = data.VolumeId,
|
||||
IsSpecial = data.IsSpecial,
|
||||
SeriesId = data.SeriesId,
|
||||
|
|
@ -175,6 +175,7 @@ public class ChapterRepository : IChapterRepository
|
|||
{
|
||||
return await _context.Chapter
|
||||
.Includes(includes)
|
||||
.OrderBy(c => c.SortOrder)
|
||||
.FirstOrDefaultAsync(c => c.Id == chapterId);
|
||||
}
|
||||
|
||||
|
|
@ -187,6 +188,7 @@ public class ChapterRepository : IChapterRepository
|
|||
{
|
||||
return await _context.Chapter
|
||||
.Where(c => c.VolumeId == volumeId)
|
||||
.OrderBy(c => c.SortOrder)
|
||||
.ToListAsync();
|
||||
}
|
||||
|
||||
|
|
@ -267,10 +269,16 @@ public class ChapterRepository : IChapterRepository
|
|||
return chapter;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Includes Volumes
|
||||
/// </summary>
|
||||
/// <param name="seriesId"></param>
|
||||
/// <returns></returns>
|
||||
public IEnumerable<Chapter> GetChaptersForSeries(int seriesId)
|
||||
{
|
||||
return _context.Chapter
|
||||
.Where(c => c.Volume.SeriesId == seriesId)
|
||||
.OrderBy(c => c.SortOrder)
|
||||
.Include(c => c.Volume)
|
||||
.AsEnumerable();
|
||||
}
|
||||
|
|
|
|||
|
|
@ -34,6 +34,7 @@ public interface IExternalSeriesMetadataRepository
    Task<bool> ExternalSeriesMetadataNeedsRefresh(int seriesId);
    Task<SeriesDetailPlusDto> GetSeriesDetailPlusDto(int seriesId);
    Task LinkRecommendationsToSeries(Series series);
    Task LinkRecommendationsToSeries(int seriesId);
    Task<bool> IsBlacklistedSeries(int seriesId);
    Task CreateBlacklistedSeries(int seriesId, bool saveChanges = true);
    Task RemoveFromBlacklist(int seriesId);

@ -179,6 +180,13 @@ public class ExternalSeriesMetadataRepository : IExternalSeriesMetadataRepositor
        return seriesDetailPlusDto;
    }

    public async Task LinkRecommendationsToSeries(int seriesId)
    {
        var series = await _context.Series.Where(s => s.Id == seriesId).AsNoTracking().SingleOrDefaultAsync();
        if (series == null) return;
        await LinkRecommendationsToSeries(series);
    }

    /// <summary>
    /// Searches Recommendations without a SeriesId on record and attempts to link based on Series Name/Localized Name
    /// </summary>

@ -318,7 +318,7 @@ public class LibraryRepository : ILibraryRepository
    /// <returns></returns>
    public async Task<bool> DoAnySeriesFoldersMatch(IEnumerable<string> folders)
    {
        var normalized = folders.Select(Services.Tasks.Scanner.Parser.Parser.NormalizePath);
        var normalized = folders.Select(Parser.NormalizePath);
        return await _context.Series.AnyAsync(s => normalized.Contains(s.FolderPath));
    }

@ -498,6 +498,7 @@ public class SeriesRepository : ISeriesRepository
|
|||
.Include(c => c.Files)
|
||||
.Where(c => EF.Functions.Like(c.TitleName, $"%{searchQuery}%")
|
||||
|| EF.Functions.Like(c.ISBN, $"%{searchQuery}%")
|
||||
|| EF.Functions.Like(c.Range, $"%{searchQuery}%")
|
||||
)
|
||||
.Where(c => c.Files.All(f => fileIds.Contains(f.Id)))
|
||||
.AsSplitQuery()
|
||||
|
|
@ -1183,6 +1184,9 @@ public class SeriesRepository : ISeriesRepository
|
|||
FilterField.Letterer => query.HasPeople(true, statement.Comparison, (IList<int>) value),
|
||||
FilterField.Colorist => query.HasPeople(true, statement.Comparison, (IList<int>) value),
|
||||
FilterField.Inker => query.HasPeople(true, statement.Comparison, (IList<int>) value),
|
||||
FilterField.Imprint => query.HasPeople(true, statement.Comparison, (IList<int>) value),
|
||||
FilterField.Team => query.HasPeople(true, statement.Comparison, (IList<int>) value),
|
||||
FilterField.Location => query.HasPeople(true, statement.Comparison, (IList<int>) value),
|
||||
FilterField.Penciller => query.HasPeople(true, statement.Comparison, (IList<int>) value),
|
||||
FilterField.Writers => query.HasPeople(true, statement.Comparison, (IList<int>) value),
|
||||
FilterField.Genres => query.HasGenre(true, statement.Comparison, (IList<int>) value),
|
||||
|
|
@ -1817,19 +1821,7 @@ public class SeriesRepository : ISeriesRepository
|
|||
AlternativeSettings = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.AlternativeSetting, userRating),
|
||||
AlternativeVersions = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.AlternativeVersion, userRating),
|
||||
Doujinshis = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Doujinshi, userRating),
|
||||
// Parent = await _context.Series
|
||||
// .SelectMany(s =>
|
||||
// s.TargetSeries.Where(r => r.TargetSeriesId == seriesId
|
||||
// && usersSeriesIds.Contains(r.TargetSeriesId)
|
||||
// && r.RelationKind != RelationKind.Prequel
|
||||
// && r.RelationKind != RelationKind.Sequel
|
||||
// && r.RelationKind != RelationKind.Edition)
|
||||
// .Select(sr => sr.Series))
|
||||
// .RestrictAgainstAgeRestriction(userRating)
|
||||
// .AsSplitQuery()
|
||||
// .AsNoTracking()
|
||||
// .ProjectTo<SeriesDto>(_mapper.ConfigurationProvider)
|
||||
// .ToListAsync(),
|
||||
Annuals = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Annual, userRating),
|
||||
Parent = await _context.SeriesRelation
|
||||
.Where(r => r.TargetSeriesId == seriesId
|
||||
&& usersSeriesIds.Contains(r.TargetSeriesId)
|
||||
|
|
@ -1891,8 +1883,8 @@ public class SeriesRepository : ISeriesRepository
|
|||
VolumeId = c.VolumeId,
|
||||
ChapterId = c.Id,
|
||||
Format = c.Volume.Series.Format,
|
||||
ChapterNumber = c.Number,
|
||||
ChapterRange = c.Range,
|
||||
ChapterNumber = c.MinNumber + string.Empty, // TODO: Refactor this
|
||||
ChapterRange = c.Range, // TODO: Refactor this
|
||||
IsSpecial = c.IsSpecial,
|
||||
VolumeNumber = c.Volume.MinNumber,
|
||||
ChapterTitle = c.Title,
|
||||
|
|
@ -2063,7 +2055,7 @@ public class SeriesRepository : ISeriesRepository
|
|||
foreach (var series in info)
|
||||
{
|
||||
if (series.FolderPath == null) continue;
|
||||
if (!map.ContainsKey(series.FolderPath))
|
||||
if (!map.TryGetValue(series.FolderPath, out var value))
|
||||
{
|
||||
map.Add(series.FolderPath, new List<SeriesModified>()
|
||||
{
|
||||
|
|
@ -2072,9 +2064,8 @@ public class SeriesRepository : ISeriesRepository
|
|||
}
|
||||
else
|
||||
{
|
||||
map[series.FolderPath].Add(series);
|
||||
value.Add(series);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return map;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
using System.Collections.Generic;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
|
|
@ -6,6 +7,7 @@ using API.DTOs;
|
|||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Extensions.QueryExtensions;
|
||||
using API.Services;
|
||||
using AutoMapper;
|
||||
using AutoMapper.QueryableExtensions;
|
||||
|
|
@ -14,6 +16,15 @@ using Microsoft.EntityFrameworkCore;
|
|||
|
||||
namespace API.Data.Repositories;
|
||||
|
||||
[Flags]
|
||||
public enum VolumeIncludes
|
||||
{
|
||||
None = 1,
|
||||
Chapters = 2,
|
||||
People = 4,
|
||||
Tags = 8,
|
||||
}
|
||||
|
||||
public interface IVolumeRepository
|
||||
{
|
||||
void Add(Volume volume);
|
||||
|
|
@ -22,7 +33,7 @@ public interface IVolumeRepository
|
|||
Task<IList<MangaFile>> GetFilesForVolume(int volumeId);
|
||||
Task<string?> GetVolumeCoverImageAsync(int volumeId);
|
||||
Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds);
|
||||
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
|
||||
Task<IList<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId, VolumeIncludes includes = VolumeIncludes.Chapters);
|
||||
Task<Volume?> GetVolumeAsync(int volumeId);
|
||||
Task<VolumeDto?> GetVolumeDtoAsync(int volumeId, int userId);
|
||||
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
|
||||
|
|
@ -129,6 +140,7 @@ public class VolumeRepository : IVolumeRepository
|
|||
.Include(vol => vol.Chapters)
|
||||
.ThenInclude(c => c.Files)
|
||||
.AsSplitQuery()
|
||||
.OrderBy(v => v.MinNumber)
|
||||
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
|
||||
.SingleOrDefaultAsync(vol => vol.Id == volumeId);
|
||||
|
||||
|
|
@ -177,22 +189,22 @@ public class VolumeRepository : IVolumeRepository
|
|||
/// <param name="seriesId"></param>
|
||||
/// <param name="userId"></param>
|
||||
/// <returns></returns>
|
||||
public async Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId)
|
||||
public async Task<IList<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId, VolumeIncludes includes = VolumeIncludes.Chapters)
|
||||
{
|
||||
var volumes = await _context.Volume
|
||||
.Where(vol => vol.SeriesId == seriesId)
|
||||
.Include(vol => vol.Chapters)
|
||||
.ThenInclude(c => c.People)
|
||||
.Include(vol => vol.Chapters)
|
||||
.ThenInclude(c => c.Tags)
|
||||
.Includes(includes)
|
||||
.OrderBy(volume => volume.MinNumber)
|
||||
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
|
||||
.AsNoTracking()
|
||||
.AsSplitQuery()
|
||||
.ToListAsync();
|
||||
|
||||
await AddVolumeModifiers(userId, volumes);
|
||||
SortSpecialChapters(volumes);
|
||||
|
||||
foreach (var volume in volumes)
|
||||
{
|
||||
volume.Chapters = volume.Chapters.OrderBy(c => c.SortOrder).ToList();
|
||||
}
|
||||
|
||||
return volumes;
|
||||
}
|
||||
|
|
@ -213,15 +225,6 @@ public class VolumeRepository : IVolumeRepository
|
|||
}
|
||||
|
||||
|
||||
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
|
||||
{
|
||||
foreach (var v in volumes.WhereLooseLeaf())
|
||||
{
|
||||
v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private async Task AddVolumeModifiers(int userId, IReadOnlyCollection<VolumeDto> volumes)
|
||||
{
|
||||
var volIds = volumes.Select(s => s.Id);
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using API.Entities.Enums;
using API.Entities.Interfaces;
using API.Extensions;
using API.Services.Tasks.Scanner.Parser;

namespace API.Entities;

@ -10,14 +12,27 @@ public class Chapter : IEntityDate, IHasReadTimeEstimate
{
    public int Id { get; set; }
    /// <summary>
    /// Range of numbers. Chapter 2-4 -> "2-4". Chapter 2 -> "2".
    /// Range of numbers. Chapter 2-4 -> "2-4". Chapter 2 -> "2". If the chapter is a special, will return the Special Name
    /// </summary>
    public required string Range { get; set; }
    /// <summary>
    /// Smallest number of the Range. Can be a partial like Chapter 4.5
    /// </summary>
    [Obsolete("Use MinNumber and MaxNumber instead")]
    public required string Number { get; set; }
    /// <summary>
    /// Minimum Chapter Number.
    /// </summary>
    public float MinNumber { get; set; }
    /// <summary>
    /// Maximum Chapter Number
    /// </summary>
    public float MaxNumber { get; set; }
    /// <summary>
    /// The sorting order of the Chapter. Inherits from MinNumber, but can be overridden.
    /// </summary>
    public float SortOrder { get; set; }
    /// <summary>
    /// The files that represent this Chapter
    /// </summary>
    public ICollection<MangaFile> Files { get; set; } = null!;

@ -44,6 +59,7 @@ public class Chapter : IEntityDate, IHasReadTimeEstimate
    /// Used for books/specials to display custom title. For non-specials/books, will be set to <see cref="Range"/>
    /// </summary>
    public string? Title { get; set; }

    /// <summary>
    /// Age Rating for the issue/chapter
    /// </summary>

@ -130,10 +146,48 @@ public class Chapter : IEntityDate, IHasReadTimeEstimate
        if (IsSpecial)
        {
            Number = Parser.DefaultChapter;
            MinNumber = Parser.DefaultChapterNumber;
            MaxNumber = Parser.DefaultChapterNumber;
        }
        // NOTE: This doesn't work well for all because Pdf usually should use into.Title or even filename
        Title = (IsSpecial && info.Format == MangaFormat.Epub)
            ? info.Title
            : Range;
            : Parser.RemoveExtensionIfSupported(Range);

        var specialTreatment = info.IsSpecialInfo();
        Range = specialTreatment ? info.Filename : info.Chapters;
    }

    /// <summary>
    /// Returns the Chapter Number. If the chapter is a range, returns that, formatted.
    /// </summary>
    /// <returns></returns>
    public string GetNumberTitle()
    {
        if (MinNumber.Is(MaxNumber))
        {
            if (MinNumber.Is(Parser.DefaultChapterNumber) && IsSpecial)
            {
                return Parser.RemoveExtensionIfSupported(Title);
            }

            if (MinNumber.Is(0) && !float.TryParse(Range, out _))
            {
                return $"{Range}";
            }

            return $"{MinNumber}";
        }
        return $"{MinNumber}-{MaxNumber}";
    }

    /// <summary>
    /// Is the Chapter representing a single Volume (volume 1.cbz). If so, Min/Max will be Default and will not be special
    /// </summary>
    /// <returns></returns>
    public bool IsSingleVolumeChapter()
    {
        return MinNumber.Is(Parser.DefaultChapterNumber) && !IsSpecial;
    }
}

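Illustrative sketch (not part of this commit): GetNumberTitle above folds the new MinNumber/MaxNumber pair into a display string. A stand-alone approximation of the core formatting rule, using a hypothetical helper rather than Kavita's Chapter entity:

using System;

public static class NumberTitleExample
{
    // Shows only the single-number vs. range formatting; special and non-numeric Range handling is omitted.
    public static string FormatNumberTitle(float min, float max)
    {
        const float tolerance = 0.001f;
        return Math.Abs(min - max) < tolerance ? $"{min}" : $"{min}-{max}";
    }
}
// FormatNumberTitle(4f, 4f) -> "4"
// FormatNumberTitle(2f, 4f) -> "2-4"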
@ -29,4 +29,10 @@ public enum LibraryType
    /// </summary>
    [Description("Light Novel")]
    LightNovel = 4,
    /// <summary>
    /// Uses Comic regex for filename parsing, uses ComicVine type of Parsing. Will replace Comic type in future
    /// </summary>
    [Description("Comic (ComicVine)")]
    ComicVine = 5,

}

@ -24,7 +24,11 @@ public enum PersonRole
    /// <summary>
    /// The Translator
    /// </summary>
    Translator = 12
    Translator = 12,
    /// <summary>
    /// The publisher before another Publisher bought
    /// </summary>
    Imprint = 13,
    Team = 14,
    Location = 15
}

@ -71,6 +71,11 @@ public enum RelationKind
    /// Same story, could be translation, colorization... Different edition of the series
    /// </summary>
    [Description("Edition")]
    Edition = 13
    Edition = 13,
    /// <summary>
    /// The target series is an annual of the Series
    /// </summary>
    [Description("Annual")]
    Annual = 14

}

@ -13,6 +13,10 @@ public class MangaFile : IEntityDate
{
    public int Id { get; set; }
    /// <summary>
    /// The filename without extension
    /// </summary>
    public string FileName { get; set; }
    /// <summary>
    /// Absolute path to the archive file
    /// </summary>
    public required string FilePath { get; set; }

@ -68,14 +68,16 @@ public class SeriesMetadata : IHasConcurrencyToken
    public bool ColoristLocked { get; set; }
    public bool EditorLocked { get; set; }
    public bool InkerLocked { get; set; }
    public bool ImprintLocked { get; set; }
    public bool LettererLocked { get; set; }
    public bool PencillerLocked { get; set; }
    public bool PublisherLocked { get; set; }
    public bool TranslatorLocked { get; set; }
    public bool TeamLocked { get; set; }
    public bool LocationLocked { get; set; }
    public bool CoverArtistLocked { get; set; }
    public bool ReleaseYearLocked { get; set; }

    // Relationship
    public Series Series { get; set; } = null!;
    public int SeriesId { get; set; }

@ -64,6 +64,11 @@ public class Series : IEntityDate, IHasReadTimeEstimate
    /// <remarks><see cref="Services.Tasks.Scanner.Parser.Parser.NormalizePath"/> must be used before setting</remarks>
    public string? FolderPath { get; set; }
    /// <summary>
    /// Lowest path (that is under library root) that contains all files for the series.
    /// </summary>
    /// <remarks><see cref="Services.Tasks.Scanner.Parser.Parser.NormalizePath"/> must be used before setting</remarks>
    public string? LowestFolderPath { get; set; }
    /// <summary>
    /// Last time the folder was scanned
    /// </summary>
    public DateTime LastFolderScanned { get; set; }

@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using API.Entities.Interfaces;
using API.Extensions;
using API.Services.Tasks.Scanner.Parser;

namespace API.Entities;

@ -13,6 +15,10 @@ public class Volume : IEntityDate, IHasReadTimeEstimate
    /// <remarks>For Books with Series_index, this will map to the Series Index.</remarks>
    public required string Name { get; set; }
    /// <summary>
    /// This is just the original Parsed volume number for lookups
    /// </summary>
    public string LookupName { get; set; }
    /// <summary>
    /// The minimum number in the Name field in Int form
    /// </summary>
    /// <remarks>Removed in v0.7.13.8, this was an int and we need the ability to have 0.5 volumes render on the UI</remarks>

@ -55,4 +61,17 @@ public class Volume : IEntityDate, IHasReadTimeEstimate
    public Series Series { get; set; } = null!;
    public int SeriesId { get; set; }

    /// <summary>
    /// Returns the Chapter Number. If the chapter is a range, returns that, formatted.
    /// </summary>
    /// <returns></returns>
    public string GetNumberTitle()
    {
        if (MinNumber.Is(MaxNumber))
        {
            return $"{MinNumber}";
        }
        return $"{MinNumber}-{MaxNumber}";
    }

}

@ -60,6 +60,7 @@ public static class ApplicationServiceExtensions
    services.AddScoped<ILibraryWatcher, LibraryWatcher>();
    services.AddScoped<ITachiyomiService, TachiyomiService>();
    services.AddScoped<ICollectionTagService, CollectionTagService>();
    services.AddScoped<ITagManagerService, TagManagerService>();

    services.AddScoped<IFileSystem, FileSystem>();
    services.AddScoped<IDirectoryService, DirectoryService>();

@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using API.Entities;
using API.Helpers;

@ -28,10 +29,11 @@ public static class ChapterListExtensions
    /// <returns></returns>
    public static Chapter? GetChapterByRange(this IEnumerable<Chapter> chapters, ParserInfo info)
    {
        var normalizedPath = Parser.NormalizePath(info.FullFilePath);
        var specialTreatment = info.IsSpecialInfo();
        return specialTreatment
            ? chapters.FirstOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath)))
            : chapters.FirstOrDefault(c => c.Range == info.Chapters);
        return specialTreatment
            ? chapters.FirstOrDefault(c => c.Range == Parser.RemoveExtensionIfSupported(info.Filename) || c.Files.Select(f => Parser.NormalizePath(f.FilePath)).Contains(normalizedPath))
            : chapters.FirstOrDefault(c => c.Range == info.Chapters);
    }

    /// <summary>

@ -41,6 +43,6 @@ public static class ChapterListExtensions
    /// <returns></returns>
    public static int MinimumReleaseYear(this IList<Chapter> chapters)
    {
        return chapters.Select(v => v.ReleaseDate.Year).Where(y => NumberHelper.IsValidYear(y)).DefaultIfEmpty().Min();
        return chapters.Select(v => v.ReleaseDate.Year).Where(NumberHelper.IsValidYear).DefaultIfEmpty().Min();
    }
}

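Illustrative sketch (not part of this commit): the reworked GetChapterByRange matches specials on normalized file paths rather than raw strings, so separator or casing differences no longer break the lookup. A minimal stand-alone approximation of the idea; NormalizeExamplePath is a hypothetical helper, not Kavita's Parser.NormalizePath:

public static class PathMatchExample
{
    // Collapses backslashes to forward slashes and lower-cases the path before comparing.
    public static string NormalizeExamplePath(string path) => path.Replace('\\', '/').ToLowerInvariant();

    public static bool SameFile(string a, string b) => NormalizeExamplePath(a) == NormalizeExamplePath(b);
}
// SameFile(@"C:\Library\Series\SP01.cbz", "c:/library/series/sp01.cbz") -> true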
26 API/Extensions/FloatExtensions.cs Normal file

@ -0,0 +1,26 @@
using System;

namespace API.Extensions;

public static class FloatExtensions
{
    private const float Tolerance = 0.001f;

    /// <summary>
    /// Used to compare 2 floats together
    /// </summary>
    /// <param name="a"></param>
    /// <param name="b"></param>
    /// <returns></returns>
    public static bool Is(this float a, float? b)
    {
        if (!b.HasValue) return false;
        return Math.Abs((float) (a - b)) < Tolerance;
    }

    public static bool IsNot(this float a, float? b)
    {
        if (!b.HasValue) return false;
        return Math.Abs((float) (a - b)) > Tolerance;
    }
}

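Illustrative usage sketch (not part of this commit): the Is/IsNot extensions above replace the scattered Math.Abs(x - y) < 0.001f checks used for volume and chapter number comparisons elsewhere in this changeset. Assuming the namespace is imported, callers can write:

using API.Extensions; // assumed import of the new extension class

float minNumber = 1.0004f;
bool sameAsOne = minNumber.Is(1f);   // true: within the 0.001f tolerance
bool differs = minNumber.IsNot(2f);  // true
bool neverNull = minNumber.Is(null); // false: a null comparand never matches

Note that both methods return false for null, and values that land exactly on the tolerance boundary satisfy neither Is nor IsNot.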
@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using API.Entities;
using API.Services.Tasks.Scanner.Parser;

@ -27,7 +28,9 @@ public static class ParserInfoListExtensions
    /// <returns></returns>
    public static bool HasInfo(this IList<ParserInfo> infos, Chapter chapter)
    {
        return chapter.IsSpecial ? infos.Any(v => v.Filename == chapter.Range)
            : infos.Any(v => v.Chapters == chapter.Range);
        var chapterFiles = chapter.Files.Select(x => Parser.NormalizePath(x.FilePath)).ToList();
        var infoFiles = infos.Select(x => Parser.NormalizePath(x.FullFilePath)).ToList();
        return infoFiles.Intersect(chapterFiles).Any();
    }

}

@ -39,6 +39,31 @@ public static class IncludesExtensions
        return queryable.AsSplitQuery();
    }

    public static IQueryable<Volume> Includes(this IQueryable<Volume> queryable,
        VolumeIncludes includes)
    {
        if (includes.HasFlag(VolumeIncludes.Chapters))
        {
            queryable = queryable.Include(vol => vol.Chapters);
        }

        if (includes.HasFlag(VolumeIncludes.People))
        {
            queryable = queryable
                .Include(vol => vol.Chapters)
                .ThenInclude(c => c.People);
        }

        if (includes.HasFlag(VolumeIncludes.Tags))
        {
            queryable = queryable
                .Include(vol => vol.Chapters)
                .ThenInclude(c => c.Tags);
        }

        return queryable.AsSplitQuery();
    }

    public static IQueryable<Series> Includes(this IQueryable<Series> query,
        SeriesIncludes includeFlags)
    {

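Illustrative sketch (not part of this commit): VolumeIncludes is declared as a [Flags] enum earlier in this changeset, so callers combine values with bitwise OR and the extension above turns each set flag into the matching EF Core Include. A hedged caller-side sketch; the repository field and DbSet names are taken from the diff above:

// Assumed caller code inside a repository method.
var includes = VolumeIncludes.Chapters | VolumeIncludes.People; // request chapters plus their people
// var volumes = await _context.Volume.Includes(includes).OrderBy(v => v.MinNumber).ToListAsync();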
@ -1,6 +1,4 @@
|
|||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using System.Linq;
|
||||
using API.Comparators;
|
||||
using API.Entities;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
|
|
@ -19,13 +17,19 @@ public static class SeriesExtensions
|
|||
public static string? GetCoverImage(this Series series)
|
||||
{
|
||||
var volumes = (series.Volumes ?? [])
|
||||
.OrderBy(v => v.MinNumber, ChapterSortComparer.Default)
|
||||
.OrderBy(v => v.MinNumber, ChapterSortComparerDefaultLast.Default)
|
||||
.ToList();
|
||||
var firstVolume = volumes.GetCoverImage(series.Format);
|
||||
if (firstVolume == null) return null;
|
||||
|
||||
// If first volume here is specials, move to the next as specials should almost always be last.
|
||||
if (firstVolume.MinNumber.Is(Parser.SpecialVolumeNumber) && volumes.Count > 1)
|
||||
{
|
||||
firstVolume = volumes[1];
|
||||
}
|
||||
|
||||
var chapters = firstVolume.Chapters
|
||||
.OrderBy(c => c.Number.AsDouble(), ChapterSortComparerZeroFirst.Default)
|
||||
.OrderBy(c => c.SortOrder)
|
||||
.ToList();
|
||||
|
||||
if (chapters.Count > 1 && chapters.Exists(c => c.IsSpecial))
|
||||
|
|
@ -34,32 +38,42 @@ public static class SeriesExtensions
|
|||
}
|
||||
|
||||
// just volumes
|
||||
if (volumes.TrueForAll(v => $"{v.MinNumber}" != Parser.LooseLeafVolume))
|
||||
if (volumes.TrueForAll(v => v.MinNumber.IsNot(Parser.LooseLeafVolumeNumber)))
|
||||
{
|
||||
return firstVolume.CoverImage;
|
||||
}
|
||||
// If we have loose leaf chapters
|
||||
|
||||
// if loose leaf chapters AND volumes, just return first volume
|
||||
if (volumes.Count >= 1 && $"{volumes[0].MinNumber}" != Parser.LooseLeafVolume)
|
||||
if (volumes.Count >= 1 && volumes[0].MinNumber.IsNot(Parser.LooseLeafVolumeNumber))
|
||||
{
|
||||
var looseLeafChapters = volumes.Where(v => $"{v.MinNumber}" == Parser.LooseLeafVolume)
|
||||
.SelectMany(c => c.Chapters.Where(c => !c.IsSpecial))
|
||||
.OrderBy(c => c.Number.AsDouble(), ChapterSortComparerZeroFirst.Default)
|
||||
var looseLeafChapters = volumes.Where(v => v.MinNumber.Is(Parser.LooseLeafVolumeNumber))
|
||||
.SelectMany(c => c.Chapters.Where(c2 => !c2.IsSpecial))
|
||||
.OrderBy(c => c.SortOrder)
|
||||
.ToList();
|
||||
if (looseLeafChapters.Count > 0 && (1.0f * volumes[0].MinNumber) > looseLeafChapters[0].Number.AsFloat())
|
||||
|
||||
if (looseLeafChapters.Count > 0 && volumes[0].MinNumber > looseLeafChapters[0].MinNumber)
|
||||
{
|
||||
var first = looseLeafChapters.Find(c => c.SortOrder.Is(1f));
|
||||
if (first != null) return first.CoverImage;
|
||||
return looseLeafChapters[0].CoverImage;
|
||||
}
|
||||
return firstVolume.CoverImage;
|
||||
}
|
||||
|
||||
var firstLooseLeafChapter = volumes
|
||||
.Where(v => $"{v.MinNumber}" == Parser.LooseLeafVolume)
|
||||
.SelectMany(v => v.Chapters)
|
||||
.OrderBy(c => c.Number.AsDouble(), ChapterSortComparerZeroFirst.Default)
|
||||
.FirstOrDefault(c => !c.IsSpecial);
|
||||
var chpts = volumes
|
||||
.First(v => v.MinNumber.Is(Parser.LooseLeafVolumeNumber))
|
||||
.Chapters
|
||||
.Where(c => !c.IsSpecial)
|
||||
.OrderBy(c => c.MinNumber, ChapterSortComparerDefaultLast.Default)
|
||||
.ToList();
|
||||
|
||||
return firstLooseLeafChapter?.CoverImage ?? firstVolume.CoverImage;
|
||||
var exactlyChapter1 = chpts.Find(c => c.MinNumber.Is(1f));
|
||||
if (exactlyChapter1 != null)
|
||||
{
|
||||
return exactlyChapter1.CoverImage;
|
||||
}
|
||||
|
||||
return chpts.FirstOrDefault()?.CoverImage ?? firstVolume.CoverImage;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ using System.Collections;
|
|||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using API.Comparators;
|
||||
using API.DTOs;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
|
|
@ -24,7 +25,7 @@ public static class VolumeListExtensions
|
|||
{
|
||||
if (volumes == null) throw new ArgumentException("Volumes cannot be null");
|
||||
|
||||
if (seriesFormat == MangaFormat.Epub || seriesFormat == MangaFormat.Pdf)
|
||||
if (seriesFormat is MangaFormat.Epub or MangaFormat.Pdf)
|
||||
{
|
||||
return volumes.MinBy(x => x.MinNumber);
|
||||
}
|
||||
|
|
@ -45,7 +46,7 @@ public static class VolumeListExtensions
|
|||
/// <returns></returns>
|
||||
public static bool HasAnyNonLooseLeafVolumes(this IEnumerable<Volume> volumes)
|
||||
{
|
||||
return volumes.Any(x => Math.Abs(x.MinNumber - Parser.DefaultChapterNumber) > 0.001f);
|
||||
return volumes.Any(v => v.MinNumber.IsNot(Parser.DefaultChapterNumber));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
|
@ -55,7 +56,8 @@ public static class VolumeListExtensions
|
|||
/// <returns></returns>
|
||||
public static Volume? FirstNonLooseLeafOrDefault(this IEnumerable<Volume> volumes)
|
||||
{
|
||||
return volumes.OrderBy(x => x.MinNumber).FirstOrDefault(v => Math.Abs(v.MinNumber - Parser.DefaultChapterNumber) >= 0.001f);
|
||||
return volumes.OrderBy(x => x.MinNumber, ChapterSortComparerDefaultLast.Default)
|
||||
.FirstOrDefault(v => v.MinNumber.IsNot(Parser.DefaultChapterNumber));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
|
@ -65,16 +67,26 @@ public static class VolumeListExtensions
|
|||
/// <returns></returns>
|
||||
public static Volume? GetLooseLeafVolumeOrDefault(this IEnumerable<Volume> volumes)
|
||||
{
|
||||
return volumes.FirstOrDefault(v => Math.Abs(v.MinNumber - Parser.DefaultChapterNumber) < 0.001f);
|
||||
return volumes.FirstOrDefault(v => v.MinNumber.Is(Parser.DefaultChapterNumber));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns the first (and only) special volume or null if none
|
||||
/// </summary>
|
||||
/// <param name="volumes"></param>
|
||||
/// <returns></returns>
|
||||
public static Volume? GetSpecialVolumeOrDefault(this IEnumerable<Volume> volumes)
|
||||
{
|
||||
return volumes.FirstOrDefault(v => v.MinNumber.Is(Parser.SpecialVolumeNumber));
|
||||
}
|
||||
|
||||
public static IEnumerable<VolumeDto> WhereNotLooseLeaf(this IEnumerable<VolumeDto> volumes)
|
||||
{
|
||||
return volumes.Where(v => Math.Abs(v.MinNumber - Parser.DefaultChapterNumber) >= 0.001f);
|
||||
return volumes.Where(v => v.MinNumber.Is(Parser.DefaultChapterNumber));
|
||||
}
|
||||
|
||||
public static IEnumerable<VolumeDto> WhereLooseLeaf(this IEnumerable<VolumeDto> volumes)
|
||||
{
|
||||
return volumes.Where(v => Math.Abs(v.MinNumber - Parser.DefaultChapterNumber) < 0.001f);
|
||||
return volumes.Where(v => v.MinNumber.Is(Parser.DefaultChapterNumber));
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ public class AutoMapperProfiles : Profile
|
|||
.ForMember(dest => dest.Series, opt => opt.MapFrom(src => src.Series));
|
||||
CreateMap<LibraryDto, Library>();
|
||||
CreateMap<Volume, VolumeDto>()
|
||||
.ForMember(dest => dest.Number, opt => opt.MapFrom(src => src.MinNumber));
|
||||
.ForMember(dest => dest.Number, opt => opt.MapFrom(src => (int) src.MinNumber));
|
||||
CreateMap<MangaFile, MangaFileDto>();
|
||||
CreateMap<Chapter, ChapterDto>();
|
||||
CreateMap<Series, SeriesDto>();
|
||||
|
|
@ -128,6 +128,14 @@ public class AutoMapperProfiles : Profile
|
|||
opt =>
|
||||
opt.MapFrom(
|
||||
src => src.People.Where(p => p.Role == PersonRole.Editor).OrderBy(p => p.NormalizedName)))
|
||||
.ForMember(dest => dest.Teams,
|
||||
opt =>
|
||||
opt.MapFrom(
|
||||
src => src.People.Where(p => p.Role == PersonRole.Team).OrderBy(p => p.NormalizedName)))
|
||||
.ForMember(dest => dest.Locations,
|
||||
opt =>
|
||||
opt.MapFrom(
|
||||
src => src.People.Where(p => p.Role == PersonRole.Location).OrderBy(p => p.NormalizedName)))
|
||||
.ForMember(dest => dest.Genres,
|
||||
opt =>
|
||||
opt.MapFrom(
|
||||
|
|
@ -154,6 +162,9 @@ public class AutoMapperProfiles : Profile
|
|||
.ForMember(dest => dest.Inkers,
|
||||
opt =>
|
||||
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Inker).OrderBy(p => p.NormalizedName)))
|
||||
.ForMember(dest => dest.Imprints,
|
||||
opt =>
|
||||
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Imprint).OrderBy(p => p.NormalizedName)))
|
||||
.ForMember(dest => dest.Letterers,
|
||||
opt =>
|
||||
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Letterer).OrderBy(p => p.NormalizedName)))
|
||||
|
|
@ -171,7 +182,14 @@ public class AutoMapperProfiles : Profile
|
|||
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Character).OrderBy(p => p.NormalizedName)))
|
||||
.ForMember(dest => dest.Editors,
|
||||
opt =>
|
||||
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Editor).OrderBy(p => p.NormalizedName)));
|
||||
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Editor).OrderBy(p => p.NormalizedName)))
|
||||
.ForMember(dest => dest.Teams,
|
||||
opt =>
|
||||
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Team).OrderBy(p => p.NormalizedName)))
|
||||
.ForMember(dest => dest.Locations,
|
||||
opt =>
|
||||
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Location).OrderBy(p => p.NormalizedName)))
|
||||
;
|
||||
|
||||
CreateMap<AppUser, UserDto>()
|
||||
.ForMember(dest => dest.AgeRestriction,
|
||||
|
|
@ -200,6 +218,8 @@ public class AutoMapperProfiles : Profile
|
|||
CreateMap<ReadingList, ReadingListDto>();
|
||||
CreateMap<ReadingListItem, ReadingListItemDto>();
|
||||
CreateMap<ScrobbleError, ScrobbleErrorDto>();
|
||||
CreateMap<ChapterDto, TachiyomiChapterDto>();
|
||||
CreateMap<Chapter, TachiyomiChapterDto>();
|
||||
|
||||
CreateMap<Series, SearchResultDto>()
|
||||
.ForMember(dest => dest.SeriesId,
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
|
|
@ -17,20 +18,25 @@ public class ChapterBuilder : IEntityBuilder<Chapter>
|
|||
{
|
||||
_chapter = new Chapter()
|
||||
{
|
||||
Range = string.IsNullOrEmpty(range) ? number : range,
|
||||
Range = string.IsNullOrEmpty(range) ? number : Parser.RemoveExtensionIfSupported(range),
|
||||
Title = string.IsNullOrEmpty(range) ? number : range,
|
||||
Number = Parser.MinNumberFromRange(number).ToString(CultureInfo.InvariantCulture),
|
||||
MinNumber = Parser.MinNumberFromRange(number),
|
||||
MaxNumber = Parser.MaxNumberFromRange(number),
|
||||
SortOrder = Parser.MinNumberFromRange(number),
|
||||
Files = new List<MangaFile>(),
|
||||
Pages = 1
|
||||
Pages = 1,
|
||||
CreatedUtc = DateTime.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
public static ChapterBuilder FromParserInfo(ParserInfo info)
|
||||
{
|
||||
var specialTreatment = info.IsSpecialInfo();
|
||||
var specialTitle = specialTreatment ? info.Filename : info.Chapters;
|
||||
var specialTitle = specialTreatment ? Parser.RemoveExtensionIfSupported(info.Filename) : info.Chapters;
|
||||
var builder = new ChapterBuilder(Parser.DefaultChapter);
|
||||
return builder.WithNumber(specialTreatment ? Parser.DefaultChapter : Parser.MinNumberFromRange(info.Chapters) + string.Empty)
|
||||
|
||||
return builder.WithNumber(Parser.RemoveExtensionIfSupported(info.Chapters))
|
||||
.WithRange(specialTreatment ? info.Filename : info.Chapters)
|
||||
.WithTitle((specialTreatment && info.Format == MangaFormat.Epub)
|
||||
? info.Title
|
||||
|
|
@ -44,9 +50,18 @@ public class ChapterBuilder : IEntityBuilder<Chapter>
|
|||
return this;
|
||||
}
|
||||
|
||||
public ChapterBuilder WithNumber(string number)
|
||||
|
||||
private ChapterBuilder WithNumber(string number)
|
||||
{
|
||||
_chapter.Number = number;
|
||||
_chapter.MinNumber = Parser.MinNumberFromRange(number);
|
||||
_chapter.MaxNumber = Parser.MaxNumberFromRange(number);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ChapterBuilder WithSortOrder(float order)
|
||||
{
|
||||
_chapter.SortOrder = order;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
|
@ -62,9 +77,9 @@ public class ChapterBuilder : IEntityBuilder<Chapter>
|
|||
return this;
|
||||
}
|
||||
|
||||
private ChapterBuilder WithRange(string range)
|
||||
public ChapterBuilder WithRange(string range)
|
||||
{
|
||||
_chapter.Range = range;
|
||||
_chapter.Range = Parser.RemoveExtensionIfSupported(range);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
using System.IO;
using API.Entities;
using API.Entities.Enums;
using API.Services.Tasks.Scanner.Parser;

namespace API.Helpers.Builders;

@ -19,6 +20,7 @@ public class MangaFileBuilder : IEntityBuilder<MangaFile>
            Pages = pages,
            LastModified = File.GetLastWriteTime(filePath),
            LastModifiedUtc = File.GetLastWriteTimeUtc(filePath),
            FileName = Parser.RemoveExtensionIfSupported(filePath)
        };
    }

@ -26,7 +26,9 @@ public class SeriesBuilder : IEntityBuilder<Series>
            SortName = name,
            NormalizedName = name.ToNormalized(),
            NormalizedLocalizedName = name.ToNormalized(),
            Metadata = new SeriesMetadataBuilder().Build(),
            Metadata = new SeriesMetadataBuilder()
                .WithPublicationStatus(PublicationStatus.OnGoing)
                .Build(),
            Volumes = new List<Volume>(),
            ExternalSeriesMetadata = new ExternalSeriesMetadata()
        };

@ -90,4 +92,10 @@ public class SeriesBuilder : IEntityBuilder<Series>
        _series.LibraryId = id;
        return this;
    }

    public SeriesBuilder WithPublicationStatus(PublicationStatus status)
    {
        _series.Metadata.PublicationStatus = status;
        return this;
    }
}

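Illustrative sketch (not part of this commit): after these changes a freshly built Series defaults its metadata to PublicationStatus.OnGoing, and the new fluent method lets callers override it. A hedged usage sketch, shown as comments because the constructor signature and other builder methods are not fully visible in this hunk:

// var series = new SeriesBuilder("My Series")          // assumed constructor taking the series name
//     .WithPublicationStatus(PublicationStatus.Completed) // overrides the OnGoing default
//     .Build();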
@ -15,6 +15,7 @@ public class VolumeBuilder : IEntityBuilder<Volume>
        _volume = new Volume()
        {
            Name = volumeNumber,
            LookupName = volumeNumber,
            MinNumber = Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(volumeNumber),
            MaxNumber = Services.Tasks.Scanner.Parser.Parser.MaxNumberFromRange(volumeNumber),
            Chapters = new List<Chapter>()

@ -49,7 +50,7 @@ public class VolumeBuilder : IEntityBuilder<Volume>
        return this;
    }

    public VolumeBuilder WithChapters(List<Chapter> chapters)
    public VolumeBuilder WithChapters(IList<Chapter> chapters)
    {
        _volume.Chapters = chapters;
        return this;

@ -58,6 +58,15 @@ public static class FilterFieldValueConverter
|
|||
FilterField.Inker => value.Split(',')
|
||||
.Select(int.Parse)
|
||||
.ToList(),
|
||||
FilterField.Imprint => value.Split(',')
|
||||
.Select(int.Parse)
|
||||
.ToList(),
|
||||
FilterField.Team => value.Split(',')
|
||||
.Select(int.Parse)
|
||||
.ToList(),
|
||||
FilterField.Location => value.Split(',')
|
||||
.Select(int.Parse)
|
||||
.ToList(),
|
||||
FilterField.Penciller => value.Split(',')
|
||||
.Select(int.Parse)
|
||||
.ToList(),
|
||||
|
|
|
|||
|
|
@ -12,25 +12,28 @@ namespace API.Helpers;

public static class GenreHelper
{
    public static void UpdateGenre(ICollection<Genre> allGenres, IEnumerable<string> names, Action<Genre> action)

    public static void UpdateGenre(Dictionary<string, Genre> allGenres,
        IEnumerable<string> names, Action<Genre, bool> action)
    {
        foreach (var name in names)
        {
            if (string.IsNullOrEmpty(name.Trim())) continue;

            var normalizedName = name.ToNormalized();
            var genre = allGenres.FirstOrDefault(p => p.NormalizedTitle != null && p.NormalizedTitle.Equals(normalizedName));
            if (genre == null)
            if (string.IsNullOrEmpty(normalizedName)) continue;

            if (allGenres.TryGetValue(normalizedName, out var genre))
            {
                action(genre, false);
            }
            else
            {
                genre = new GenreBuilder(name).Build();
                allGenres.Add(genre);
                allGenres.Add(normalizedName, genre);
                action(genre, true);
            }

            action(genre);
        }
    }

    public static void KeepOnlySameGenreBetweenLists(ICollection<Genre> existingGenres, ICollection<Genre> removeAllExcept, Action<Genre>? action = null)
    {
        var existing = existingGenres.ToList();

@ -64,6 +67,7 @@ public static class GenreHelper
    public static void UpdateGenreList(ICollection<GenreTagDto>? tags, Series series,
        IReadOnlyCollection<Genre> allTags, Action<Genre> handleAdd, Action onModified)
    {
        // TODO: Write some unit tests
        if (tags == null) return;
        var isModified = false;
        // I want a union of these 2 lists. Return only elements that are in both lists, but the list types are different

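Illustrative sketch (not part of this commit): the genre and tag helpers now key their lookup dictionaries by the normalized name, turning the previous linear FirstOrDefault scan into an O(1) TryGetValue. A minimal stand-alone approximation of the pattern; NormalizeExample is a hypothetical stand-in for the project's ToNormalized:

using System.Collections.Generic;

public static class KeyedLookupExample
{
    private static string NormalizeExample(string name) => name.Trim().ToLowerInvariant();

    // Returns the existing value, or creates, stores and returns a new one keyed by the normalized name.
    public static string GetOrAdd(Dictionary<string, string> all, string name)
    {
        var key = NormalizeExample(name);
        if (all.TryGetValue(key, out var existing)) return existing;
        all[key] = name;
        return name;
    }
}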
@ -12,6 +12,7 @@ namespace API.Helpers;
|
|||
|
||||
public static class PersonHelper
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Given a list of all existing people, this will check the new names and roles and if it doesn't exist in allPeople, will create and
|
||||
/// add an entry. For each person in name, the callback will be executed.
|
||||
|
|
@ -24,7 +25,6 @@ public static class PersonHelper
|
|||
/// <param name="action"></param>
|
||||
public static void UpdatePeople(ICollection<Person> allPeople, IEnumerable<string> names, PersonRole role, Action<Person> action)
|
||||
{
|
||||
// TODO: Validate if we need this, not used
|
||||
var allPeopleTypeRole = allPeople.Where(p => p.Role == role).ToList();
|
||||
|
||||
foreach (var name in names)
|
||||
|
|
|
|||
|
|
@ -1,43 +1,37 @@
|
|||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using API.Data;
|
||||
using API.DTOs.Metadata;
|
||||
using API.Entities;
|
||||
using API.Extensions;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
|
||||
namespace API.Helpers;
|
||||
#nullable enable
|
||||
|
||||
public static class TagHelper
|
||||
{
|
||||
/// <summary>
|
||||
///
|
||||
/// </summary>
|
||||
/// <param name="allTags"></param>
|
||||
/// <param name="names"></param>
|
||||
/// <param name="action">Callback for every item. Will give said item back and a bool if item was added</param>
|
||||
public static void UpdateTag(ICollection<Tag> allTags, IEnumerable<string> names, Action<Tag, bool> action)
|
||||
public static void UpdateTag(Dictionary<string, Tag> allTags, IEnumerable<string> names, Action<Tag, bool> action)
|
||||
{
|
||||
foreach (var name in names)
|
||||
{
|
||||
if (string.IsNullOrEmpty(name.Trim())) continue;
|
||||
|
||||
var added = false;
|
||||
var normalizedName = name.ToNormalized();
|
||||
allTags.TryGetValue(normalizedName, out var tag);
|
||||
|
||||
var genre = allTags.FirstOrDefault(p =>
|
||||
p.NormalizedTitle.Equals(normalizedName));
|
||||
if (genre == null)
|
||||
var added = tag == null;
|
||||
if (tag == null)
|
||||
{
|
||||
added = true;
|
||||
genre = new TagBuilder(name).Build();
|
||||
allTags.Add(genre);
|
||||
tag = new TagBuilder(name).Build();
|
||||
allTags.Add(normalizedName, tag);
|
||||
}
|
||||
|
||||
action(genre, added);
|
||||
action(tag, added);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -79,6 +73,22 @@ public static class TagHelper
|
|||
}
|
||||
}
|
||||
|
||||
public static IList<string> GetTagValues(string comicInfoTagSeparatedByComma)
|
||||
{
|
||||
// TODO: Unit tests needed
|
||||
if (string.IsNullOrEmpty(comicInfoTagSeparatedByComma))
|
||||
{
|
||||
return ImmutableList<string>.Empty;
|
||||
}
|
||||
|
||||
return comicInfoTagSeparatedByComma.Split(",")
|
||||
.Select(s => s.Trim())
|
||||
.DistinctBy(Parser.Normalize)
|
||||
.ToList();
|
||||
}
|
||||
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Remove tags on a list
|
||||
/// </summary>
|
||||
|
|
|
|||
|
|
@ -218,7 +218,7 @@ public class ArchiveService : IArchiveService
    /// <returns></returns>
    public string GetCoverImage(string archivePath, string fileName, string outputDirectory, EncodeFormat format, CoverImageSize size = CoverImageSize.Default)
    {
        if (archivePath == null || !IsValidArchive(archivePath)) return string.Empty;
        if (string.IsNullOrEmpty(archivePath) || !IsValidArchive(archivePath)) return string.Empty;
        try
        {
            var libraryHandler = CanOpen(archivePath);

@ -9,6 +9,7 @@ using System.Threading.Tasks;
|
|||
using API.DTOs.System;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Kavita.Common.Helpers;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
|
|
@ -53,6 +54,8 @@ public interface IDirectoryService
|
|||
bool CopyDirectoryToDirectory(string? sourceDirName, string destDirName, string searchPattern = "");
|
||||
Dictionary<string, string> FindHighestDirectoriesFromFiles(IEnumerable<string> libraryFolders,
|
||||
IList<string> filePaths);
|
||||
string? FindLowestDirectoriesFromFiles(IEnumerable<string> libraryFolders,
|
||||
IList<string> filePaths);
|
||||
IEnumerable<string> GetFoldersTillRoot(string rootPath, string fullPath);
|
||||
IEnumerable<string> GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly);
|
||||
bool ExistOrCreate(string directoryPath);
|
||||
|
|
@ -584,6 +587,43 @@ public class DirectoryService : IDirectoryService
|
|||
return dirs;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Finds the lowest directory from a set of file paths. Does not return the root path, will always select the lowest non-root path.
|
||||
/// </summary>
|
||||
/// <remarks>If the file paths do not contain anything from libraryFolders, this returns an empty dictionary back</remarks>
|
||||
/// <param name="libraryFolders">List of top level folders which files belong to</param>
|
||||
/// <param name="filePaths">List of file paths that belong to libraryFolders</param>
|
||||
/// <returns></returns>
|
||||
public string? FindLowestDirectoriesFromFiles(IEnumerable<string> libraryFolders, IList<string> filePaths)
|
||||
{
|
||||
|
||||
|
||||
var stopLookingForDirectories = false;
|
||||
var dirs = new Dictionary<string, string>();
|
||||
foreach (var folder in libraryFolders.Select(Tasks.Scanner.Parser.Parser.NormalizePath))
|
||||
{
|
||||
if (stopLookingForDirectories) break;
|
||||
foreach (var file in filePaths.Select(Tasks.Scanner.Parser.Parser.NormalizePath))
|
||||
{
|
||||
if (!file.Contains(folder)) continue;
|
||||
|
||||
var lowestPath = Path.GetDirectoryName(file)?.Replace(folder, string.Empty);
|
||||
if (!string.IsNullOrEmpty(lowestPath))
|
||||
{
|
||||
dirs.TryAdd(Parser.NormalizePath(lowestPath), string.Empty);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
if (dirs.Keys.Count == 1) return dirs.Keys.First();
|
||||
if (dirs.Keys.Count > 1)
|
||||
{
|
||||
return dirs.Keys.Last();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets a set of directories from the folder path. Automatically excludes directories that shouldn't be in scope.
|
||||
/// </summary>
|
||||
|
|
@ -657,7 +697,7 @@ public class DirectoryService : IDirectoryService
|
|||
/// <returns></returns>
|
||||
public IList<string> ScanFiles(string folderPath, string fileTypes, GlobMatcher? matcher = null)
|
||||
{
|
||||
_logger.LogDebug("[ScanFiles] called on {Path}", folderPath);
|
||||
_logger.LogTrace("[ScanFiles] called on {Path}", folderPath);
|
||||
var files = new List<string>();
|
||||
if (!Exists(folderPath)) return files;
|
||||
|
||||
|
|
|
|||
|
|
@ -197,7 +197,7 @@ public class MediaConversionService : IMediaConversionService
    foreach (var volume in nonCustomOrConvertedVolumeCovers)
    {
        if (string.IsNullOrEmpty(volume.CoverImage)) continue;
        volume.CoverImage = volume.Chapters.MinBy(x => x.Number.AsDouble(), ChapterSortComparerZeroFirst.Default)?.CoverImage;
        volume.CoverImage = volume.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
        _unitOfWork.VolumeRepository.Update(volume);
        await _unitOfWork.CommitAsync();
    }

@ -82,6 +82,7 @@ public class MetadataService : IMetadataService
        chapter.CoverImage = _readingItemService.GetCoverImage(firstFile.FilePath,
            ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId), firstFile.Format, encodeFormat, coverImageSize);
        _unitOfWork.ChapterRepository.Update(chapter);

        _updateEvents.Add(MessageFactory.CoverUpdateEvent(chapter.Id, MessageFactoryEntityTypes.Chapter));
        return Task.FromResult(true);
    }

@ -107,9 +108,15 @@ public class MetadataService : IMetadataService
            null, volume.Created, forceUpdate)) return Task.FromResult(false);

        // For cover selection, chapters need to try for issue 1 first, then fallback to first sort order
        volume.Chapters ??= new List<Chapter>();
        var firstChapter = volume.Chapters.MinBy(x => x.Number.AsDouble(), ChapterSortComparerZeroFirst.Default);
        if (firstChapter == null) return Task.FromResult(false);

        var firstChapter = volume.Chapters.FirstOrDefault(x => x.MinNumber.Is(1f));
        if (firstChapter == null)
        {
            firstChapter = volume.Chapters.MinBy(x => x.SortOrder, ChapterSortComparerDefaultFirst.Default);
            if (firstChapter == null) return Task.FromResult(false);
        }

        volume.CoverImage = firstChapter.CoverImage;
        _updateEvents.Add(MessageFactory.CoverUpdateEvent(volume.Id, MessageFactoryEntityTypes.Volume));

@ -130,8 +137,8 @@ public class MetadataService : IMetadataService
            null, series.Created, forceUpdate, series.CoverImageLocked))
            return Task.CompletedTask;

        series.Volumes ??= new List<Volume>();
        series.CoverImage = series.GetCoverImage(); // BUG: At this point the volume or chapter hasn't regenerated the cover
        series.Volumes ??= [];
        series.CoverImage = series.GetCoverImage();

        _updateEvents.Add(MessageFactory.CoverUpdateEvent(series.Id, MessageFactoryEntityTypes.Series));
        return Task.CompletedTask;

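Illustrative sketch (not part of this commit): the volume cover logic above now prefers the chapter whose MinNumber is exactly 1 and only falls back to the lowest sort order when no such chapter exists. A simplified stand-alone approximation on plain values; it ignores the special-casing done by ChapterSortComparerDefaultFirst:

using System;
using System.Collections.Generic;
using System.Linq;

public static class CoverPickExample
{
    // Each tuple is (MinNumber, SortOrder); returns the index of the chapter whose cover should be used, or -1.
    public static int PickCoverIndex(IList<(float MinNumber, float SortOrder)> chapters)
    {
        if (chapters.Count == 0) return -1;
        for (var i = 0; i < chapters.Count; i++)
        {
            if (Math.Abs(chapters[i].MinNumber - 1f) < 0.001f) return i; // issue/chapter 1 wins
        }
        // Fallback: the chapter with the lowest sort order
        var lowest = chapters.OrderBy(c => c.SortOrder).First();
        return chapters.IndexOf(lowest);
    }
}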
@ -70,7 +70,7 @@ public class ExternalMetadataService : IExternalMetadataService
    private readonly IMapper _mapper;
    private readonly ILicenseService _licenseService;
    private readonly TimeSpan _externalSeriesMetadataCache = TimeSpan.FromDays(30);
    public static readonly ImmutableArray<LibraryType> NonEligibleLibraryTypes = ImmutableArray.Create<LibraryType>(LibraryType.Comic, LibraryType.Book);
    public static readonly ImmutableArray<LibraryType> NonEligibleLibraryTypes = ImmutableArray.Create<LibraryType>(LibraryType.Comic, LibraryType.Book, LibraryType.Image, LibraryType.ComicVine);
    private readonly SeriesDetailPlusDto _defaultReturn = new()
    {
        Recommendations = null,

@ -155,6 +155,7 @@ public class ExternalMetadataService : IExternalMetadataService
    public async Task GetNewSeriesData(int seriesId, LibraryType libraryType)
    {
        if (!IsPlusEligible(libraryType)) return;
        if (!await _licenseService.HasActiveLicense()) return;

        // Generate key based on seriesId and libraryType or any unique identifier for the request
        // Check if the request is allowed based on the rate limit

@ -181,6 +181,11 @@ public class LicenseService(
        return false;
    }

    /// <summary>
    /// Checks if the sub is active and caches the result. This should not be used too much over cache as it will skip backend caching.
    /// </summary>
    /// <param name="license"></param>
    /// <returns></returns>
    public async Task<bool> HasActiveSubscription(string? license)
    {
        if (string.IsNullOrWhiteSpace(license)) return false;

@ -11,7 +11,9 @@ using API.DTOs.Scrobbling;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Scrobble;
using API.Extensions;
using API.Helpers;
using API.Services.Tasks.Scanner.Parser;
using API.SignalR;
using Flurl.Http;
using Hangfire;

@ -330,6 +332,15 @@ public class ScrobblingService : IScrobblingService
                await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(seriesId, userId),
            Format = LibraryTypeHelper.GetFormat(series.Library.Type),
        };
        // NOTE: Not sure how to handle scrobbling specials or handling sending loose leaf volumes
        if (evt.VolumeNumber is Parser.SpecialVolumeNumber)
        {
            evt.VolumeNumber = 0;
        }
        if (evt.VolumeNumber is Parser.DefaultChapterNumber)
        {
            evt.VolumeNumber = 0;
        }
        _unitOfWork.ScrobbleRepository.Attach(evt);
        await _unitOfWork.CommitAsync();
        _logger.LogDebug("Added Scrobbling Read update on {SeriesName} with Userid {UserId} ", series.Name, userId);

@ -798,7 +809,7 @@ public class ScrobblingService : IScrobblingService
            SeriesId = evt.SeriesId
        });
        evt.IsErrored = true;
        evt.ErrorDetails = "Series cannot be matched for Scrobbling";
        evt.ErrorDetails = UnknownSeriesErrorMessage;
        evt.ProcessDateUtc = DateTime.UtcNow;
        _unitOfWork.ScrobbleRepository.Update(evt);
        await _unitOfWork.CommitAsync();

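Illustrative sketch (not part of this commit): before a read event is persisted above, volume numbers that are only internal markers (Parser.SpecialVolumeNumber and Parser.DefaultChapterNumber) are collapsed to 0 so the scrobble provider never receives them. A minimal stand-alone approximation; the marker constants here are placeholders, since the real values live in Kavita's Parser:

public static class ScrobbleVolumeExample
{
    // Placeholder marker values; the real constants are Parser.SpecialVolumeNumber and Parser.DefaultChapterNumber.
    private const float SpecialVolumeMarker = 100000f;
    private const float DefaultChapterMarker = -100000f;

    public static float NormalizeVolumeNumber(float volumeNumber) =>
        volumeNumber == SpecialVolumeMarker || volumeNumber == DefaultChapterMarker ? 0f : volumeNumber;
}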
@ -51,8 +51,9 @@ public class ReaderService : IReaderService
|
|||
private readonly IImageService _imageService;
|
||||
private readonly IDirectoryService _directoryService;
|
||||
private readonly IScrobblingService _scrobblingService;
|
||||
private readonly ChapterSortComparer _chapterSortComparer = ChapterSortComparer.Default;
|
||||
private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = ChapterSortComparerZeroFirst.Default;
|
||||
private readonly ChapterSortComparerDefaultLast _chapterSortComparerDefaultLast = ChapterSortComparerDefaultLast.Default;
|
||||
private readonly ChapterSortComparerDefaultFirst _chapterSortComparerForInChapterSorting = ChapterSortComparerDefaultFirst.Default;
|
||||
private readonly ChapterSortComparerSpecialsLast _chapterSortComparerSpecialsLast = ChapterSortComparerSpecialsLast.Default;
|
||||
|
||||
private const float MinWordsPerHour = 10260F;
|
||||
private const float MaxWordsPerHour = 30000F;
|
||||
|
|
@ -346,11 +347,23 @@ public class ReaderService : IReaderService
|
|||
return page;
|
||||
}
|
||||
|
||||
private int GetNextSpecialChapter(VolumeDto volume, ChapterDto currentChapter)
|
||||
{
|
||||
if (volume.IsSpecial())
|
||||
{
|
||||
// Handle specials by sorting on their Filename aka Range
|
||||
return GetNextChapterId(volume.Chapters.OrderBy(x => x.SortOrder), currentChapter.SortOrder, dto => dto.SortOrder);
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Tries to find the next logical Chapter
|
||||
/// </summary>
|
||||
/// <example>
|
||||
/// V1 → V2 → V3 chapter 0 → V3 chapter 10 → V0 chapter 1 -> V0 chapter 2 -> SP 01 → SP 02
|
||||
/// V1 → V2 → V3 chapter 0 → V3 chapter 10 → V0 chapter 1 -> V0 chapter 2 -> (Annual 1 -> Annual 2) -> (SP 01 → SP 02)
|
||||
/// </example>
|
||||
/// <param name="seriesId"></param>
|
||||
/// <param name="volumeId"></param>
|
||||
|
|
@@ -359,112 +372,88 @@ public class ReaderService : IReaderService
/// <returns>-1 if nothing can be found</returns>
public async Task<int> GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId))
.ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);
var volumes = await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId);

var currentVolume = volumes.FirstOrDefault(v => v.Id == volumeId);
if (currentVolume == null)
{
// Handle the case where the current volume is not found
return -1;
}

var currentChapter = currentVolume.Chapters.FirstOrDefault(c => c.Id == currentChapterId);
if (currentChapter == null)
{
// Handle the case where the current chapter is not found
return -1;
}

var currentVolumeIndex = volumes.IndexOf(currentVolume);
var chapterId = -1;

if (currentVolume.IsSpecial())
{
// Handle specials by sorting on their Range
chapterId = GetNextSpecialChapter(currentVolume, currentChapter);
return chapterId;
}

if (currentVolume.IsLooseLeaf())
{
// Handle specials by sorting on their Filename aka Range
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderByNatural(x => x.Range), currentChapter.Range, dto => dto.Range);
// Handle loose-leaf chapters
chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.SortOrder),
currentChapter.SortOrder,
dto => dto.SortOrder);
if (chapterId > 0) return chapterId;

// Check specials next, as that is the order
if (currentVolumeIndex + 1 >= volumes.Count) return -1; // There are no special volumes, so there is nothing

var specialVolume = volumes[currentVolumeIndex + 1];
if (!specialVolume.IsSpecial()) return -1;
return specialVolume.Chapters.OrderByNatural(c => c.Range).FirstOrDefault()?.Id ?? -1;
}

var next = false;
foreach (var volume in volumes)
// Check within the current volume if the next chapter within it can be next
var chapters = currentVolume.Chapters.OrderBy(c => c.MinNumber).ToList();
var currentChapterIndex = chapters.IndexOf(currentChapter);
if (currentChapterIndex < chapters.Count - 1)
{
var volumeNumbersMatch = volume.Name == currentVolume.Name;
if (volumeNumbersMatch && volume.Chapters.Count > 1)
{
// Handle Chapters within current Volume
// In this case, i need 0 first because 0 represents a full volume file.
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Number.AsFloat(), _chapterSortComparer),
currentChapter.Range, dto => dto.Range);
if (chapterId > 0) return chapterId;
next = true;
continue;
}

if (volumeNumbersMatch)
{
next = true;
continue;
}

if (!next) continue;

// Handle Chapters within next Volume
// ! When selecting the chapter for the next volume, we need to make sure a c0 comes before a c1+
var chapters = volume.Chapters.OrderBy(x => x.Number.AsDouble(), _chapterSortComparer).ToList();
if (currentChapter.Number.Equals(Parser.DefaultChapter) && chapters[^1].Number.Equals(Parser.DefaultChapter))
{
// We need to handle an extra check if the current chapter is the last special, as we should return -1
if (currentChapter.IsSpecial) return -1;

return chapters.Last().Id;
}

var firstChapter = chapters.FirstOrDefault();
if (firstChapter == null) break;
var isSpecial = firstChapter.IsSpecial || currentChapter.IsSpecial;
if (isSpecial)
{
var chapterId = GetNextChapterId(volume.Chapters.OrderByNatural(x => x.Number),
currentChapter.Range, dto => dto.Range);
if (chapterId > 0) return chapterId;
} else if (firstChapter.Number.AsDouble() >= currentChapter.Number.AsDouble()) return firstChapter.Id;
// If we are the last chapter and next volume is there, we should try to use it (unless it's volume 0)
else if (firstChapter.Number.AsDouble() == Parser.DefaultChapterNumber) return firstChapter.Id;

// If on last volume AND there are no specials left, then let's return -1
var anySpecials = volumes.Where(v => $"{v.MinNumber}" == Parser.LooseLeafVolume)
.SelectMany(v => v.Chapters.Where(c => c.IsSpecial)).Any();
if (!currentVolume.IsLooseLeaf() && !anySpecials)
{
return -1;
}
return chapters[currentChapterIndex + 1].Id;
}

// Check within the current Volume
chapterId = GetNextChapterId(chapters, currentChapter.SortOrder, dto => dto.SortOrder);
if (chapterId > 0) return chapterId;

// If we are the last volume and we didn't find any next volume, loop back to volume 0 and give the first chapter
// This has an added problem that it will loop up to the beginning always
// Should I change this to Max number? volumes.LastOrDefault()?.Number -> volumes.Max(v => v.Number)

if (!currentVolume.IsLooseLeaf() && currentVolume.MinNumber == volumes.LastOrDefault()?.MinNumber && volumes.Count > 1)
// Now check the next volume
var nextVolumeIndex = currentVolumeIndex + 1;
if (nextVolumeIndex < volumes.Count)
{
var chapterVolume = volumes.FirstOrDefault();
if (chapterVolume == null || !chapterVolume.IsLooseLeaf()) return -1;
// Get the first chapter from the next volume
chapterId = volumes[nextVolumeIndex].Chapters.MinBy(c => c.MinNumber, _chapterSortComparerForInChapterSorting)?.Id ?? -1;
return chapterId;
}

// This is my attempt at fixing a bug where we loop around to the beginning, but I just can't seem to figure it out
// var orderedVolumes = volumes.OrderBy(v => v.Number, SortComparerZeroLast.Default).ToList();
// if (currentVolume.Number == orderedVolumes.FirstOrDefault().Number)
// {
// // We can move into loose leaf chapters
// //var firstLooseLeaf = volumes.LastOrDefault().Chapters.MinBy(x => x.Number.AsDouble(), _chapterSortComparer);
// var nextChapterId = GetNextChapterId(
// volumes.LastOrDefault().Chapters.OrderBy(x => x.Number.AsDouble(), _chapterSortComparer),
// "0", dto => dto.Range);
// // CHECK if we need a IsSpecial check
// if (nextChapterId > 0) return nextChapterId;
// }

var firstChapter = chapterVolume.Chapters.MinBy(x => x.Number.AsDouble(), _chapterSortComparer);
if (firstChapter == null) return -1;

return firstChapter.Id;
// We are the last volume, so we need to check loose leaf
if (currentVolumeIndex == volumes.Count - 1)
{
// Try to find the first loose-leaf chapter in this volume
var firstLooseLeafChapter = volumes.WhereLooseLeaf().FirstOrDefault()?.Chapters.MinBy(c => c.MinNumber, _chapterSortComparerForInChapterSorting);
if (firstLooseLeafChapter != null)
{
return firstLooseLeafChapter.Id;
}
}

return -1;
}

/// <summary>
/// Tries to find the prev logical Chapter
/// </summary>
/// <example>
/// V1 ← V2 ← V3 chapter 0 ← V3 chapter 10 ← V0 chapter 1 ← V0 chapter 2 ← SP 01 ← SP 02
/// V1 ← V2 ← V3 chapter 0 ← V3 chapter 10 ← (V0 chapter 1 ← V0 chapter 2 ← SP 01 ← SP 02)
/// </example>
/// <param name="seriesId"></param>
/// <param name="volumeId"></param>
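A minimal usage sketch of the next-chapter lookup above: walking a series in reading order until -1 signals the end. IReaderService and GetNextChapterIdAsync are taken from the signatures in this diff; the volume-lookup callback is a hypothetical stand-in for resolving which volume a returned chapter sits in.

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

public static class ReadingOrderSketch
{
    // Sketch: follow GetNextChapterIdAsync until it returns -1 (no further chapter).
    public static async Task<List<int>> WalkAsync(IReaderService readerService,
        int seriesId, int volumeId, int chapterId, int userId, Func<int, Task<int>> resolveVolumeId)
    {
        var order = new List<int> { chapterId };
        while (true)
        {
            var nextId = await readerService.GetNextChapterIdAsync(seriesId, volumeId, chapterId, userId);
            if (nextId <= 0) break; // -1 means nothing can be found
            order.Add(nextId);
            chapterId = nextId;
            volumeId = await resolveVolumeId(nextId); // the next chapter may live in a different volume
        }
        return order;
    }
}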
@@ -473,52 +462,76 @@ public class ReaderService : IReaderService
/// <returns>-1 if nothing can be found</returns>
public async Task<int> GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList();
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);

if (currentVolume.IsLooseLeaf())
var chapterId = -1;

if (currentVolume.IsSpecial())
{
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderByNatural(x => x.Range).Reverse(), currentChapter.Range,
dto => dto.Range);
// Check within Specials, if not set the currentVolume to Loose Leaf
chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.SortOrder).Reverse(),
currentChapter.SortOrder,
dto => dto.SortOrder);
if (chapterId > 0) return chapterId;
currentVolume = volumes.FirstOrDefault(v => v.IsLooseLeaf());
}

var next = false;
foreach (var volume in volumes)
if (currentVolume != null && currentVolume.IsLooseLeaf())
{
if (volume.MinNumber == currentVolume.MinNumber)
{
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Number.AsDouble(), _chapterSortComparerForInChapterSorting).Reverse(),
currentChapter.Range, dto => dto.Range);
if (chapterId > 0) return chapterId;
next = true; // When the diff between volumes is more than 1, we need to explicitly tell that next volume is our use case
continue;
}
if (next)
{
if (currentVolume.MinNumber - 1 == Parser.LooseLeafVolumeNumber) break; // If we have walked all the way to chapter volume, then we should break so logic outside can work
var lastChapter = volume.Chapters.MaxBy(x => x.Number.AsDouble(), _chapterSortComparerForInChapterSorting);
if (lastChapter == null) return -1;
return lastChapter.Id;
}
// If loose leaf, handle within the loose leaf. If not there, then set currentVolume to volumes.Last() where not LooseLeaf or Special
var currentVolumeChapters = currentVolume.Chapters.OrderBy(x => x.SortOrder).ToList();
chapterId = GetPrevChapterId(currentVolumeChapters,
currentChapter.SortOrder, dto => dto.SortOrder, c => c.Id);
if (chapterId > 0) return chapterId;
currentVolume = volumes.FindLast(v => !v.IsLooseLeaf() && !v.IsSpecial());
if (currentVolume != null) return currentVolume.Chapters.OrderBy(x => x.SortOrder).Last()?.Id ?? -1;
}

var lastVolume = volumes.MaxBy(v => v.MinNumber);
if (currentVolume.IsLooseLeaf() && currentVolume.MinNumber != lastVolume?.MinNumber && lastVolume?.Chapters.Count > 1)
// When we started as a special and there was no loose leafs, reset the currentVolume
if (currentVolume == null)
{
var lastChapter = lastVolume.Chapters.MaxBy(x => x.Number.AsDouble(), _chapterSortComparerForInChapterSorting);
if (lastChapter == null) return -1;
return lastChapter.Id;
currentVolume = volumes.FirstOrDefault(v => !v.IsLooseLeaf() && !v.IsSpecial());
if (currentVolume == null) return -1;
return currentVolume.Chapters.OrderBy(x => x.SortOrder).Last()?.Id ?? -1;
}

// At this point, only need to check within the current Volume else move 1 level back

// Check current volume
chapterId = GetPrevChapterId(currentVolume.Chapters.OrderBy(x => x.SortOrder),
currentChapter.SortOrder, dto => dto.SortOrder, c => c.Id);
if (chapterId > 0) return chapterId;

var currentVolumeIndex = volumes.IndexOf(currentVolume);
if (currentVolumeIndex == 0) return -1;
currentVolume = volumes[currentVolumeIndex - 1];
if (currentVolume.IsLooseLeaf() || currentVolume.IsSpecial()) return -1;
chapterId = currentVolume.Chapters.OrderBy(x => x.SortOrder).Last().Id;
if (chapterId > 0) return chapterId;

return -1;
}

private static int GetPrevChapterId<T>(IEnumerable<T> source, float currentValue, Func<T, float> selector, Func<T, int> idSelector)
{
var sortedSource = source.OrderBy(selector).ToList();
var currentChapterIndex = sortedSource.FindIndex(x => selector(x).Is(currentValue));

if (currentChapterIndex > 0)
{
return idSelector(sortedSource[currentChapterIndex - 1]);
}

// There is no previous chapter
return -1;
}

/// <summary>
/// Finds the chapter to continue reading from. If a chapter has progress and not complete, return that. If not, progress in the
/// ordering (Volumes -> Loose Chapters -> Special) to find next chapter. If all are read, return first in order for series.
/// ordering (Volumes -> Loose Chapters -> Annuals -> Special) to find next chapter. If all are read, return first in order for series.
/// </summary>
/// <param name="seriesId"></param>
/// <param name="userId"></param>
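A small illustrative call pattern for the generic previous-id helper above, with a throwaway record standing in for ChapterDto; the float comparison mirrors the .Is() check with a plain tolerance.

using System;
using System.Collections.Generic;
using System.Linq;

public static class PrevIdSketch
{
    private sealed record Entry(int Id, float SortOrder);

    // Sketch: given items sorted by a float key, return the Id of the entry just before
    // the current key, or -1 when the current entry is already first (or absent).
    public static int Demo()
    {
        var entries = new List<Entry> { new(10, 1f), new(11, 2f), new(12, 3f) };
        // Equivalent call shape: GetPrevChapterId(entries, 3f, e => e.SortOrder, e => e.Id) => 11
        var sorted = entries.OrderBy(e => e.SortOrder).ToList();
        var index = sorted.FindIndex(e => Math.Abs(e.SortOrder - 3f) < 0.001f);
        return index > 0 ? sorted[index - 1].Id : -1;
    }
}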
@@ -527,28 +540,42 @@ public class ReaderService : IReaderService
{
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).ToList();

if (!await _unitOfWork.AppUserProgressRepository.AnyUserProgressForSeriesAsync(seriesId, userId))
{
// I think i need a way to sort volumes last
var chapters = volumes.OrderBy(v => v.MinNumber, _chapterSortComparer).First().Chapters
.OrderBy(c => c.Number.AsFloat())
.ToList();
var anyUserProgress =
await _unitOfWork.AppUserProgressRepository.AnyUserProgressForSeriesAsync(seriesId, userId);

// If there are specials, then return the first Non-special
if (chapters.Exists(c => c.IsSpecial))
{
var firstChapter = chapters.FirstOrDefault(c => !c.IsSpecial);
if (firstChapter == null)
{
// If there is no non-special chapter, then return first chapter
return chapters[0];
}
if (!anyUserProgress)
{
// I think i need a way to sort volumes last
volumes = volumes.OrderBy(v => v.MinNumber, _chapterSortComparerSpecialsLast).ToList();

return firstChapter;
}
// Else use normal logic
return chapters[0];
}
// Check if we have a non-loose leaf volume
var nonLooseLeafNonSpecialVolume = volumes.Find(v => !v.IsLooseLeaf() && !v.IsSpecial());
if (nonLooseLeafNonSpecialVolume != null)
{
return nonLooseLeafNonSpecialVolume.Chapters.MinBy(c => c.SortOrder);
}

// We only have a loose leaf or Special left

var chapters = volumes.First(v => v.IsLooseLeaf() || v.IsSpecial()).Chapters
.OrderBy(c => c.SortOrder)
.ToList();

// If there are specials, then return the first Non-special
if (chapters.Exists(c => c.IsSpecial))
{
var firstChapter = chapters.Find(c => !c.IsSpecial);
if (firstChapter == null)
{
// If there is no non-special chapter, then return first chapter
return chapters[0];
}

return firstChapter;
}
// Else use normal logic
return chapters[0];
}

// Loop through all chapters that are not in volume 0
var volumeChapters = volumes
@@ -559,13 +586,13 @@ public class ReaderService : IReaderService
// NOTE: If volume 1 has chapter 1 and volume 2 is just chapter 0 due to being a full volume file, then this fails
// If there are any volumes that have progress, return those. If not, move on.
var currentlyReadingChapter = volumeChapters
.OrderBy(c => c.Number.AsDouble(), _chapterSortComparer)
.OrderBy(c => c.MinNumber, _chapterSortComparerDefaultLast)
.FirstOrDefault(chapter => chapter.PagesRead < chapter.Pages && chapter.PagesRead > 0);
if (currentlyReadingChapter != null) return currentlyReadingChapter;

// Order with volume 0 last so we prefer the natural order
return FindNextReadingChapter(volumes.OrderBy(v => v.MinNumber, SortComparerZeroLast.Default)
.SelectMany(v => v.Chapters.OrderBy(c => c.Number.AsDouble()))
return FindNextReadingChapter(volumes.OrderBy(v => v.MinNumber, _chapterSortComparerDefaultLast)
.SelectMany(v => v.Chapters.OrderBy(c => c.SortOrder))
.ToList());
}
@@ -606,7 +633,7 @@ public class ReaderService : IReaderService
}

private static int GetNextChapterId(IEnumerable<ChapterDto> chapters, string currentChapterNumber, Func<ChapterDto, string> accessor)
private static int GetNextChapterId(IEnumerable<ChapterDto> chapters, float currentChapterNumber, Func<ChapterDto, float> accessor)
{
var next = false;
var chaptersList = chapters.ToList();

@@ -636,8 +663,8 @@ public class ReaderService : IReaderService
foreach (var volume in volumes.OrderBy(v => v.MinNumber))
{
var chapters = volume.Chapters
.Where(c => !c.IsSpecial && Parser.MaxNumberFromRange(c.Range) <= chapterNumber)
.OrderBy(c => c.Number.AsFloat());
.Where(c => !c.IsSpecial && c.MaxNumber <= chapterNumber)
.OrderBy(c => c.MinNumber);
await MarkChaptersAsRead(user, volume.SeriesId, chapters.ToList());
}
}
@@ -770,6 +797,7 @@ public class ReaderService : IReaderService
case LibraryType.Manga:
return "Chapter" + (includeSpace ? " " : string.Empty);
case LibraryType.Comic:
case LibraryType.ComicVine:
if (includeHash) {
return "Issue #";
}
@@ -2,6 +2,7 @@
using API.Data.Metadata;
using API.Entities.Enums;
using API.Services.Tasks.Scanner.Parser;
using Microsoft.Extensions.Logging;

namespace API.Services;
#nullable enable

@@ -12,7 +13,7 @@ public interface IReadingItemService
int GetNumberOfPages(string filePath, MangaFormat format);
string GetCoverImage(string filePath, string fileName, MangaFormat format, EncodeFormat encodeFormat, CoverImageSize size = CoverImageSize.Default);
void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1);
ParserInfo? ParseFile(string path, string rootPath, LibraryType type);
ParserInfo? ParseFile(string path, string rootPath, string libraryRoot, LibraryType type);
}

public class ReadingItemService : IReadingItemService
@@ -21,16 +22,27 @@ public class ReadingItemService : IReadingItemService
private readonly IBookService _bookService;
private readonly IImageService _imageService;
private readonly IDirectoryService _directoryService;
private readonly IDefaultParser _defaultParser;
private readonly ILogger<ReadingItemService> _logger;
private readonly BasicParser _basicParser;
private readonly ComicVineParser _comicVineParser;
private readonly ImageParser _imageParser;
private readonly BookParser _bookParser;
private readonly PdfParser _pdfParser;

public ReadingItemService(IArchiveService archiveService, IBookService bookService, IImageService imageService, IDirectoryService directoryService)
public ReadingItemService(IArchiveService archiveService, IBookService bookService, IImageService imageService,
IDirectoryService directoryService, ILogger<ReadingItemService> logger)
{
_archiveService = archiveService;
_bookService = bookService;
_imageService = imageService;
_directoryService = directoryService;
_logger = logger;

_defaultParser = new DefaultParser(directoryService);
_comicVineParser = new ComicVineParser(directoryService);
_imageParser = new ImageParser(directoryService);
_bookParser = new BookParser(directoryService, bookService, _basicParser);
_pdfParser = new PdfParser(directoryService);
_basicParser = new BasicParser(directoryService, _imageParser);
}

/// <summary>
@@ -59,77 +71,15 @@ public class ReadingItemService : IReadingItemService
/// <param name="path">Path of a file</param>
/// <param name="rootPath"></param>
/// <param name="type">Library type to determine parsing to perform</param>
public ParserInfo? ParseFile(string path, string rootPath, LibraryType type)
public ParserInfo? ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
{
var info = Parse(path, rootPath, type);
var info = Parse(path, rootPath, libraryRoot, type);
if (info == null)
{
_logger.LogError("Unable to parse any meaningful information out of file {FilePath}", path);
return null;
}

// This catches when original library type is Manga/Comic and when parsing with non
if (Parser.IsEpub(path) && Parser.ParseVolume(info.Series) != Parser.LooseLeafVolume) // Shouldn't this be info.Volume != DefaultVolume?
{
var hasVolumeInTitle = !Parser.ParseVolume(info.Title)
.Equals(Parser.LooseLeafVolume);
var hasVolumeInSeries = !Parser.ParseVolume(info.Series)
.Equals(Parser.LooseLeafVolume);

if (string.IsNullOrEmpty(info.ComicInfo?.Volume) && hasVolumeInTitle && (hasVolumeInSeries || string.IsNullOrEmpty(info.Series)))
{
// This is likely a light novel for which we can set series from parsed title
info.Series = Parser.ParseSeries(info.Title);
info.Volumes = Parser.ParseVolume(info.Title);
}
else
{
var info2 = _defaultParser.Parse(path, rootPath, LibraryType.Book);
info.Merge(info2);
}

}

// This is first time ComicInfo is called
info.ComicInfo = GetComicInfo(path);
if (info.ComicInfo == null) return info;

if (!string.IsNullOrEmpty(info.ComicInfo.Volume))
{
info.Volumes = info.ComicInfo.Volume;
}
if (!string.IsNullOrEmpty(info.ComicInfo.Series))
{
info.Series = info.ComicInfo.Series.Trim();
}
if (!string.IsNullOrEmpty(info.ComicInfo.Number))
{
info.Chapters = info.ComicInfo.Number;
}

// Patch is SeriesSort from ComicInfo
if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
{
info.SeriesSort = info.ComicInfo.TitleSort.Trim();
}

if (!string.IsNullOrEmpty(info.ComicInfo.Format) && Parser.HasComicInfoSpecial(info.ComicInfo.Format))
{
info.IsSpecial = true;
info.Chapters = Parser.DefaultChapter;
info.Volumes = Parser.LooseLeafVolume;
}

if (!string.IsNullOrEmpty(info.ComicInfo.SeriesSort))
{
info.SeriesSort = info.ComicInfo.SeriesSort.Trim();
}

if (!string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
{
info.LocalizedSeries = info.ComicInfo.LocalizedSeries.Trim();
}

return info;
}
@@ -216,8 +166,29 @@ public class ReadingItemService : IReadingItemService
/// <param name="rootPath"></param>
/// <param name="type"></param>
/// <returns></returns>
private ParserInfo? Parse(string path, string rootPath, LibraryType type)
private ParserInfo? Parse(string path, string rootPath, string libraryRoot, LibraryType type)
{
return Parser.IsEpub(path) ? _bookService.ParseInfo(path) : _defaultParser.Parse(path, rootPath, type);
if (_comicVineParser.IsApplicable(path, type))
{
return _comicVineParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
}
if (_imageParser.IsApplicable(path, type))
{
return _imageParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
}
if (_bookParser.IsApplicable(path, type))
{
return _bookParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
}
if (_pdfParser.IsApplicable(path, type))
{
return _pdfParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
}
if (_basicParser.IsApplicable(path, type))
{
return _basicParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
}

return null;
}
}
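The Parse rewrite above is a first-match-wins chain, checked from the most specific parser (ComicVine) down to the basic fallback. A reduced sketch of that dispatch pattern, with a hypothetical interface standing in for the concrete parsers (LibraryType, ParserInfo, and ComicInfo are the existing API types used above):

// Sketch: first-match-wins parser dispatch. Order matters; specific parsers must be
// registered ahead of the generic fallback, mirroring the chain in Parse above.
public interface IParserSketch
{
    bool IsApplicable(string path, LibraryType type);
    ParserInfo? Parse(string path, string rootPath, string libraryRoot, LibraryType type, ComicInfo? comicInfo);
}

public static class ParserChainSketch
{
    public static ParserInfo? Parse(IReadOnlyList<IParserSketch> orderedParsers,
        string path, string rootPath, string libraryRoot, LibraryType type, ComicInfo? comicInfo)
    {
        foreach (var parser in orderedParsers)
        {
            if (!parser.IsApplicable(path, type)) continue;
            return parser.Parse(path, rootPath, libraryRoot, type, comicInfo);
        }
        return null; // no parser claimed the file
    }
}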
@@ -36,8 +36,8 @@ public interface IReadingListService
Task<bool> AddChaptersToReadingList(int seriesId, IList<int> chapterIds,
ReadingList readingList);

Task<CblImportSummaryDto> ValidateCblFile(int userId, CblReadingList cblReading);
Task<CblImportSummaryDto> CreateReadingListFromCbl(int userId, CblReadingList cblReading, bool dryRun = false);
Task<CblImportSummaryDto> ValidateCblFile(int userId, CblReadingList cblReading, bool useComicLibraryMatching = false);
Task<CblImportSummaryDto> CreateReadingListFromCbl(int userId, CblReadingList cblReading, bool dryRun = false, bool useComicLibraryMatching = false);
Task CalculateStartAndEndDates(ReadingList readingListWithItems);
/// <summary>
/// This is expected to be called from ProcessSeries and has the Full Series present. Will generate on the default admin user.

@@ -46,6 +46,8 @@ public interface IReadingListService
/// <param name="library"></param>
/// <returns></returns>
Task CreateReadingListsFromSeries(Series series, Library library);

Task CreateReadingListsFromSeries(int libraryId, int seriesId);
}

/// <summary>

@@ -57,7 +59,7 @@ public class ReadingListService : IReadingListService
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<ReadingListService> _logger;
private readonly IEventHub _eventHub;
private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = ChapterSortComparerZeroFirst.Default;
private readonly ChapterSortComparerDefaultFirst _chapterSortComparerForInChapterSorting = ChapterSortComparerDefaultFirst.Default;
private static readonly Regex JustNumbers = new Regex(@"^\d+$", RegexOptions.Compiled | RegexOptions.IgnoreCase,
Parser.RegexTimeout);
@@ -391,8 +393,8 @@ public class ReadingListService : IReadingListService

var existingChapterExists = readingList.Items.Select(rli => rli.ChapterId).ToHashSet();
var chaptersForSeries = (await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds, ChapterIncludes.Volumes))
.OrderBy(c => Parser.MinNumberFromRange(c.Volume.Name))
.ThenBy(x => x.Number.AsDouble(), _chapterSortComparerForInChapterSorting)
.OrderBy(c => c.Volume.MinNumber)
.ThenBy(x => x.MinNumber, _chapterSortComparerForInChapterSorting)
.ToList();

var index = readingList.Items.Count == 0 ? 0 : lastOrder + 1;
@@ -407,6 +409,20 @@ public class ReadingListService : IReadingListService
return index > lastOrder + 1;
}

/// <summary>
/// Create Reading lists from a Series
/// </summary>
/// <remarks>Execute this from Hangfire</remarks>
/// <param name="libraryId"></param>
/// <param name="seriesId"></param>
public async Task CreateReadingListsFromSeries(int libraryId, int seriesId)
{
var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId);
if (series == null || library == null) return;
await CreateReadingListsFromSeries(series, library);
}

public async Task CreateReadingListsFromSeries(Series series, Library library)
{
if (!library.ManageReadingLists) return;
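Given the remark that the id-based overload above is meant to run from Hangfire, a typical call site would enqueue it roughly like this (illustrative; the actual scheduling lives elsewhere in the codebase):

// Sketch: fire-and-forget the reading-list generation via Hangfire.
BackgroundJob.Enqueue<IReadingListService>(s => s.CreateReadingListsFromSeries(libraryId, seriesId));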
@@ -514,7 +530,8 @@ public class ReadingListService : IReadingListService
/// </summary>
/// <param name="userId"></param>
/// <param name="cblReading"></param>
public async Task<CblImportSummaryDto> ValidateCblFile(int userId, CblReadingList cblReading)
/// <param name="useComicLibraryMatching">When true, will force ComicVine library naming conventions: Series (Year) for Series name matching.</param>
public async Task<CblImportSummaryDto> ValidateCblFile(int userId, CblReadingList cblReading, bool useComicLibraryMatching = false)
{
var importSummary = new CblImportSummaryDto
{

@@ -536,9 +553,14 @@ public class ReadingListService : IReadingListService
});
}

var uniqueSeries = cblReading.Books.Book.Select(b => Parser.Normalize(b.Series)).Distinct().ToList();

var uniqueSeries = GetUniqueSeries(cblReading, useComicLibraryMatching);
var userSeries =
(await _unitOfWork.SeriesRepository.GetAllSeriesByNameAsync(uniqueSeries, userId, SeriesIncludes.Chapters)).ToList();

// How can we match properly with ComicVine library when year is part of the series unless we do this in 2 passes and see which has a better match

if (!userSeries.Any())
{
// Report that no series exist in the reading list
@@ -568,6 +590,20 @@ public class ReadingListService : IReadingListService
return importSummary;
}

private static string GetSeriesFormatting(CblBook book, bool useComicLibraryMatching)
{
return useComicLibraryMatching ? $"{book.Series} ({book.Volume})" : book.Series;
}

private static List<string> GetUniqueSeries(CblReadingList cblReading, bool useComicLibraryMatching)
{
if (useComicLibraryMatching)
{
return cblReading.Books.Book.Select(b => Parser.Normalize(GetSeriesFormatting(b, useComicLibraryMatching))).Distinct().ToList();
}
return cblReading.Books.Book.Select(b => Parser.Normalize(GetSeriesFormatting(b, useComicLibraryMatching))).Distinct().ToList();
}

/// <summary>
/// Imports (or pretends to) a cbl into a reading list. Call <see cref="ValidateCblFile"/> first!
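For clarity, with useComicLibraryMatching enabled the matching key folds the CBL volume (year) into the series name before normalization; the values below are illustrative only:

// Illustrative: a CblBook with Series = "Birds of Prey" and Volume = "2010"
// is matched against user series under the normalized key of "Birds of Prey (2010)".
var key = Parser.Normalize($"{book.Series} ({book.Volume})");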
@@ -575,8 +611,9 @@ public class ReadingListService : IReadingListService
/// <param name="userId"></param>
/// <param name="cblReading"></param>
/// <param name="dryRun"></param>
/// <param name="useComicLibraryMatching">When true, will force ComicVine library naming conventions: Series (Year) for Series name matching.</param>
/// <returns></returns>
public async Task<CblImportSummaryDto> CreateReadingListFromCbl(int userId, CblReadingList cblReading, bool dryRun = false)
public async Task<CblImportSummaryDto> CreateReadingListFromCbl(int userId, CblReadingList cblReading, bool dryRun = false, bool useComicLibraryMatching = false)
{
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId, AppUserIncludes.ReadingListsWithItems);
_logger.LogDebug("Importing {ReadingListName} CBL for User {UserName}", cblReading.Name, user!.UserName);

@@ -588,11 +625,11 @@ public class ReadingListService : IReadingListService
SuccessfulInserts = new List<CblBookResult>()
};

var uniqueSeries = cblReading.Books.Book.Select(b => Parser.Normalize(b.Series)).Distinct().ToList();
var uniqueSeries = GetUniqueSeries(cblReading, useComicLibraryMatching);
var userSeries =
(await _unitOfWork.SeriesRepository.GetAllSeriesByNameAsync(uniqueSeries, userId, SeriesIncludes.Chapters)).ToList();
var allSeries = userSeries.ToDictionary(s => Parser.Normalize(s.Name));
var allSeriesLocalized = userSeries.ToDictionary(s => Parser.Normalize(s.LocalizedName));
var allSeries = userSeries.ToDictionary(s => s.NormalizedName);
var allSeriesLocalized = userSeries.ToDictionary(s => s.NormalizedLocalizedName);

var readingListNameNormalized = Parser.Normalize(cblReading.Name);
// Get all the user's reading lists

@@ -619,7 +656,7 @@ public class ReadingListService : IReadingListService
readingList.Items ??= new List<ReadingListItem>();
foreach (var (book, i) in cblReading.Books.Book.Select((value, i) => ( value, i )))
{
var normalizedSeries = Parser.Normalize(book.Series);
var normalizedSeries = Parser.Normalize(GetSeriesFormatting(book, useComicLibraryMatching));
if (!allSeries.TryGetValue(normalizedSeries, out var bookSeries) && !allSeriesLocalized.TryGetValue(normalizedSeries, out bookSeries))
{
importSummary.Results.Add(new CblBookResult(book)
@@ -633,7 +670,9 @@ public class ReadingListService : IReadingListService
var bookVolume = string.IsNullOrEmpty(book.Volume)
? Parser.LooseLeafVolume
: book.Volume;
var matchingVolume = bookSeries.Volumes.Find(v => bookVolume == v.Name) ?? bookSeries.Volumes.GetLooseLeafVolumeOrDefault();
var matchingVolume = bookSeries.Volumes.Find(v => bookVolume == v.Name)
?? bookSeries.Volumes.GetLooseLeafVolumeOrDefault()
?? bookSeries.Volumes.GetSpecialVolumeOrDefault();
if (matchingVolume == null)
{
importSummary.Results.Add(new CblBookResult(book)

@@ -645,11 +684,11 @@ public class ReadingListService : IReadingListService
continue;
}

// We need to handle chapter 0 or empty string when it's just a volume
// We need to handle default chapter or empty string when it's just a volume
var bookNumber = string.IsNullOrEmpty(book.Number)
? Parser.DefaultChapter
: book.Number;
var chapter = matchingVolume.Chapters.FirstOrDefault(c => c.Number == bookNumber);
var chapter = matchingVolume.Chapters.FirstOrDefault(c => c.Range == bookNumber);
if (chapter == null)
{
importSummary.Results.Add(new CblBookResult(book)
@@ -707,7 +746,7 @@ public class ReadingListService : IReadingListService
private static IList<Series> FindCblImportConflicts(IEnumerable<Series> userSeries)
{
var dict = new HashSet<string>();
return userSeries.Where(series => !dict.Add(Parser.Normalize(series.Name))).ToList();
return userSeries.Where(series => !dict.Add(series.NormalizedName)).ToList();
}

private static bool IsCblEmpty(CblReadingList cblReading, CblImportSummaryDto importSummary,
@@ -40,7 +40,7 @@ public interface ISeriesService
Task<string> FormatChapterTitle(int userId, ChapterDto chapter, LibraryType libraryType, bool withHash = true);
Task<string> FormatChapterTitle(int userId, Chapter chapter, LibraryType libraryType, bool withHash = true);

Task<string> FormatChapterTitle(int userId, bool isSpecial, LibraryType libraryType, string? chapterTitle,
Task<string> FormatChapterTitle(int userId, bool isSpecial, LibraryType libraryType, string chapterRange, string? chapterTitle,
bool withHash);
Task<string> FormatChapterName(int userId, LibraryType libraryType, bool withHash = false);
Task<NextExpectedChapterDto> GetEstimatedChapterCreationDate(int seriesId, int userId);

@@ -59,7 +59,7 @@ public class SeriesService : ISeriesService
{
ExpectedDate = null,
ChapterNumber = 0,
VolumeNumber = 0
VolumeNumber = Parser.LooseLeafVolumeNumber
};

public SeriesService(IUnitOfWork unitOfWork, IEventHub eventHub, ITaskScheduler taskScheduler,
@@ -81,21 +81,21 @@ public class SeriesService : ISeriesService
public static Chapter? GetFirstChapterForMetadata(Series series)
{
var sortedVolumes = series.Volumes
.Where(v => float.TryParse(v.Name, CultureInfo.InvariantCulture, out var parsedValue) && parsedValue != Parser.LooseLeafVolumeNumber)
.OrderBy(v => float.TryParse(v.Name, CultureInfo.InvariantCulture, out var parsedValue) ? parsedValue : float.MaxValue);
.Where(v => v.MinNumber.IsNot(Parser.LooseLeafVolumeNumber))
.OrderBy(v => v.MinNumber);
var minVolumeNumber = sortedVolumes.MinBy(v => v.MinNumber);

var allChapters = series.Volumes
.SelectMany(v => v.Chapters.OrderBy(c => c.Number.AsFloat(), ChapterSortComparer.Default))
.SelectMany(v => v.Chapters.OrderBy(c => c.MinNumber, ChapterSortComparerDefaultLast.Default))
.ToList();
var minChapter = allChapters
.FirstOrDefault();

if (minVolumeNumber != null && minChapter != null && float.TryParse(minChapter.Number, CultureInfo.InvariantCulture, out var chapNum) &&
(chapNum >= minVolumeNumber.MinNumber || chapNum == Parser.DefaultChapterNumber))
if (minVolumeNumber != null && minChapter != null &&
(minChapter.MinNumber >= minVolumeNumber.MinNumber || minChapter.MinNumber.Is(Parser.DefaultChapterNumber)))
{
return minVolumeNumber.Chapters.MinBy(c => c.Number.AsFloat(), ChapterSortComparer.Default);
return minVolumeNumber.Chapters.MinBy(c => c.MinNumber, ChapterSortComparerDefaultLast.Default);
}

return minChapter;
@@ -171,7 +171,7 @@ public class SeriesService : ISeriesService
}

if (updateSeriesMetadataDto.CollectionTags.Any())
if (updateSeriesMetadataDto.CollectionTags.Count > 0)
{
var allCollectionTags = (await _unitOfWork.CollectionTagRepository
.GetAllTagsByNamesAsync(updateSeriesMetadataDto.CollectionTags.Select(t => Parser.Normalize(t.Title)))).ToList();

@@ -195,7 +195,7 @@ public class SeriesService : ISeriesService
}

if (updateSeriesMetadataDto.SeriesMetadata?.Tags != null && updateSeriesMetadataDto.SeriesMetadata.Tags.Any())
if (updateSeriesMetadataDto.SeriesMetadata?.Tags is {Count: > 0})
{
var allTags = (await _unitOfWork.TagRepository
.GetAllTagsByNameAsync(updateSeriesMetadataDto.SeriesMetadata.Tags.Select(t => Parser.Normalize(t.Title))))
@@ -207,68 +207,82 @@ public class SeriesService : ISeriesService
}, () => series.Metadata.TagsLocked = true);
}

if (PersonHelper.HasAnyPeople(updateSeriesMetadataDto.SeriesMetadata))
{
void HandleAddPerson(Person person)
{
PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
}

series.Metadata.People ??= new List<Person>();
var allWriters = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Writer,
updateSeriesMetadataDto.SeriesMetadata!.Writers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Writer, updateSeriesMetadataDto.SeriesMetadata!.Writers, series, allWriters.AsReadOnly(),
HandleAddPerson, () => series.Metadata.WriterLocked = true);

var allCharacters = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Character,
updateSeriesMetadataDto.SeriesMetadata!.Characters.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Character, updateSeriesMetadataDto.SeriesMetadata.Characters, series, allCharacters.AsReadOnly(),
HandleAddPerson, () => series.Metadata.CharacterLocked = true);

var allColorists = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Colorist,
updateSeriesMetadataDto.SeriesMetadata!.Colorists.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Colorist, updateSeriesMetadataDto.SeriesMetadata.Colorists, series, allColorists.AsReadOnly(),
HandleAddPerson, () => series.Metadata.ColoristLocked = true);

var allEditors = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Editor,
updateSeriesMetadataDto.SeriesMetadata!.Editors.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Editor, updateSeriesMetadataDto.SeriesMetadata.Editors, series, allEditors.AsReadOnly(),
HandleAddPerson, () => series.Metadata.EditorLocked = true);

var allInkers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Inker,
updateSeriesMetadataDto.SeriesMetadata!.Inkers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Inker, updateSeriesMetadataDto.SeriesMetadata.Inkers, series, allInkers.AsReadOnly(),
HandleAddPerson, () => series.Metadata.InkerLocked = true);

var allLetterers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Letterer,
updateSeriesMetadataDto.SeriesMetadata!.Letterers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Letterer, updateSeriesMetadataDto.SeriesMetadata.Letterers, series, allLetterers.AsReadOnly(),
HandleAddPerson, () => series.Metadata.LettererLocked = true);

var allPencillers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Penciller,
updateSeriesMetadataDto.SeriesMetadata!.Pencillers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Penciller, updateSeriesMetadataDto.SeriesMetadata.Pencillers, series, allPencillers.AsReadOnly(),
HandleAddPerson, () => series.Metadata.PencillerLocked = true);

var allPublishers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Publisher,
updateSeriesMetadataDto.SeriesMetadata!.Publishers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Publisher, updateSeriesMetadataDto.SeriesMetadata.Publishers, series, allPublishers.AsReadOnly(),
HandleAddPerson, () => series.Metadata.PublisherLocked = true);

var allTranslators = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Translator,
updateSeriesMetadataDto.SeriesMetadata!.Translators.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Translator, updateSeriesMetadataDto.SeriesMetadata.Translators, series, allTranslators.AsReadOnly(),
HandleAddPerson, () => series.Metadata.TranslatorLocked = true);

var allCoverArtists = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.CoverArtist,
updateSeriesMetadataDto.SeriesMetadata!.CoverArtists.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.CoverArtist, updateSeriesMetadataDto.SeriesMetadata.CoverArtists, series, allCoverArtists.AsReadOnly(),
HandleAddPerson, () => series.Metadata.CoverArtistLocked = true);
}

if (updateSeriesMetadataDto.SeriesMetadata != null)
{
if (PersonHelper.HasAnyPeople(updateSeriesMetadataDto.SeriesMetadata))
{
void HandleAddPerson(Person person)
{
PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
}

series.Metadata.People ??= new List<Person>();
var allWriters = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Writer,
updateSeriesMetadataDto.SeriesMetadata!.Writers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Writer, updateSeriesMetadataDto.SeriesMetadata.Writers, series, allWriters.AsReadOnly(),
HandleAddPerson, () => series.Metadata.WriterLocked = true);

var allCharacters = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Character,
updateSeriesMetadataDto.SeriesMetadata!.Characters.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Character, updateSeriesMetadataDto.SeriesMetadata.Characters, series, allCharacters.AsReadOnly(),
HandleAddPerson, () => series.Metadata.CharacterLocked = true);

var allColorists = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Colorist,
updateSeriesMetadataDto.SeriesMetadata!.Colorists.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Colorist, updateSeriesMetadataDto.SeriesMetadata.Colorists, series, allColorists.AsReadOnly(),
HandleAddPerson, () => series.Metadata.ColoristLocked = true);

var allEditors = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Editor,
updateSeriesMetadataDto.SeriesMetadata!.Editors.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Editor, updateSeriesMetadataDto.SeriesMetadata.Editors, series, allEditors.AsReadOnly(),
HandleAddPerson, () => series.Metadata.EditorLocked = true);

var allInkers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Inker,
updateSeriesMetadataDto.SeriesMetadata!.Inkers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Inker, updateSeriesMetadataDto.SeriesMetadata.Inkers, series, allInkers.AsReadOnly(),
HandleAddPerson, () => series.Metadata.InkerLocked = true);

var allLetterers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Letterer,
updateSeriesMetadataDto.SeriesMetadata!.Letterers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Letterer, updateSeriesMetadataDto.SeriesMetadata.Letterers, series, allLetterers.AsReadOnly(),
HandleAddPerson, () => series.Metadata.LettererLocked = true);

var allPencillers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Penciller,
updateSeriesMetadataDto.SeriesMetadata!.Pencillers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Penciller, updateSeriesMetadataDto.SeriesMetadata.Pencillers, series, allPencillers.AsReadOnly(),
HandleAddPerson, () => series.Metadata.PencillerLocked = true);

var allPublishers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Publisher,
updateSeriesMetadataDto.SeriesMetadata!.Publishers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Publisher, updateSeriesMetadataDto.SeriesMetadata.Publishers, series, allPublishers.AsReadOnly(),
HandleAddPerson, () => series.Metadata.PublisherLocked = true);

var allImprints = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Imprint,
updateSeriesMetadataDto.SeriesMetadata!.Imprints.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Imprint, updateSeriesMetadataDto.SeriesMetadata.Imprints, series, allImprints.AsReadOnly(),
HandleAddPerson, () => series.Metadata.ImprintLocked = true);

var allTeams = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Team,
updateSeriesMetadataDto.SeriesMetadata!.Imprints.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Team, updateSeriesMetadataDto.SeriesMetadata.Teams, series, allTeams.AsReadOnly(),
HandleAddPerson, () => series.Metadata.TeamLocked = true);

var allLocations = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Location,
updateSeriesMetadataDto.SeriesMetadata!.Imprints.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Location, updateSeriesMetadataDto.SeriesMetadata.Locations, series, allLocations.AsReadOnly(),
HandleAddPerson, () => series.Metadata.LocationLocked = true);

var allTranslators = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Translator,
updateSeriesMetadataDto.SeriesMetadata!.Translators.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Translator, updateSeriesMetadataDto.SeriesMetadata.Translators, series, allTranslators.AsReadOnly(),
HandleAddPerson, () => series.Metadata.TranslatorLocked = true);

var allCoverArtists = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.CoverArtist,
updateSeriesMetadataDto.SeriesMetadata!.CoverArtists.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.CoverArtist, updateSeriesMetadataDto.SeriesMetadata.CoverArtists, series, allCoverArtists.AsReadOnly(),
HandleAddPerson, () => series.Metadata.CoverArtistLocked = true);
}

series.Metadata.AgeRatingLocked = updateSeriesMetadataDto.SeriesMetadata.AgeRatingLocked;
series.Metadata.PublicationStatusLocked = updateSeriesMetadataDto.SeriesMetadata.PublicationStatusLocked;
series.Metadata.LanguageLocked = updateSeriesMetadataDto.SeriesMetadata.LanguageLocked;
@@ -278,6 +292,7 @@ public class SeriesService : ISeriesService
series.Metadata.ColoristLocked = updateSeriesMetadataDto.SeriesMetadata.ColoristLocked;
series.Metadata.EditorLocked = updateSeriesMetadataDto.SeriesMetadata.EditorLocked;
series.Metadata.InkerLocked = updateSeriesMetadataDto.SeriesMetadata.InkerLocked;
series.Metadata.ImprintLocked = updateSeriesMetadataDto.SeriesMetadata.ImprintLocked;
series.Metadata.LettererLocked = updateSeriesMetadataDto.SeriesMetadata.LettererLocked;
series.Metadata.PencillerLocked = updateSeriesMetadataDto.SeriesMetadata.PencillerLocked;
series.Metadata.PublisherLocked = updateSeriesMetadataDto.SeriesMetadata.PublisherLocked;
@@ -481,74 +496,65 @@ public class SeriesService : ISeriesService

var libraryType = await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(series.LibraryId);
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId))
.OrderBy(v => Parser.MinNumberFromRange(v.Name))
.ToList();
var bookTreatment = libraryType is LibraryType.Book or LibraryType.LightNovel;
var volumeLabel = await _localizationService.Translate(userId, "volume-num", string.Empty);
var volumes = await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId);

// For books, the Name of the Volume is remapped to the actual name of the book, rather than Volume number.
var processedVolumes = new List<VolumeDto>();
if (libraryType is LibraryType.Book or LibraryType.LightNovel)
foreach (var volume in volumes)
{
var volumeLabel = await _localizationService.Translate(userId, "volume-num", string.Empty);
foreach (var volume in volumes)
if (volume.IsLooseLeaf() || volume.IsSpecial())
{
continue;
}

volume.Chapters = volume.Chapters
.OrderBy(d => d.MinNumber, ChapterSortComparerDefaultLast.Default)
.ToList();

if (RenameVolumeName(volume, libraryType, volumeLabel) || (bookTreatment && !volume.IsSpecial()))
{
volume.Chapters = volume.Chapters
.OrderBy(d => d.Number.AsDouble(), ChapterSortComparer.Default)
.ToList();
var firstChapter = volume.Chapters.First();
// On Books, skip volumes that are specials, since these will be shown
if (firstChapter.IsSpecial) continue;
RenameVolumeName(firstChapter, volume, libraryType, volumeLabel);
processedVolumes.Add(volume);
}
}
else
{
processedVolumes = volumes.Where(v => v.MinNumber > 0).ToList();
processedVolumes.ForEach(v =>
{
v.Name = $"Volume {v.Name}";
v.Chapters = v.Chapters.OrderBy(d => d.Number.AsDouble(), ChapterSortComparer.Default).ToList();
});
}

var specials = new List<ChapterDto>();
var chapters = volumes.SelectMany(v => v.Chapters.Select(c =>
{
if (v.IsLooseLeaf()) return c;
c.VolumeTitle = v.Name;
return c;
}).OrderBy(c => c.Number.AsFloat(), ChapterSortComparer.Default)).ToList();
// Why isn't this doing a check if chapter is not special as it wont get included
var chapters = volumes
.SelectMany(v => v.Chapters
.Select(c =>
{
if (v.IsLooseLeaf() || v.IsSpecial()) return c;
c.VolumeTitle = v.Name;
return c;
})
.OrderBy(c => c.SortOrder))
.ToList();

foreach (var chapter in chapters)
{
chapter.Title = await FormatChapterTitle(userId, chapter, libraryType);
if (!chapter.IsSpecial) continue;
// if (!string.IsNullOrEmpty(chapter.TitleName)) chapter.Title = chapter.TitleName;
// else chapter.Title = await FormatChapterTitle(userId, chapter, libraryType);

if (!string.IsNullOrEmpty(chapter.TitleName)) chapter.Title = chapter.TitleName;
chapter.Title = await FormatChapterTitle(userId, chapter, libraryType);

if (!chapter.IsSpecial) continue;
specials.Add(chapter);
}

// Don't show chapter 0 (aka single volume chapters) in the Chapters tab or books that are just single numbers (they show as volumes)
IEnumerable<ChapterDto> retChapters;
if (libraryType is LibraryType.Book or LibraryType.LightNovel)
{
retChapters = Array.Empty<ChapterDto>();
} else
{
retChapters = chapters
.Where(ShouldIncludeChapter);
}
IEnumerable<ChapterDto> retChapters = bookTreatment ? Array.Empty<ChapterDto>() : chapters.Where(ShouldIncludeChapter);

var storylineChapters = volumes
.WhereLooseLeaf()
.SelectMany(v => v.Chapters.Where(c => !c.IsSpecial))
.OrderBy(c => c.Number.AsFloat(), ChapterSortComparer.Default)
.OrderBy(c => c.SortOrder)
.ToList();

// When there's chapters without a volume number revert to chapter sorting only as opposed to volume then chapter
if (storylineChapters.Any()) {
retChapters = retChapters.OrderBy(c => c.Number.AsFloat(), ChapterSortComparer.Default);
if (storylineChapters.Count > 0) {
retChapters = retChapters.OrderBy(c => c.SortOrder, ChapterSortComparerDefaultLast.Default);
}

return new SeriesDetailDto
@ -569,68 +575,78 @@ public class SeriesService : ISeriesService
|
|||
/// <returns></returns>
|
||||
private static bool ShouldIncludeChapter(ChapterDto chapter)
|
||||
{
|
||||
return !chapter.IsSpecial && !chapter.Number.Equals(Parser.DefaultChapter);
|
||||
return !chapter.IsSpecial && chapter.MinNumber.IsNot(Parser.DefaultChapterNumber);
|
||||
}
|
||||
|
||||
public static void RenameVolumeName(ChapterDto firstChapter, VolumeDto volume, LibraryType libraryType, string volumeLabel = "Volume")
|
||||
public static bool RenameVolumeName(VolumeDto volume, LibraryType libraryType, string volumeLabel = "Volume")
|
||||
{
|
||||
// TODO: Move this into DB
|
||||
if (libraryType is LibraryType.Book or LibraryType.LightNovel)
|
||||
{
|
||||
var firstChapter = volume.Chapters.First();
|
||||
// On Books, skip volumes that are specials, since these will be shown
|
||||
if (firstChapter.IsSpecial) return false;
|
||||
if (string.IsNullOrEmpty(firstChapter.TitleName))
|
||||
{
|
||||
if (firstChapter.Range.Equals(Parser.LooseLeafVolume)) return;
|
||||
if (firstChapter.Range.Equals(Parser.LooseLeafVolume)) return false;
|
||||
var title = Path.GetFileNameWithoutExtension(firstChapter.Range);
|
||||
if (string.IsNullOrEmpty(title)) return;
|
||||
volume.Name += $" - {title}";
|
||||
if (string.IsNullOrEmpty(title)) return false;
|
||||
volume.Name += $" - {title}"; // OPDS smart list 7 (just pdfs) triggered this
|
||||
}
|
||||
else if (volume.Name != Parser.LooseLeafVolume)
|
||||
else if (!volume.IsLooseLeaf())
|
||||
{
|
||||
// If the titleName has Volume inside it, let's just send that back?
|
||||
volume.Name += $" - {firstChapter.TitleName}";
|
||||
volume.Name = firstChapter.TitleName;
|
||||
}
|
||||
// else
|
||||
// {
|
||||
// volume.Name += $"";
|
||||
// }
|
||||
|
||||
return;
|
||||
return true;
|
||||
}
|
||||
|
||||
volume.Name = $"{volumeLabel} {volume.Name}".Trim();
|
||||
volume.Name = $"{volumeLabel.Trim()} {volume.Name}".Trim();
|
||||
return true;
|
||||
}


public async Task<string> FormatChapterTitle(int userId, bool isSpecial, LibraryType libraryType, string? chapterTitle, bool withHash)
public async Task<string> FormatChapterTitle(int userId, bool isSpecial, LibraryType libraryType, string chapterRange, string? chapterTitle, bool withHash)
{
if (string.IsNullOrEmpty(chapterTitle)) throw new ArgumentException("Chapter Title cannot be null");
if (string.IsNullOrEmpty(chapterTitle) && (isSpecial || libraryType == LibraryType.Book)) throw new ArgumentException("Chapter Title cannot be null");

if (isSpecial)
{
return Parser.CleanSpecialTitle(chapterTitle);
return Parser.CleanSpecialTitle(chapterTitle!);
}

var hashSpot = withHash ? "#" : string.Empty;
return libraryType switch
var baseChapter = libraryType switch
{
LibraryType.Book => await _localizationService.Translate(userId, "book-num", chapterTitle),
LibraryType.LightNovel => await _localizationService.Translate(userId, "book-num", chapterTitle),
LibraryType.Comic => await _localizationService.Translate(userId, "issue-num", hashSpot, chapterTitle),
LibraryType.Manga => await _localizationService.Translate(userId, "chapter-num", chapterTitle),
LibraryType.Book => await _localizationService.Translate(userId, "book-num", chapterTitle!),
LibraryType.LightNovel => await _localizationService.Translate(userId, "book-num", chapterRange),
LibraryType.Comic => await _localizationService.Translate(userId, "issue-num", hashSpot, chapterRange),
LibraryType.ComicVine => await _localizationService.Translate(userId, "issue-num", hashSpot, chapterRange),
LibraryType.Manga => await _localizationService.Translate(userId, "chapter-num", chapterRange),
LibraryType.Image => await _localizationService.Translate(userId, "chapter-num", chapterRange),
_ => await _localizationService.Translate(userId, "chapter-num", ' ')
};

if (!string.IsNullOrEmpty(chapterTitle) && libraryType != LibraryType.Book && chapterTitle != chapterRange)
{
baseChapter += " - " + chapterTitle;
}


return baseChapter;
}
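The reworked FormatChapterTitle builds a base label from the library type (issues get an optional "#" prefix) and then appends the chapter title only when it adds information beyond the range. A rough sketch of that branching, with the localization lookup replaced by plain string formatting purely for illustration:

using System;

public enum DemoLibraryType { Manga, Comic, Book }

public static class ChapterTitleDemo
{
    // Hypothetical stand-in for the localized "issue-num"/"chapter-num"/"book-num" templates.
    public static string Format(DemoLibraryType type, string chapterRange, string? chapterTitle, bool withHash)
    {
        var hash = withHash ? "#" : string.Empty;
        var baseChapter = type switch
        {
            DemoLibraryType.Book => $"Book {chapterTitle}",
            DemoLibraryType.Comic => $"Issue {hash}{chapterRange}",
            _ => $"Chapter {chapterRange}"
        };

        // Only append the title when it differs from the range and this isn't a Book.
        if (!string.IsNullOrEmpty(chapterTitle) && type != DemoLibraryType.Book && chapterTitle != chapterRange)
        {
            baseChapter += " - " + chapterTitle;
        }

        return baseChapter;
    }

    public static void Main()
    {
        Console.WriteLine(Format(DemoLibraryType.Comic, "12", "The Long Halloween", withHash: true));
        // Issue #12 - The Long Halloween
    }
}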

public async Task<string> FormatChapterTitle(int userId, ChapterDto chapter, LibraryType libraryType, bool withHash = true)
{
return await FormatChapterTitle(userId, chapter.IsSpecial, libraryType, chapter.Title, withHash);
return await FormatChapterTitle(userId, chapter.IsSpecial, libraryType, chapter.Range, chapter.Title, withHash);
}

public async Task<string> FormatChapterTitle(int userId, Chapter chapter, LibraryType libraryType, bool withHash = true)
{
return await FormatChapterTitle(userId, chapter.IsSpecial, libraryType, chapter.Title, withHash);
return await FormatChapterTitle(userId, chapter.IsSpecial, libraryType, chapter.Range, chapter.Title, withHash);
}

// TODO: Refactor this out and use FormatChapterTitle instead across library
public async Task<string> FormatChapterName(int userId, LibraryType libraryType, bool withHash = false)
{
var hashSpot = withHash ? "#" : string.Empty;

@@ -639,6 +655,7 @@ public class SeriesService : ISeriesService
LibraryType.Book => await _localizationService.Translate(userId, "book-num", string.Empty),
LibraryType.LightNovel => await _localizationService.Translate(userId, "book-num", string.Empty),
LibraryType.Comic => await _localizationService.Translate(userId, "issue-num", hashSpot, string.Empty),
LibraryType.ComicVine => await _localizationService.Translate(userId, "issue-num", hashSpot, string.Empty),
LibraryType.Manga => await _localizationService.Translate(userId, "chapter-num", string.Empty),
_ => await _localizationService.Translate(userId, "chapter-num", ' ')
}).Trim();

@@ -677,6 +694,7 @@ public class SeriesService : ISeriesService
UpdateRelationForKind(dto.Prequels, series.Relations.Where(r => r.RelationKind == RelationKind.Prequel).ToList(), series, RelationKind.Prequel);
UpdateRelationForKind(dto.Sequels, series.Relations.Where(r => r.RelationKind == RelationKind.Sequel).ToList(), series, RelationKind.Sequel);
UpdateRelationForKind(dto.Editions, series.Relations.Where(r => r.RelationKind == RelationKind.Edition).ToList(), series, RelationKind.Edition);
UpdateRelationForKind(dto.Annuals, series.Relations.Where(r => r.RelationKind == RelationKind.Annual).ToList(), series, RelationKind.Annual);

if (!_unitOfWork.HasChanges()) return true;
return await _unitOfWork.CommitAsync();

@@ -783,16 +801,15 @@ public class SeriesService : ISeriesService
: (DateTime?)null;

// For number and volume number, we need the highest chapter, not the latest created
var lastChapter = chapters.MaxBy(c => c.Number.AsFloat())!;
float.TryParse(lastChapter.Number, NumberStyles.Number, CultureInfo.InvariantCulture,
out var lastChapterNumber);
var lastChapter = chapters.MaxBy(c => c.MaxNumber)!;
var lastChapterNumber = lastChapter.MaxNumber;

var lastVolumeNum = chapters.Select(c => c.Volume.MinNumber).Max();

var result = new NextExpectedChapterDto
{
ChapterNumber = 0,
VolumeNumber = 0,
VolumeNumber = Parser.LooseLeafVolumeNumber,
ExpectedDate = nextChapterExpected,
Title = string.Empty
};

@@ -336,7 +336,7 @@ public class StatisticService : IStatisticService
LibraryId = u.LibraryId,
ReadDate = u.LastModified,
ChapterId = u.ChapterId,
ChapterNumber = _context.Chapter.Single(c => c.Id == u.ChapterId).Number
ChapterNumber = _context.Chapter.Single(c => c.Id == u.ChapterId).MinNumber
})
.OrderByDescending(d => d.ReadDate)
.ToListAsync();

@@ -14,10 +14,11 @@ using AutoMapper;
using Microsoft.Extensions.Logging;

namespace API.Services;
#nullable enable

public interface ITachiyomiService
{
Task<ChapterDto?> GetLatestChapter(int seriesId, int userId);
Task<TachiyomiChapterDto?> GetLatestChapter(int seriesId, int userId);
Task<bool> MarkChaptersUntilAsRead(AppUser userWithProgress, int seriesId, float chapterNumber);
}

@@ -51,7 +52,7 @@ public class TachiyomiService : ITachiyomiService
/// If its a chapter, return the chapterDto as is.
/// If it's a volume, the volume number gets returned in the 'Number' attribute of a chapterDto encoded.
/// The volume number gets divided by 10,000 because that's how Tachiyomi interprets volumes</returns>
public async Task<ChapterDto?> GetLatestChapter(int seriesId, int userId)
public async Task<TachiyomiChapterDto?> GetLatestChapter(int seriesId, int userId)
{
var currentChapter = await _readerService.GetContinuePoint(seriesId, userId);

@@ -74,50 +75,48 @@ public class TachiyomiService : ITachiyomiService
{
var volumeChapter = _mapper.Map<ChapterDto>(volumes
[^1].Chapters
.OrderBy(c => c.Number.AsFloat(), ChapterSortComparerZeroFirst.Default)
.OrderBy(c => c.MinNumber, ChapterSortComparerDefaultFirst.Default)
.Last());
if (volumeChapter.Number == Parser.LooseLeafVolume)

if (volumeChapter.MinNumber.Is(Parser.LooseLeafVolumeNumber))
{
var volume = volumes.First(v => v.Id == volumeChapter.VolumeId);
return new ChapterDto()
{
// Use R to ensure that localization of underlying system doesn't affect the stringification
// https://docs.microsoft.com/en-us/globalization/locale/number-formatting-in-dotnet-framework
Number = (volume.MinNumber / 10_000f).ToString("R", EnglishCulture)
};
return CreateTachiyomiChapterDto(volume.MinNumber);
}

return new ChapterDto()
{
Number = (int.Parse(volumeChapter.Number) / 10_000f).ToString("R", EnglishCulture)
};
return CreateTachiyomiChapterDto(volumeChapter.MinNumber);
}

var lastChapter = looseLeafChapterVolume.Chapters
.OrderBy(c => double.Parse(c.Number, CultureInfo.InvariantCulture), ChapterSortComparer.Default)
.OrderBy(c => c.MinNumber, ChapterSortComparerDefaultLast.Default)
.Last();
return _mapper.Map<ChapterDto>(lastChapter);

return _mapper.Map<TachiyomiChapterDto>(lastChapter);
}

// There is progress, we now need to figure out the highest volume or chapter and return that.
var prevChapter = (await _unitOfWork.ChapterRepository.GetChapterDtoAsync(prevChapterId))!;

var volumeWithProgress = await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(prevChapter.VolumeId, userId);
var volumeWithProgress = (await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(prevChapter.VolumeId, userId))!;
// We only encode for single-file volumes
if (!volumeWithProgress!.IsLooseLeaf() && volumeWithProgress.Chapters.Count == 1)
if (!volumeWithProgress.IsLooseLeaf() && volumeWithProgress.Chapters.Count == 1)
{
// The progress is on a volume, encode it as a fake chapterDTO
return new ChapterDto()
{
// Use R to ensure that localization of underlying system doesn't affect the stringification
// https://docs.microsoft.com/en-us/globalization/locale/number-formatting-in-dotnet-framework
Number = (volumeWithProgress.MinNumber / 10_000f).ToString("R", EnglishCulture)

};
return CreateTachiyomiChapterDto(volumeWithProgress.MinNumber);
}

// Progress is just on a chapter, return as is
return prevChapter;
return _mapper.Map<TachiyomiChapterDto>(prevChapter);
}

private static TachiyomiChapterDto CreateTachiyomiChapterDto(float number)
{
return new TachiyomiChapterDto()
{
// Use R to ensure that localization of underlying system doesn't affect the stringification
// https://docs.microsoft.com/en-us/globalization/locale/number-formatting-in-dotnet-framework
Number = (number / 10_000f).ToString("R", EnglishCulture)
};
}
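The new CreateTachiyomiChapterDto helper centralizes the volume-to-chapter encoding: Tachiyomi only tracks a chapter number, so a volume is encoded as volume / 10,000, serialized with the round-trip ("R") format and an English culture so the decimal separator never varies by locale. A standalone illustration of just the arithmetic:

using System;
using System.Globalization;

public static class TachiyomiEncodingDemo
{
    private static readonly CultureInfo EnglishCulture = CultureInfo.GetCultureInfo("en-US");

    // Volume 3 becomes chapter "0.0003", volume 12 becomes "0.0012", etc.
    public static string EncodeVolumeAsChapter(float volumeNumber)
        => (volumeNumber / 10_000f).ToString("R", EnglishCulture);

    public static void Main()
    {
        Console.WriteLine(EncodeVolumeAsChapter(3));   // 0.0003
        Console.WriteLine(EncodeVolumeAsChapter(12));  // 0.0012
    }
}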

/// <summary>

@@ -328,13 +328,13 @@ public class TaskScheduler : ITaskScheduler
}
if (RunningAnyTasksByMethod(ScanTasks, ScanQueue))
{
_logger.LogInformation("A Library Scan is already running, rescheduling ScanLibrary in 3 hours");
_logger.LogInformation("A Scan is already running, rescheduling ScanLibrary in 3 hours");
BackgroundJob.Schedule(() => ScanLibrary(libraryId, force), TimeSpan.FromHours(3));
return;
}

_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force));
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force, true));
// When we do a scan, force cache to re-unpack in case page numbers change
BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheAndTempDirectories());
}

@@ -386,6 +386,7 @@ public class TaskScheduler : ITaskScheduler
}
if (RunningAnyTasksByMethod(ScanTasks, ScanQueue))
{
// BUG: This can end up triggering a ton of scan series calls (but i haven't seen in practice)
_logger.LogInformation("A Scan is already running, rescheduling ScanSeries in 10 minutes");
BackgroundJob.Schedule(() => ScanSeries(libraryId, seriesId, forceUpdate), TimeSpan.FromMinutes(10));
return;

@@ -427,8 +428,14 @@ public class TaskScheduler : ITaskScheduler
public static bool HasScanTaskRunningForLibrary(int libraryId, bool checkRunningJobs = true)
{
return
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, true}, ScanQueue, checkRunningJobs) ||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, false}, ScanQueue, checkRunningJobs);
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, true, true}, ScanQueue,
checkRunningJobs) ||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, false, true}, ScanQueue,
checkRunningJobs) ||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, true, false}, ScanQueue,
checkRunningJobs) ||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, false, false}, ScanQueue,
checkRunningJobs);
}
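ScanLibrary now takes a third boolean, so the duplicate-job check has to probe every argument combination Hangfire may have serialized. The same check could also be written as a loop over the boolean combinations; a sketch under the assumption that both extra arguments are plain flags, with the enqueued-task lookup stubbed out:

using System;
using System.Linq;

public static class ScanCheckDemo
{
    // Hypothetical stand-in for TaskScheduler.HasAlreadyEnqueuedTask.
    public static bool HasAlreadyEnqueuedTask(string service, string method, object[] args) => false;

    public static bool HasScanTaskRunningForLibrary(int libraryId)
    {
        var bools = new[] { true, false };
        // Probe (true,true), (true,false), (false,true), (false,false).
        return bools
            .SelectMany(_ => bools, (a, b) => new object[] { libraryId, a, b })
            .Any(args => HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", args));
    }

    public static void Main() => Console.WriteLine(HasScanTaskRunningForLibrary(1)); // False with the stub above
}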

/// <summary>

@@ -45,8 +45,6 @@ public class BackupService : IBackupService
_backupFiles = new List<string>()
{
"appsettings.json",
"Hangfire.db", // This is not used atm
"Hangfire-log.db", // This is not used atm
"kavita.db",
"kavita.db-shm", // This wont always be there
"kavita.db-wal" // This wont always be there

@@ -109,19 +107,21 @@ public class BackupService : IBackupService
_directoryService.CopyFilesToDirectory(
_backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)).ToList(), tempDirectory);

await SendProgress(0.2F, "Copying logs");
CopyLogsToBackupDirectory(tempDirectory);

await SendProgress(0.25F, "Copying cover images");

await CopyCoverImagesToBackupDirectory(tempDirectory);

await SendProgress(0.5F, "Copying bookmarks");
await SendProgress(0.35F, "Copying templates images");
CopyTemplatesToBackupDirectory(tempDirectory);

await SendProgress(0.5F, "Copying bookmarks");
await CopyBookmarksToBackupDirectory(tempDirectory);

await SendProgress(0.75F, "Copying themes");

CopyThemesToBackupDirectory(tempDirectory);

await SendProgress(0.85F, "Copying favicons");
CopyFaviconsToBackupDirectory(tempDirectory);

@@ -150,6 +150,11 @@ public class BackupService : IBackupService
_directoryService.CopyDirectoryToDirectory(_directoryService.FaviconDirectory, _directoryService.FileSystem.Path.Join(tempDirectory, "favicons"));
}

private void CopyTemplatesToBackupDirectory(string tempDirectory)
{
_directoryService.CopyDirectoryToDirectory(_directoryService.TemplateDirectory, _directoryService.FileSystem.Path.Join(tempDirectory, "templates"));
}

private async Task CopyCoverImagesToBackupDirectory(string tempDirectory)
{
var outputTempDir = Path.Join(tempDirectory, "covers");

@@ -31,9 +31,52 @@ public class ParsedSeries
public required MangaFormat Format { get; init; }
}

public class ScanResult
{
/// <summary>
/// A list of files in the Folder. Empty if HasChanged = false
/// </summary>
public IList<string> Files { get; set; }
/// <summary>
/// A nested folder from Library Root (at any level)
/// </summary>
public string Folder { get; set; }
/// <summary>
/// The library root
/// </summary>
public string LibraryRoot { get; set; }
/// <summary>
/// Was the Folder scanned or not. If not modified since last scan, this will be false and Files empty
/// </summary>
public bool HasChanged { get; set; }
/// <summary>
/// Set in Stage 2: Parsed Info from the Files
/// </summary>
public IList<ParserInfo> ParserInfos { get; set; }
}

/// <summary>
/// The final product of ParseScannedFiles. This has all the processed parserInfo and is ready for tracking/processing into entities
/// </summary>
public class ScannedSeriesResult
{
/// <summary>
/// Was the Folder scanned or not. If not modified since last scan, this will be false and indicates that upstream should count this as skipped
/// </summary>
public bool HasChanged { get; set; }
/// <summary>
/// The Parsed Series information used for tracking
/// </summary>
public ParsedSeries ParsedSeries { get; set; }
/// <summary>
/// Parsed files
/// </summary>
public IList<ParserInfo> ParsedInfos { get; set; }
}

public class SeriesModified
{
public required string FolderPath { get; set; }
public required string? FolderPath { get; set; }
public required string SeriesName { get; set; }
public DateTime LastScanned { get; set; }
public MangaFormat Format { get; set; }

@@ -75,112 +118,79 @@ public class ParseScannedFiles
/// <param name="scanDirectoryByDirectory">Scan directory by directory and for each, call folderAction</param>
/// <param name="seriesPaths">A dictionary mapping a normalized path to a list of <see cref="SeriesModified"/> to help scanner skip I/O</param>
/// <param name="folderPath">A library folder or series folder</param>
/// <param name="folderAction">A callback async Task to be called once all files for each folder path are found</param>
/// <param name="forceCheck">If we should bypass any folder last write time checks on the scan and force I/O</param>
public async Task ProcessFiles(string folderPath, bool scanDirectoryByDirectory,
IDictionary<string, IList<SeriesModified>> seriesPaths, Func<IList<string>, string,Task> folderAction, Library library, bool forceCheck = false)
public IList<ScanResult> ProcessFiles(string folderPath, bool scanDirectoryByDirectory,
IDictionary<string, IList<SeriesModified>> seriesPaths, Library library, bool forceCheck = false)
{
string normalizedPath;
var result = new List<ScanResult>();
var fileExtensions = string.Join("|", library.LibraryFileTypes.Select(l => l.FileTypeGroup.GetRegex()));
if (scanDirectoryByDirectory)
{
// This is used in library scan, so we should check first for a ignore file and use that here as well
var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(folderPath, DirectoryService.KavitaIgnoreFile);
var matcher = _directoryService.CreateMatcherFromFile(potentialIgnoreFile);
if (matcher != null)
var matcher = new GlobMatcher();
foreach (var pattern in library.LibraryExcludePatterns.Where(p => !string.IsNullOrEmpty(p.Pattern)))
{
_logger.LogWarning(".kavitaignore found! Ignore files is deprecated in favor of Library Settings. Please update and remove file at {Path}", potentialIgnoreFile);
matcher.AddExclude(pattern.Pattern);
}

if (library.LibraryExcludePatterns.Count != 0)
{
matcher ??= new GlobMatcher();
foreach (var pattern in library.LibraryExcludePatterns.Where(p => !string.IsNullOrEmpty(p.Pattern)))
{

matcher.AddExclude(pattern.Pattern);
}
}

var directories = _directoryService.GetDirectories(folderPath, matcher).ToList();

foreach (var directory in directories)
{
// Since this is a loop, we need a list return
normalizedPath = Parser.Parser.NormalizePath(directory);
if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
{
await folderAction(new List<string>(), directory);
result.Add(new ScanResult()
{
Files = ArraySegment<string>.Empty,
Folder = directory,
LibraryRoot = folderPath,
HasChanged = false
});
}
else
{
// For a scan, this is doing everything in the directory loop before the folder Action is called...which leads to no progress indication
await folderAction(_directoryService.ScanFiles(directory, fileExtensions, matcher), directory);
result.Add(new ScanResult()
{
Files = _directoryService.ScanFiles(directory, fileExtensions, matcher),
Folder = directory,
LibraryRoot = folderPath,
HasChanged = true
});
}
}

return;
return result;
}

normalizedPath = Parser.Parser.NormalizePath(folderPath);
if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
{
await folderAction(new List<string>(), folderPath);
return;
}
// We need to calculate all folders till library root and see if any kavitaignores
var seriesMatcher = BuildIgnoreFromLibraryRoot(folderPath, seriesPaths);

await folderAction(_directoryService.ScanFiles(folderPath, fileExtensions, seriesMatcher), folderPath);
}

/// <summary>
/// Used in ScanSeries, which enters at a lower level folder and hence needs a .kavitaignore from higher (up to root) to be built before
/// the scan takes place.
/// </summary>
/// <param name="folderPath"></param>
/// <param name="seriesPaths"></param>
/// <returns>A GlobMatcher. Empty if not applicable</returns>
private GlobMatcher BuildIgnoreFromLibraryRoot(string folderPath, IDictionary<string, IList<SeriesModified>> seriesPaths)
{
var seriesMatcher = new GlobMatcher();
try
{
var roots = seriesPaths[folderPath][0].LibraryRoots.Select(Parser.Parser.NormalizePath).ToList();
var libraryFolder = roots.SingleOrDefault(folderPath.Contains);

if (string.IsNullOrEmpty(libraryFolder) || !Directory.Exists(folderPath))
result.Add(new ScanResult()
{
return seriesMatcher;
}

var allParents = _directoryService.GetFoldersTillRoot(libraryFolder, folderPath);
var path = libraryFolder;

// Apply the library root level kavitaignore
var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(path, DirectoryService.KavitaIgnoreFile);
seriesMatcher.Merge(_directoryService.CreateMatcherFromFile(potentialIgnoreFile));

// Then apply kavitaignores for each folder down to where the series folder is
foreach (var folderPart in allParents.Reverse())
{
path = Parser.Parser.NormalizePath(Path.Join(libraryFolder, folderPart));
potentialIgnoreFile = _directoryService.FileSystem.Path.Join(path, DirectoryService.KavitaIgnoreFile);
seriesMatcher.Merge(_directoryService.CreateMatcherFromFile(potentialIgnoreFile));
}
Files = ArraySegment<string>.Empty,
Folder = folderPath,
LibraryRoot = folderPath,
HasChanged = false
});
}
catch (Exception ex)

result.Add(new ScanResult()
{
_logger.LogError(ex,
"[ScannerService] There was an error trying to find and apply .kavitaignores above the Series Folder. Scanning without them present");
}
Files = _directoryService.ScanFiles(folderPath, fileExtensions),
Folder = folderPath,
LibraryRoot = folderPath,
HasChanged = true
});

return seriesMatcher;
return result;
}
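Folder exclusion now comes from Library.LibraryExcludePatterns instead of on-disk .kavitaignore files, with each non-empty pattern added to a glob matcher before directories are enumerated. Kavita's GlobMatcher type is internal to the project; the sketch below shows the same idea with Microsoft.Extensions.FileSystemGlobbing as an analogy only, not the code in this diff:

using System;
using System.IO;
using System.Linq;
using Microsoft.Extensions.FileSystemGlobbing;
using Microsoft.Extensions.FileSystemGlobbing.Abstractions;

public static class ExcludePatternDemo
{
    public static string[] ScanFiles(string root, string[] excludePatterns)
    {
        var matcher = new Matcher();
        matcher.AddInclude("**/*");                      // start from everything under the root
        foreach (var pattern in excludePatterns.Where(p => !string.IsNullOrEmpty(p)))
        {
            matcher.AddExclude(pattern);                 // e.g. "**/Specials/**"
        }

        var result = matcher.Execute(new DirectoryInfoWrapper(new DirectoryInfo(root)));
        return result.Files.Select(f => f.Path).ToArray();
    }

    public static void Main()
    {
        foreach (var file in ScanFiles(".", new[] { "**/bin/**", "**/obj/**" }))
        {
            Console.WriteLine(file);
        }
    }
}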


/// <summary>
/// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing.
/// Attempts to either add a new instance of a series mapping to the _scannedSeries bag or adds to an existing.
/// This will check if the name matches an existing series name (multiple fields) <see cref="MergeName"/>
/// </summary>
/// <param name="scannedSeries">A localized list of a series' parsed infos</param>

@@ -290,20 +300,62 @@ public class ParseScannedFiles
/// <param name="folders"></param>
/// <param name="isLibraryScan">If true, does a directory scan first (resulting in folders being tackled in parallel), else does an immediate scan files</param>
/// <param name="seriesPaths">A map of Series names -> existing folder paths to handle skipping folders</param>
/// <param name="processSeriesInfos">Action which returns if the folder was skipped and the infos from said folder</param>
/// <param name="forceCheck">Defaults to false</param>
/// <returns></returns>
public async Task ScanLibrariesForSeries(Library library,
public async Task<IList<ScannedSeriesResult>> ScanLibrariesForSeries(Library library,
IEnumerable<string> folders, bool isLibraryScan,
IDictionary<string, IList<SeriesModified>> seriesPaths, Func<Tuple<bool, IList<ParserInfo>>, Task>? processSeriesInfos, bool forceCheck = false)
IDictionary<string, IList<SeriesModified>> seriesPaths, bool forceCheck = false)
{
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", library.Name, ProgressEventType.Started));

var processedScannedSeries = new List<ScannedSeriesResult>();
foreach (var folderPath in folders)
{
try
{
await ProcessFiles(folderPath, isLibraryScan, seriesPaths, ProcessFolder, library, forceCheck);
var scanResults = ProcessFiles(folderPath, isLibraryScan, seriesPaths, library, forceCheck);

foreach (var scanResult in scanResults)
{
// scanResult is updated with the parsed infos
await ProcessScanResult(scanResult, seriesPaths, library);

// We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();

// Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);

// Combine everything into scannedSeries
foreach (var info in scanResult.ParserInfos)
{
try
{
TrackSeries(scannedSeries, info);
}
catch (Exception ex)
{
_logger.LogError(ex,
"[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
info?.FullFilePath);
}
}

foreach (var series in scannedSeries.Keys)
{
if (scannedSeries[series].Count <= 0) continue;

UpdateSortOrder(scannedSeries, series);

processedScannedSeries.Add(new ScannedSeriesResult()
{
HasChanged = scanResult.HasChanged,
ParsedSeries = series,
ParsedInfos = scannedSeries[series]
});
}
}

}
catch (ArgumentException ex)
{

@@ -313,64 +365,120 @@ public class ParseScannedFiles

await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", library.Name, ProgressEventType.Ended));

async Task ProcessFolder(IList<string> files, string folder)
return processedScannedSeries;

}

/// <summary>
/// For a given ScanResult, sets the ParserInfos on the result
/// </summary>
/// <param name="result"></param>
/// <param name="seriesPaths"></param>
/// <param name="library"></param>
private async Task ProcessScanResult(ScanResult result, IDictionary<string, IList<SeriesModified>> seriesPaths, Library library)
{
// If the folder hasn't changed, generate fake ParserInfos for the Series that were in that folder.
if (!result.HasChanged)
{
var normalizedFolder = Parser.Parser.NormalizePath(folder);
if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedFolder, forceCheck))
var normalizedFolder = Parser.Parser.NormalizePath(result.Folder);
result.ParserInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo()
{
var parsedInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo()
{
Series = fp.SeriesName,
Format = fp.Format,
}).ToList();
if (processSeriesInfos != null)
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
_logger.LogDebug("[ScannerService] Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent("Skipped " + normalizedFolder, library.Name, ProgressEventType.Updated));
return;
}
Series = fp.SeriesName,
Format = fp.Format,
}).ToList();

_logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, folder);
_logger.LogDebug("[ScannerService] Skipped File Scan for {Folder} as it hasn't changed since last scan", normalizedFolder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent($"{files.Count} files in {folder}", library.Name, ProgressEventType.Updated));
if (files.Count == 0)
MessageFactory.FileScanProgressEvent("Skipped " + normalizedFolder, library.Name, ProgressEventType.Updated));
return;
}

var files = result.Files;
var folder = result.Folder;
var libraryRoot = result.LibraryRoot;

// When processing files for a folder and we do enter, we need to parse the information and combine parser infos
// NOTE: We might want to move the merge step later in the process, like return and combine.
_logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent($"{files.Count} files in {folder}", library.Name, ProgressEventType.Updated));
if (files.Count == 0)
{
_logger.LogInformation("[ScannerService] {Folder} is empty, no longer in this location, or has no file types that match Library File Types", folder);
result.ParserInfos = ArraySegment<ParserInfo>.Empty;
return;
}

// Multiple Series can exist within a folder. We should instead put these infos on the result and perform merging above
IList<ParserInfo> infos = files
.Select(file => _readingItemService.ParseFile(file, folder, libraryRoot, library.Type))
.Where(info => info != null)
.ToList()!;

result.ParserInfos = infos;
}
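When a folder's last-write time says nothing changed, the scan still needs series-level bookkeeping, so ProcessScanResult fills the result with lightweight ParserInfo stubs carrying only the series name and format remembered from the previous scan. A reduced sketch of that skip path, with simplified stand-in types rather than Kavita's actual DTOs:

using System;
using System.Collections.Generic;
using System.Linq;

public record SeriesMemo(string SeriesName, string Format);
public record ParserStub(string Series, string Format);

public static class SkipPathDemo
{
    // Build stand-in parser infos for a folder that was skipped because it hasn't changed.
    public static List<ParserStub> StubsForUnchangedFolder(
        IDictionary<string, List<SeriesMemo>> seriesPaths, string normalizedFolder)
    {
        return seriesPaths[normalizedFolder]
            .Select(fp => new ParserStub(fp.SeriesName, fp.Format))
            .ToList();
    }

    public static void Main()
    {
        var memo = new Dictionary<string, List<SeriesMemo>>
        {
            ["/library/accel world"] = new() { new SeriesMemo("Accel World", "Archive") }
        };
        Console.WriteLine(StubsForUnchangedFolder(memo, "/library/accel world").Count); // 1
    }
}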


private void UpdateSortOrder(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParsedSeries series)
{
try
{
// Set the Sort order per Volume
var volumes = scannedSeries[series].GroupBy(info => info.Volumes);
foreach (var volume in volumes)
{
_logger.LogInformation("[ScannerService] {Folder} is empty or is no longer in this location", folder);
return;
}
var infos = scannedSeries[series].Where(info => info.Volumes == volume.Key).ToList();
IList<ParserInfo> chapters;
var specialTreatment = infos.TrueForAll(info => info.IsSpecial);

var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
var infos = files
.Select(file => _readingItemService.ParseFile(file, folder, library.Type))
.Where(info => info != null)
.ToList();


MergeLocalizedSeriesWithSeries(infos);

foreach (var info in infos)
{
try
if (specialTreatment)
{
TrackSeries(scannedSeries, info);
chapters = infos
.OrderBy(info => info.SpecialIndex)
.ToList();
}
catch (Exception ex)
else
{
_logger.LogError(ex,
"[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
info?.FullFilePath);
}
}

foreach (var series in scannedSeries.Keys)
{
if (scannedSeries[series].Count > 0 && processSeriesInfos != null)
{
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(false, scannedSeries[series]));
chapters = infos
.OrderByNatural(info => info.Chapters)
.ToList();
}


var counter = 0f;
var prevIssue = string.Empty;
foreach (var chapter in chapters)
{
if (float.TryParse(chapter.Chapters, out var parsedChapter))
{
counter = parsedChapter;
if (!string.IsNullOrEmpty(prevIssue) && float.TryParse(prevIssue, out var prevIssueFloat) && parsedChapter.Is(prevIssueFloat))
{
// Bump by 0.1
counter += 0.1f;
}
chapter.IssueOrder = counter;
prevIssue = $"{parsedChapter}";
}
else
{
// I need to bump by 0.1f as if the prevIssue matches counter
if (!string.IsNullOrEmpty(prevIssue) && prevIssue == counter + "")
{
// Bump by 0.1
counter += 0.1f;
}
chapter.IssueOrder = counter;
counter++;
prevIssue = chapter.Chapters;
}
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue setting IssueOrder");
}
}
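UpdateSortOrder walks chapters in natural order and assigns a float IssueOrder, bumping by 0.1 whenever two files resolve to the same issue number (for example variant covers). The core of that counter logic, extracted as a standalone sketch that operates on plain chapter strings instead of ParserInfo:

using System;
using System.Collections.Generic;
using System.Globalization;

public static class IssueOrderDemo
{
    // Returns the sort order assigned to each chapter string, mirroring the bump-by-0.1 idea.
    public static IReadOnlyList<float> AssignOrders(IEnumerable<string> chapters)
    {
        var orders = new List<float>();
        var counter = 0f;
        var prevIssue = string.Empty;
        foreach (var chapter in chapters)
        {
            if (float.TryParse(chapter, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
            {
                counter = parsed;
                if (!string.IsNullOrEmpty(prevIssue) &&
                    float.TryParse(prevIssue, NumberStyles.Float, CultureInfo.InvariantCulture, out var prev) &&
                    Math.Abs(parsed - prev) < float.Epsilon)
                {
                    counter += 0.1f; // duplicate issue number: nudge it after the previous one
                }
                orders.Add(counter);
                prevIssue = chapter;
            }
            else
            {
                // Non-numeric chapters count upward from the last numeric value.
                if (prevIssue == counter.ToString(CultureInfo.InvariantCulture)) counter += 0.1f;
                orders.Add(counter);
                counter++;
                prevIssue = chapter;
            }
        }
        return orders;
    }

    public static void Main()
    {
        var result = AssignOrders(new[] { "1", "1", "2", "Annual" });
        Console.WriteLine(string.Join(", ", result)); // 1, 1.1, 2, 2.1
    }
}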

/// <summary>

@@ -399,7 +507,7 @@ public class ParseScannedFiles
/// World of Acceleration v02.cbz having Series "Accel World" and Localized Series of "World of Acceleration"
/// </example>
/// <param name="infos">A collection of ParserInfos</param>
private void MergeLocalizedSeriesWithSeries(IReadOnlyCollection<ParserInfo?> infos)
private void MergeLocalizedSeriesWithSeries(IList<ParserInfo> infos)
{
var hasLocalizedSeries = infos.Any(i => !string.IsNullOrEmpty(i.LocalizedSeries));
if (!hasLocalizedSeries) return;

117 API/Services/Tasks/Scanner/Parser/BasicParser.cs Normal file

@@ -0,0 +1,117 @@
using System.IO;
using API.Data.Metadata;
using API.Entities.Enums;

namespace API.Services.Tasks.Scanner.Parser;
#nullable enable

/// <summary>
/// This is the basic parser for handling Manga/Comic/Book libraries. This was previously DefaultParser before splitting each parser
/// into their own classes.
/// </summary>
public class BasicParser(IDirectoryService directoryService, IDefaultParser imageParser) : DefaultParser(directoryService)
{
public override ParserInfo? Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo? comicInfo = null)
{
var fileName = directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath);
// TODO: Potential Bug: This will return null, but on Image libraries, if all images, we would want to include this.
if (type != LibraryType.Image && Parser.IsCoverImage(directoryService.FileSystem.Path.GetFileName(filePath))) return null;

if (Parser.IsImage(filePath))
{
return imageParser.Parse(filePath, rootPath, libraryRoot, LibraryType.Image, comicInfo);
}

var ret = new ParserInfo()
{
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Parser.RemoveExtensionIfSupported(fileName),
FullFilePath = filePath,
Series = string.Empty,
ComicInfo = comicInfo
};

// This will be called if the epub is already parsed once then we call and merge the information, if the
if (Parser.IsEpub(filePath))
{
ret.Chapters = Parser.ParseChapter(fileName);
ret.Series = Parser.ParseSeries(fileName);
ret.Volumes = Parser.ParseVolume(fileName);
}
else
{
ret.Chapters = type == LibraryType.Comic
? Parser.ParseComicChapter(fileName)
: Parser.ParseChapter(fileName);
ret.Series = type == LibraryType.Comic ? Parser.ParseComicSeries(fileName) : Parser.ParseSeries(fileName);
ret.Volumes = type == LibraryType.Comic ? Parser.ParseComicVolume(fileName) : Parser.ParseVolume(fileName);
}

if (ret.Series == string.Empty || Parser.IsImage(filePath))
{
// Try to parse information out of each folder all the way to rootPath
ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}

var edition = Parser.ParseEdition(fileName);
if (!string.IsNullOrEmpty(edition))
{
ret.Series = Parser.CleanTitle(ret.Series.Replace(edition, string.Empty), type is LibraryType.Comic);
ret.Edition = edition;
}

var isSpecial = type == LibraryType.Comic ? Parser.IsComicSpecial(fileName) : Parser.IsMangaSpecial(fileName);
// We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that
// could cause a problem as Omake is a special term, but there is valid volume/chapter information.
if (ret.Chapters == Parser.DefaultChapter && ret.Volumes == Parser.LooseLeafVolume && isSpecial)
{
ret.IsSpecial = true;
ParseFromFallbackFolders(filePath, rootPath, type, ref ret); // NOTE: This can cause some complications, we should try to be a bit less aggressive to fallback to folder
}

// If we are a special with marker, we need to ensure we use the correct series name. we can do this by falling back to Folder name
if (Parser.HasSpecialMarker(fileName))
{
ret.IsSpecial = true;
ret.SpecialIndex = Parser.ParseSpecialIndex(fileName);
ret.Chapters = Parser.DefaultChapter;
ret.Volumes = Parser.SpecialVolume;

ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}

if (string.IsNullOrEmpty(ret.Series))
{
ret.Series = Parser.CleanTitle(fileName, type is LibraryType.Comic);
}

// Pdfs may have .pdf in the series name, remove that
if (Parser.IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf"))
{
ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
}

// Patch in other information from ComicInfo
UpdateFromComicInfo(ret);

// v0.8.x: Introducing a change where Specials will go in a separate Volume with a reserved number
if (ret.IsSpecial)
{
ret.Volumes = Parser.SpecialVolume;
}

return ret.Series == string.Empty ? null : ret;
}

/// <summary>
/// Applicable for everything but ComicVine and Image library types
/// </summary>
/// <param name="filePath"></param>
/// <param name="type"></param>
/// <returns></returns>
public override bool IsApplicable(string filePath, LibraryType type)
{
return type != LibraryType.ComicVine && type != LibraryType.Image;
}
}

47 API/Services/Tasks/Scanner/Parser/BookParser.cs Normal file

@@ -0,0 +1,47 @@
using API.Data.Metadata;
using API.Entities.Enums;

namespace API.Services.Tasks.Scanner.Parser;

public class BookParser(IDirectoryService directoryService, IBookService bookService, IDefaultParser basicParser) : DefaultParser(directoryService)
{
public override ParserInfo Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo comicInfo = null)
{
var info = bookService.ParseInfo(filePath);
if (info == null) return null;

// This catches when original library type is Manga/Comic and when parsing with non
if (Parser.ParseVolume(info.Series) != Parser.LooseLeafVolume) // Shouldn't this be info.Volume != DefaultVolume?
{
var hasVolumeInTitle = !Parser.ParseVolume(info.Title)
.Equals(Parser.LooseLeafVolume);
var hasVolumeInSeries = !Parser.ParseVolume(info.Series)
.Equals(Parser.LooseLeafVolume);

if (string.IsNullOrEmpty(info.ComicInfo?.Volume) && hasVolumeInTitle && (hasVolumeInSeries || string.IsNullOrEmpty(info.Series)))
{
// This is likely a light novel for which we can set series from parsed title
info.Series = Parser.ParseSeries(info.Title);
info.Volumes = Parser.ParseVolume(info.Title);
}
else
{
var info2 = basicParser.Parse(filePath, rootPath, libraryRoot, LibraryType.Book, comicInfo);
info.Merge(info2);
}
}

return string.IsNullOrEmpty(info.Series) ? null : info;
}

/// <summary>
/// Only applicable for Epub files
/// </summary>
/// <param name="filePath"></param>
/// <param name="type"></param>
/// <returns></returns>
public override bool IsApplicable(string filePath, LibraryType type)
{
return Parser.IsEpub(filePath);
}
}

105 API/Services/Tasks/Scanner/Parser/ComicVineParser.cs Normal file

@@ -0,0 +1,105 @@
using System.IO;
using System.Linq;
using API.Data.Metadata;
using API.Entities.Enums;

namespace API.Services.Tasks.Scanner.Parser;
#nullable enable

/// <summary>
/// Responsible for Parsing ComicVine Comics.
/// </summary>
/// <param name="directoryService"></param>
public class ComicVineParser(IDirectoryService directoryService) : DefaultParser(directoryService)
{
/// <summary>
/// This Parser generates Series name to be defined as Series + first Issue Volume, so "Batman (2020)".
/// </summary>
/// <param name="filePath"></param>
/// <param name="rootPath"></param>
/// <param name="type"></param>
/// <returns></returns>
public override ParserInfo? Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo? comicInfo = null)
{
if (type != LibraryType.ComicVine) return null;

var fileName = directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath);
// Mylar often outputs cover.jpg, ignore it by default
if (string.IsNullOrEmpty(fileName) || Parser.IsCoverImage(directoryService.FileSystem.Path.GetFileName(filePath))) return null;

var directoryName = directoryService.FileSystem.DirectoryInfo.New(rootPath).Name;

var info = new ParserInfo()
{
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Parser.RemoveExtensionIfSupported(fileName)!,
FullFilePath = filePath,
Series = string.Empty,
ComicInfo = comicInfo,
Chapters = Parser.ParseComicChapter(fileName),
Volumes = Parser.ParseComicVolume(fileName)
};

// See if we can formulate the name from the ComicInfo
if (!string.IsNullOrEmpty(info.ComicInfo?.Series) && !string.IsNullOrEmpty(info.ComicInfo?.Volume))
{
info.Series = $"{info.ComicInfo.Series} ({info.ComicInfo.Volume})";
}

if (string.IsNullOrEmpty(info.Series))
{
// Check if we need to fallback to the Folder name AND that the folder matches the format "Series (Year)"
var directories = directoryService.GetFoldersTillRoot(rootPath, filePath).ToList();
if (directories.Count > 0)
{
foreach (var directory in directories)
{
if (!Parser.IsSeriesAndYear(directory)) continue;
info.Series = directory;
info.Volumes = Parser.ParseYear(directory);
break;
}

// When there was at least one directory and we failed to parse the series, this is the final fallback
if (string.IsNullOrEmpty(info.Series))
{
info.Series = Parser.CleanTitle(directories[0], true, true);
}
}
else
{
if (Parser.IsSeriesAndYear(directoryName))
{
info.Series = directoryName;
info.Volumes = Parser.ParseYear(directoryName);
}
}
}

// Check if this is a Special/Annual
info.IsSpecial = Parser.IsComicSpecial(info.Filename) || Parser.IsComicSpecial(info.ComicInfo?.Format);

// Patch in other information from ComicInfo
UpdateFromComicInfo(info);

if (string.IsNullOrEmpty(info.Series))
{
info.Series = Parser.CleanTitle(directoryName, true, true);
}


return string.IsNullOrEmpty(info.Series) ? null : info;
}

/// <summary>
/// Only applicable for ComicVine library type
/// </summary>
/// <param name="filePath"></param>
/// <param name="type"></param>
/// <returns></returns>
public override bool IsApplicable(string filePath, LibraryType type)
{
return type == LibraryType.ComicVine;
}
}
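The ComicVine parser's naming rule is "Series (Volume)", with Volume typically being the start year, and it falls back to a folder already named like "Batman (2020)" when ComicInfo is incomplete. A compact illustration of that precedence with simplified inputs (the real code also cleans the title and parses the year into Volumes):

using System;
using System.Text.RegularExpressions;

public static class ComicVineNameDemo
{
    private static readonly Regex SeriesAndYear = new(@"^\D+\s\((?<Year>\d+)\)$", RegexOptions.Compiled);

    public static string ResolveSeries(string? comicInfoSeries, string? comicInfoVolume, string folderName)
    {
        // 1) Prefer ComicInfo: "Batman" + "2020" => "Batman (2020)"
        if (!string.IsNullOrEmpty(comicInfoSeries) && !string.IsNullOrEmpty(comicInfoVolume))
            return $"{comicInfoSeries} ({comicInfoVolume})";

        // 2) Otherwise fall back to a folder shaped like "Series (Year)"
        if (SeriesAndYear.IsMatch(folderName)) return folderName;

        // 3) Last resort: the folder name itself
        return folderName;
    }

    public static void Main()
    {
        Console.WriteLine(ResolveSeries("Batman", "2020", "Batman v2")); // Batman (2020)
        Console.WriteLine(ResolveSeries(null, null, "Batman (2020)"));   // Batman (2020)
    }
}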

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using API.Data.Metadata;
using API.Entities.Enums;

namespace API.Services.Tasks.Scanner.Parser;

@@ -7,158 +8,26 @@ namespace API.Services.Tasks.Scanner.Parser;

public interface IDefaultParser
{
ParserInfo? Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga);
ParserInfo? Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo? comicInfo = null);
void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, ref ParserInfo ret);
bool IsApplicable(string filePath, LibraryType type);
}

/// <summary>
/// This is an implementation of the Parser that is the basis for everything
/// </summary>
public class DefaultParser : IDefaultParser
public abstract class DefaultParser(IDirectoryService directoryService) : IDefaultParser
{
private readonly IDirectoryService _directoryService;

public DefaultParser(IDirectoryService directoryService)
{
_directoryService = directoryService;
}

/// <summary>
/// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed
/// Parses information out of a file path. Can fallback to using directory name if Series couldn't be parsed
/// from filename.
/// </summary>
/// <param name="filePath"></param>
/// <param name="rootPath">Root folder</param>
/// <param name="type">Defaults to Manga. Allows different Regex to be used for parsing.</param>
/// <param name="type">Allows different Regex to be used for parsing.</param>
/// <returns><see cref="ParserInfo"/> or null if Series was empty</returns>
public ParserInfo? Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga)
{
var fileName = _directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath);
// TODO: Potential Bug: This will return null, but on Image libraries, if all images, we would want to include this.
if (type != LibraryType.Image && Parser.IsCoverImage(_directoryService.FileSystem.Path.GetFileName(filePath))) return null;

var ret = new ParserInfo()
{
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Path.GetFileNameWithoutExtension(fileName),
FullFilePath = filePath,
Series = string.Empty
};

// If library type is Image or this is not a cover image in a non-image library, then use dedicated parsing mechanism
if (type == LibraryType.Image || Parser.IsImage(filePath))
{
// TODO: We can move this up one level
return ParseImage(filePath, rootPath, ret);
}


// This will be called if the epub is already parsed once then we call and merge the information, if the
if (Parser.IsEpub(filePath))
{
ret.Chapters = Parser.ParseChapter(fileName);
ret.Series = Parser.ParseSeries(fileName);
ret.Volumes = Parser.ParseVolume(fileName);
}
else
{
ret.Chapters = type == LibraryType.Comic
? Parser.ParseComicChapter(fileName)
: Parser.ParseChapter(fileName);
ret.Series = type == LibraryType.Comic ? Parser.ParseComicSeries(fileName) : Parser.ParseSeries(fileName);
ret.Volumes = type == LibraryType.Comic ? Parser.ParseComicVolume(fileName) : Parser.ParseVolume(fileName);
}

if (ret.Series == string.Empty || Parser.IsImage(filePath))
{
// Try to parse information out of each folder all the way to rootPath
ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}

var edition = Parser.ParseEdition(fileName);
if (!string.IsNullOrEmpty(edition))
{
ret.Series = Parser.CleanTitle(ret.Series.Replace(edition, string.Empty), type is LibraryType.Comic);
ret.Edition = edition;
}

var isSpecial = type == LibraryType.Comic ? Parser.IsComicSpecial(fileName) : Parser.IsMangaSpecial(fileName);
// We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that
// could cause a problem as Omake is a special term, but there is valid volume/chapter information.
if (ret.Chapters == Parser.DefaultChapter && ret.Volumes == Parser.LooseLeafVolume && isSpecial)
{
ret.IsSpecial = true;
ParseFromFallbackFolders(filePath, rootPath, type, ref ret); // NOTE: This can cause some complications, we should try to be a bit less aggressive to fallback to folder
}

// If we are a special with marker, we need to ensure we use the correct series name. we can do this by falling back to Folder name
if (Parser.HasSpecialMarker(fileName))
{
ret.IsSpecial = true;
ret.Chapters = Parser.DefaultChapter;
ret.Volumes = Parser.LooseLeafVolume;

ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}

if (string.IsNullOrEmpty(ret.Series))
{
ret.Series = Parser.CleanTitle(fileName, type is LibraryType.Comic);
}

// Pdfs may have .pdf in the series name, remove that
if (Parser.IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf"))
{
ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
}

return ret.Series == string.Empty ? null : ret;
}

private ParserInfo ParseImage(string filePath, string rootPath, ParserInfo ret)
{
ret.Volumes = Parser.LooseLeafVolume;
ret.Chapters = Parser.DefaultChapter;
var directoryName = _directoryService.FileSystem.DirectoryInfo.New(rootPath).Name;
ret.Series = directoryName;

ParseFromFallbackFolders(filePath, rootPath, LibraryType.Image, ref ret);


if (IsEmptyOrDefault(ret.Volumes, ret.Chapters))
{
ret.IsSpecial = true;
}
else
{
var parsedVolume = Parser.ParseVolume(ret.Filename);
var parsedChapter = Parser.ParseChapter(ret.Filename);
if (IsEmptyOrDefault(ret.Volumes, string.Empty) && !parsedVolume.Equals(Parser.LooseLeafVolume))
{
ret.Volumes = parsedVolume;
}
if (IsEmptyOrDefault(string.Empty, ret.Chapters) && !parsedChapter.Equals(Parser.DefaultChapter))
{
ret.Chapters = parsedChapter;
}
}


// Override the series name, as fallback folders needs it to try and parse folder name
if (string.IsNullOrEmpty(ret.Series) || ret.Series.Equals(directoryName))
{
ret.Series = Parser.CleanTitle(directoryName, replaceSpecials: false);
}

return ret;
}

private static bool IsEmptyOrDefault(string volumes, string chapters)
{
return (string.IsNullOrEmpty(chapters) || chapters == Parser.DefaultChapter) &&
(string.IsNullOrEmpty(volumes) || volumes == Parser.LooseLeafVolume);
}
public abstract ParserInfo? Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo? comicInfo = null);

/// <summary>
/// Fills out <see cref="ParserInfo"/> by trying to parse volume, chapters, and series from folders

@@ -169,13 +38,13 @@ public class DefaultParser : IDefaultParser
/// <param name="ret">Expects a non-null ParserInfo which this method will populate</param>
public void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, ref ParserInfo ret)
{
var fallbackFolders = _directoryService.GetFoldersTillRoot(rootPath, filePath)
var fallbackFolders = directoryService.GetFoldersTillRoot(rootPath, filePath)
.Where(f => !Parser.IsMangaSpecial(f))
.ToList();

if (fallbackFolders.Count == 0)
{
var rootFolderName = _directoryService.FileSystem.DirectoryInfo.New(rootPath).Name;
var rootFolderName = directoryService.FileSystem.DirectoryInfo.New(rootPath).Name;
var series = Parser.ParseSeries(rootFolderName);

if (string.IsNullOrEmpty(series))

@@ -229,4 +98,45 @@ public class DefaultParser : IDefaultParser
}
}
}

protected void UpdateFromComicInfo(ParserInfo info)
{
if (info.ComicInfo == null) return;

if (!string.IsNullOrEmpty(info.ComicInfo.Volume))
{
info.Volumes = info.ComicInfo.Volume;
}
if (string.IsNullOrEmpty(info.Series) && !string.IsNullOrEmpty(info.ComicInfo.Series))
{
info.Series = info.ComicInfo.Series.Trim();
}
if (string.IsNullOrEmpty(info.LocalizedSeries) && !string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
{
info.LocalizedSeries = info.ComicInfo.LocalizedSeries.Trim();
}
if (!string.IsNullOrEmpty(info.ComicInfo.Number))
{
info.Chapters = info.ComicInfo.Number;
if (info.IsSpecial && Parser.DefaultChapter != info.Chapters)
{
info.IsSpecial = false;
info.Volumes = $"{Parser.SpecialVolumeNumber}";
}
}

// Patch is SeriesSort from ComicInfo
if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
{
info.SeriesSort = info.ComicInfo.TitleSort.Trim();
}
}

public abstract bool IsApplicable(string filePath, LibraryType type);

protected static bool IsEmptyOrDefault(string volumes, string chapters)
{
return (string.IsNullOrEmpty(chapters) || chapters == Parser.DefaultChapter) &&
(string.IsNullOrEmpty(volumes) || volumes == Parser.LooseLeafVolume);
}
}

54 API/Services/Tasks/Scanner/Parser/ImageParser.cs Normal file

@@ -0,0 +1,54 @@
using System.IO;
using API.Data.Metadata;
using API.Entities.Enums;

namespace API.Services.Tasks.Scanner.Parser;
#nullable enable

public class ImageParser(IDirectoryService directoryService) : DefaultParser(directoryService)
{
public override ParserInfo? Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo? comicInfo = null)
{
if (type != LibraryType.Image || !Parser.IsImage(filePath)) return null;

var directoryName = directoryService.FileSystem.DirectoryInfo.New(rootPath).Name;
var fileName = directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath);
var ret = new ParserInfo
{
Series = directoryName,
Volumes = Parser.LooseLeafVolume,
Chapters = Parser.DefaultChapter,
ComicInfo = comicInfo,
Format = MangaFormat.Image,
Filename = Path.GetFileName(filePath),
FullFilePath = filePath,
Title = fileName,
};
ParseFromFallbackFolders(filePath, libraryRoot, LibraryType.Image, ref ret);

if (IsEmptyOrDefault(ret.Volumes, ret.Chapters))
{
ret.IsSpecial = true;
ret.Volumes = $"{Parser.SpecialVolumeNumber}";
}

// Override the series name, as fallback folders needs it to try and parse folder name
if (string.IsNullOrEmpty(ret.Series) || ret.Series.Equals(directoryName))
{
ret.Series = Parser.CleanTitle(directoryName, replaceSpecials: false);
}

return string.IsNullOrEmpty(ret.Series) ? null : ret;
}

/// <summary>
/// Only applicable for Image files and Image library type
/// </summary>
/// <param name="filePath"></param>
/// <param name="type"></param>
/// <returns></returns>
public override bool IsApplicable(string filePath, LibraryType type)
{
return type == LibraryType.Image && Parser.IsImage(filePath);
}
}

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;

@@ -12,10 +13,16 @@ namespace API.Services.Tasks.Scanner.Parser;
public static class Parser
{
// NOTE: If you change this, don't forget to change in the UI (see Series Detail)
public const string DefaultChapter = "0"; // -2147483648
public const string LooseLeafVolume = "0";
public const int DefaultChapterNumber = 0;
public const int LooseLeafVolumeNumber = 0;
public const string DefaultChapter = "-100000"; // -2147483648
public const string LooseLeafVolume = "-100000";
public const int DefaultChapterNumber = -100_000;
public const int LooseLeafVolumeNumber = -100_000;
/// <summary>
/// The Volume Number of Specials to reside in
/// </summary>
public const int SpecialVolumeNumber = 100_000;
public const string SpecialVolume = "100000";
|
||||
|
||||
public static readonly TimeSpan RegexTimeout = TimeSpan.FromMilliseconds(500);
|
||||
|
||||
public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg|\.webp|\.gif|\.avif)"; // Don't forget to update CoverChooser
|
||||
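The sentinel rework above pushes loose-leaf/default entries far below any real number and reserves a high volume number for specials; a quick illustration (not from this commit):

// Illustrative only — shows how the new sentinels sort against real volume numbers.
var volumeNumbers = new List<int> { 3, Parser.LooseLeafVolumeNumber, 1, Parser.SpecialVolumeNumber };
volumeNumbers.Sort(); // -100000 (loose leaf), 1, 3, 100000 (specials)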

@@ -99,6 +106,12 @@ public static class Parser
    private static readonly Regex NormalizeRegex = new Regex(@"[^\p{L}0-9\+!]",
        MatchOptions, RegexTimeout);

    /// <summary>
    /// Supports Batman (2020) or Batman (2)
    /// </summary>
    private static readonly Regex SeriesAndYearRegex = new Regex(@"^\D+\s\((?<Year>\d+)\)$",
        MatchOptions, RegexTimeout);

    /// <summary>
    /// Recognizes the Special token only
    /// </summary>

@@ -628,7 +641,7 @@
    private static readonly Regex ComicSpecialRegex = new Regex(
        // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle.
        $@"\b(?:{CommonSpecial}|\d.+?(\W|-|^)Annual|Annual(\W|-|$)|Book \d.+?|Compendium(\W|-|$|\s.+?)|Omnibus(\W|-|$|\s.+?)|FCBD \d.+?|Absolute(\W|-|$|\s.+?)|Preview(\W|-|$|\s.+?)|Hors[ -]S[ée]rie|TPB|HS|THS)\b",
        $@"\b(?:{CommonSpecial}|\d.+?(\W|-|^)Annual|Annual(\W|-|$|\s#)|Book \d.+?|Compendium(\W|-|$|\s.+?)|Omnibus(\W|-|$|\s.+?)|FCBD \d.+?|Absolute(\W|-|$|\s.+?)|Preview(\W|-|$|\s.+?)|Hors[ -]S[ée]rie|TPB|HS|THS)\b",
        MatchOptions, RegexTimeout
    );

@@ -678,14 +691,22 @@
        return SpecialMarkerRegex.IsMatch(filePath);
    }

    public static int ParseSpecialIndex(string filePath)
    {
        var match = SpecialMarkerRegex.Match(filePath).Value.Replace("SP", string.Empty);
        if (string.IsNullOrEmpty(match)) return 0;
        return int.Parse(match);
    }

    public static bool IsMangaSpecial(string filePath)
    {
        filePath = ReplaceUnderscores(filePath);
        return MangaSpecialRegex.IsMatch(filePath);
    }

    public static bool IsComicSpecial(string filePath)
    public static bool IsComicSpecial(string? filePath)
    {
        if (string.IsNullOrEmpty(filePath)) return false;
        filePath = ReplaceUnderscores(filePath);
        return ComicSpecialRegex.IsMatch(filePath);
    }
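A quick illustration of the new special helpers — illustrative only, and it assumes SpecialMarkerRegex matches tokens of the form "SPnn", which is the prefix ParseSpecialIndex strips:

// Illustrative only — not part of this commit.
var index = Parser.ParseSpecialIndex("Beastars SP01.cbz"); // 1, assuming the marker regex matches "SP01"
var none = Parser.ParseSpecialIndex("Beastars v01.cbz");   // 0 when no marker is present
var safe = Parser.IsComicSpecial(null);                    // false — the new null guard short-circuits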

@@ -944,35 +965,52 @@
    {
        try
        {
            if (!Regex.IsMatch(range, @"^[\d\-.]+$", MatchOptions, RegexTimeout))
            // Check if the range string is not null or empty
            if (string.IsNullOrEmpty(range) || !Regex.IsMatch(range, @"^[\d\-.]+$", MatchOptions, RegexTimeout))
            {
                return (float) 0.0;
                return 0.0f;
            }

            var tokens = range.Replace("_", string.Empty).Split("-");
            return tokens.Min(t => t.AsFloat());
            // Check if there is a range or not
            if (Regex.IsMatch(range, @"\d-{1}\d"))
            {
                var tokens = range.Replace("_", string.Empty).Split("-", StringSplitOptions.RemoveEmptyEntries);
                return tokens.Min(t => t.AsFloat());
            }

            return float.Parse(range);
        }
        catch
        catch (Exception)
        {
            return (float) 0.0;
            return 0.0f;
        }
    }

    public static float MaxNumberFromRange(string range)
    {
        try
        {
            if (!Regex.IsMatch(range, @"^[\d\-.]+$", MatchOptions, RegexTimeout))
            // Check if the range string is not null or empty
            if (string.IsNullOrEmpty(range) || !Regex.IsMatch(range, @"^[\d\-.]+$", MatchOptions, RegexTimeout))
            {
                return (float) 0.0;
                return 0.0f;
            }

            var tokens = range.Replace("_", string.Empty).Split("-");
            return tokens.Max(t => t.AsFloat());
            // Check if there is a range or not
            if (Regex.IsMatch(range, @"\d-{1}\d"))
            {
                var tokens = range.Replace("_", string.Empty).Split("-", StringSplitOptions.RemoveEmptyEntries);
                return tokens.Max(t => t.AsFloat());
            }

            return float.Parse(range);
        }
        catch
        catch (Exception)
        {
            return (float) 0.0;
            return 0.0f;
        }
    }
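A hedged illustration of what the reworked range helpers return (not from this commit):

// Illustrative only — behavior of the reworked range helpers above.
var min = Parser.MinNumberFromRange("1-3");  // 1
var max = Parser.MaxNumberFromRange("1-3");  // 3
var one = Parser.MinNumberFromRange("5.5");  // 5.5 — no dash-digit range, so the value is parsed directly
var bad = Parser.MinNumberFromRange("abc");  // 0 — fails the digits/dash/dot check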

@@ -1094,9 +1132,39 @@
            // NOTE: This is failing for //localhost:5000/api/book/29919/book-resources?file=OPS/images/tick1.jpg
            var importFile = match.Groups["Filename"].Value;
            if (!importFile.Contains("?")) return importFile;
            if (!importFile.Contains('?')) return importFile;
        }

        return null;
    }

    /// <summary>
    /// If the name matches exactly Series (Volume digits)
    /// </summary>
    /// <param name="name"></param>
    /// <returns></returns>
    public static bool IsSeriesAndYear(string? name)
    {
        return !string.IsNullOrEmpty(name) && SeriesAndYearRegex.IsMatch(name);
    }

    public static string ParseYear(string? name)
    {
        if (string.IsNullOrEmpty(name)) return string.Empty;
        var match = SeriesAndYearRegex.Match(name);
        if (!match.Success) return string.Empty;

        return match.Groups["Year"].Value;
    }

    public static string? RemoveExtensionIfSupported(string? filename)
    {
        if (string.IsNullOrEmpty(filename)) return filename;

        if (Regex.IsMatch(filename, SupportedExtensions))
        {
            return Regex.Replace(filename, SupportedExtensions, string.Empty);
        }
        return filename;
    }
}
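A hedged illustration of the expected results from the new helpers above (not from this commit; the .cbz case assumes that extension is covered by SupportedExtensions):

// Illustrative only — expected results based on the regex and helpers above.
var isSeriesYear = Parser.IsSeriesAndYear("Batman (2020)");          // true
var year = Parser.ParseYear("Batman (2020)");                        // "2020"
var trimmed = Parser.RemoveExtensionIfSupported("Beastars v01.cbz"); // "Beastars v01", assuming .cbz is a supported extension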

@@ -60,6 +60,10 @@ public class ParserInfo
    /// If the file contains no volume/chapter information or contains Special Keywords <see cref="Parser.MangaSpecialRegex"/>
    /// </summary>
    public bool IsSpecial { get; set; }
    /// <summary>
    /// If the file has a Special Marker explicitly, this will contain the index
    /// </summary>
    public int SpecialIndex { get; set; } = 0;

    /// <summary>
    /// Used for specials or books, stores what the UI should show.

@@ -67,6 +71,12 @@ public class ParserInfo
    /// </summary>
    public string Title { get; set; } = string.Empty;

    /// <summary>
    /// This can be filled in from ComicInfo.xml during scanning. Will update the SortOrder field on <see cref="Entities.Chapter"/>.
    /// Falls back to Parsed Chapter number
    /// </summary>
    public float IssueOrder { get; set; }

    /// <summary>
    /// If the ParserInfo has the IsSpecial tag or both volumes and chapters are default aka 0
    /// </summary>

API/Services/Tasks/Scanner/Parser/PdfParser.cs (new file, 100 lines)
@@ -0,0 +1,100 @@
using System.IO;
using API.Data.Metadata;
using API.Entities.Enums;

namespace API.Services.Tasks.Scanner.Parser;

public class PdfParser(IDirectoryService directoryService) : DefaultParser(directoryService)
{
    public override ParserInfo Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo comicInfo = null)
    {
        var fileName = directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath);
        var ret = new ParserInfo
        {
            Filename = Path.GetFileName(filePath),
            Format = Parser.ParseFormat(filePath),
            Title = Parser.RemoveExtensionIfSupported(fileName)!,
            FullFilePath = filePath,
            Series = string.Empty,
            ComicInfo = comicInfo,
            Chapters = type == LibraryType.Comic
                ? Parser.ParseComicChapter(fileName)
                : Parser.ParseChapter(fileName)
        };

        ret.Series = type == LibraryType.Comic ? Parser.ParseComicSeries(fileName) : Parser.ParseSeries(fileName);
        ret.Volumes = type == LibraryType.Comic ? Parser.ParseComicVolume(fileName) : Parser.ParseVolume(fileName);

        if (ret.Series == string.Empty)
        {
            // Try to parse information out of each folder all the way to rootPath
            ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
        }

        var edition = Parser.ParseEdition(fileName);
        if (!string.IsNullOrEmpty(edition))
        {
            ret.Series = Parser.CleanTitle(ret.Series.Replace(edition, string.Empty), type is LibraryType.Comic);
            ret.Edition = edition;
        }

        var isSpecial = type == LibraryType.Comic ? Parser.IsComicSpecial(fileName) : Parser.IsMangaSpecial(fileName);
        // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that
        // could cause a problem as Omake is a special term, but there is valid volume/chapter information.
        if (ret.Chapters == Parser.DefaultChapter && ret.Volumes == Parser.LooseLeafVolume && isSpecial)
        {
            ret.IsSpecial = true;
            // NOTE: This can cause some complications; we should try to be a bit less aggressive about falling back to the folder
            ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
        }

        // If we are a special with marker, we need to ensure we use the correct series name. We can do this by falling back to the folder name
        if (Parser.HasSpecialMarker(fileName))
        {
            ret.IsSpecial = true;
            ret.SpecialIndex = Parser.ParseSpecialIndex(fileName);
            ret.Chapters = Parser.DefaultChapter;
            ret.Volumes = Parser.SpecialVolume;

            ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
        }

        if (ret.Chapters == Parser.DefaultChapter && ret.Volumes == Parser.LooseLeafVolume && type == LibraryType.Book)
        {
            ret.IsSpecial = true;
            ret.Chapters = Parser.DefaultChapter;
            ret.Volumes = Parser.SpecialVolume;
            ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
        }

        if (string.IsNullOrEmpty(ret.Series))
        {
            ret.Series = Parser.CleanTitle(fileName, type is LibraryType.Comic);
        }

        // Pdfs may have .pdf in the series name, remove that
        if (Parser.IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf"))
        {
            ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
        }

        // v0.8.x: Introducing a change where Specials will go in a separate Volume with a reserved number
        if (ret.IsSpecial)
        {
            ret.Volumes = $"{Parser.SpecialVolumeNumber}";
        }

        return string.IsNullOrEmpty(ret.Series) ? null : ret;
    }

    /// <summary>
    /// Only applicable for PDF files
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="type"></param>
    /// <returns></returns>
    public override bool IsApplicable(string filePath, LibraryType type)
    {
        return Parser.IsPdf(filePath);
    }
}
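A rough usage sketch of the parser above — illustrative only; the file paths and the directoryService instance are assumptions for the example:

// Illustrative only — not part of this commit.
var pdfParser = new PdfParser(directoryService);
var comic = pdfParser.Parse("/comics/DC/Batman 001 (2016).pdf", "/comics/DC", "/comics", LibraryType.Comic);
var book = pdfParser.Parse("/books/Fiction/My Novel.pdf", "/books/Fiction", "/books", LibraryType.Book);
// With no parsable volume/chapter, the Book branch marks the file IsSpecial and files it under the reserved Specials volume.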

@@ -1,13 +1,11 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;

@@ -31,15 +29,9 @@ public interface IProcessSeries
    /// </summary>
    /// <returns></returns>
    Task Prime();
    Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library, bool forceUpdate = false);
    void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false);

    // These exist only for Unit testing
    void UpdateSeriesMetadata(Series series, Library library);
    void UpdateVolumes(Series series, IList<ParserInfo> parsedInfos, bool forceUpdate = false);
    void UpdateChapters(Series series, Volume volume, IList<ParserInfo> parsedInfos, bool forceUpdate = false);
    void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info, bool forceUpdate = false);
    void UpdateChapterFromComicInfo(Chapter chapter, ComicInfo? comicInfo, bool forceUpdate = false);
    void Reset();
    Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library, bool forceUpdate = false);
}

/// <summary>

@@ -59,16 +51,14 @@ public class ProcessSeries : IProcessSeries
    private readonly ICollectionTagService _collectionTagService;
    private readonly IReadingListService _readingListService;
    private readonly IExternalMetadataService _externalMetadataService;
    private readonly ITagManagerService _tagManagerService;

    private Dictionary<string, Genre> _genres;
    private IList<Person> _people;
    private Dictionary<string, Tag> _tags;
    private Dictionary<string, CollectionTag> _collectionTags;

    public ProcessSeries(IUnitOfWork unitOfWork, ILogger<ProcessSeries> logger, IEventHub eventHub,
        IDirectoryService directoryService, ICacheHelper cacheHelper, IReadingItemService readingItemService,
        IFileService fileService, IMetadataService metadataService, IWordCountAnalyzerService wordCountAnalyzerService,
        ICollectionTagService collectionTagService, IReadingListService readingListService, IExternalMetadataService externalMetadataService)
        ICollectionTagService collectionTagService, IReadingListService readingListService,
        IExternalMetadataService externalMetadataService, ITagManagerService tagManagerService)
    {
        _unitOfWork = unitOfWork;
        _logger = logger;

@@ -82,12 +72,7 @@ public class ProcessSeries : IProcessSeries
        _collectionTagService = collectionTagService;
        _readingListService = readingListService;
        _externalMetadataService = externalMetadataService;

        _genres = new Dictionary<string, Genre>();
        _people = new List<Person>();
        _tags = new Dictionary<string, Tag>();
        _collectionTags = new Dictionary<string, CollectionTag>();
        _tagManagerService = tagManagerService;
    }

    /// <summary>

@@ -95,12 +80,22 @@ public class ProcessSeries : IProcessSeries
    /// </summary>
    public async Task Prime()
    {
        _genres = (await _unitOfWork.GenreRepository.GetAllGenresAsync()).ToDictionary(t => t.NormalizedTitle);
        _people = await _unitOfWork.PersonRepository.GetAllPeople();
        _tags = (await _unitOfWork.TagRepository.GetAllTagsAsync()).ToDictionary(t => t.NormalizedTitle);
        _collectionTags = (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync(CollectionTagIncludes.SeriesMetadata))
            .ToDictionary(t => t.NormalizedTitle);
        try
        {
            await _tagManagerService.Prime();
        }
        catch (Exception ex)
        {
            _logger.LogCritical(ex, "Unable to prime tag manager. Scan cannot proceed. Report to Kavita dev");
        }
    }

    /// <summary>
    /// Frees up memory
    /// </summary>
    public void Reset()
    {
        _tagManagerService.Reset();
    }

    public async Task ProcessSeriesAsync(IList<ParserInfo> parsedInfos, Library library, bool forceUpdate = false)
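A hedged sketch of the intended lifecycle around the new ITagManagerService — the calling code shown here is assumed for illustration, not taken from this commit:

// Illustrative only — not part of this commit.
await processSeries.Prime();                                  // warms the tag caches via ITagManagerService
await processSeries.ProcessSeriesAsync(parsedInfos, library); // processes one grouped series
processSeries.Reset();                                        // frees the cached tag state once the scan finishes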

@@ -112,42 +107,22 @@ public class ProcessSeries : IProcessSeries
        var seriesName = parsedInfos[0].Series;
        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Updated, seriesName));
        _logger.LogInformation("[ScannerService] Beginning series update on {SeriesName}", seriesName);
        _logger.LogInformation("[ScannerService] Beginning series update on {SeriesName}, Forced: {ForceUpdate}", seriesName, forceUpdate);

        // Check if there is a Series
        var firstInfo = parsedInfos[0];
        Series? series;
        try
        {
            // There is an opportunity to allow duplicate series here. Like if one is in root/marvel/batman and another is root/dc/batman,
            // by changing to a ToList() and if multiple, doing a firstInfo.FirstFolder/RootFolder type check
            series =
                await _unitOfWork.SeriesRepository.GetFullSeriesByAnyName(firstInfo.Series, firstInfo.LocalizedSeries,
                    library.Id, firstInfo.Format);
        }
        catch (Exception ex)
        {
            var seriesCollisions = await _unitOfWork.SeriesRepository.GetAllSeriesByAnyName(firstInfo.LocalizedSeries, string.Empty, library.Id, firstInfo.Format);

            seriesCollisions = seriesCollisions.Where(collision =>
                collision.Name != firstInfo.Series || collision.LocalizedName != firstInfo.LocalizedSeries).ToList();

            if (seriesCollisions.Count > 1)
            {
                var firstCollision = seriesCollisions[0];
                var secondCollision = seriesCollisions[1];

                var tableRows = $"<tr><td>Name: {firstCollision.Name}</td><td>Name: {secondCollision.Name}</td></tr>" +
                    $"<tr><td>Localized: {firstCollision.LocalizedName}</td><td>Localized: {secondCollision.LocalizedName}</td></tr>" +
                    $"<tr><td>Filename: {Parser.Parser.NormalizePath(firstCollision.FolderPath)}</td><td>Filename: {Parser.Parser.NormalizePath(secondCollision.FolderPath)}</td></tr>";

                var htmlTable = $"<table class='table table-striped'><thead><tr><th>Series 1</th><th>Series 2</th></tr></thead><tbody>{string.Join(string.Empty, tableRows)}</tbody></table>";

                _logger.LogError(ex, "Scanner found a Series {SeriesName} which matched another Series {LocalizedName} in a different folder parallel to Library {LibraryName} root folder. This is not allowed. Please correct",
                    firstInfo.Series, firstInfo.LocalizedSeries, library.Name);

                await _eventHub.SendMessageAsync(MessageFactory.Error,
                    MessageFactory.ErrorEvent($"Library {library.Name} Series collision on {firstInfo.Series}",
                        htmlTable));
            }
            await ReportDuplicateSeriesLookup(library, firstInfo, ex);
            return;
        }

@@ -169,7 +144,7 @@ public class ProcessSeries : IProcessSeries
        // parsedInfos[0] is not the first volume or chapter. We need to find it using a ComicInfo check (as it uses firstParsedInfo for series sort)
        var firstParsedInfo = parsedInfos.FirstOrDefault(p => p.ComicInfo != null, firstInfo);

        UpdateVolumes(series, parsedInfos, forceUpdate);
        await UpdateVolumes(series, parsedInfos, forceUpdate);
        series.Pages = series.Volumes.Sum(v => v.Pages);

        series.NormalizedName = series.Name.ToNormalized();

@@ -200,7 +175,7 @@ public class ProcessSeries : IProcessSeries
            series.NormalizedLocalizedName = series.LocalizedName.ToNormalized();
        }

        UpdateSeriesMetadata(series, library);
        await UpdateSeriesMetadata(series, library);

        // Update series FolderPath here
        await UpdateSeriesFolderPath(parsedInfos, library, series);

@@ -219,14 +194,6 @@ public class ProcessSeries : IProcessSeries
            _logger.LogCritical(ex,
                "[ScannerService] There was an issue writing to the database for series {SeriesName}",
                series.Name);
            _logger.LogTrace("[ScannerService] Series Metadata Dump: {@Series}", series.Metadata);
            _logger.LogTrace("[ScannerService] People Dump: {@People}", _people
                .Select(p =>
                    new {p.Id, p.Name, SeriesMetadataIds =
                        p.SeriesMetadatas?.Select(m => m.Id),
                        ChapterMetadataIds =
                        p.ChapterMetadatas?.Select(m => m.Id)
                            .ToList()}));

            await _eventHub.SendMessageAsync(MessageFactory.Error,
                MessageFactory.ErrorEvent($"There was an issue writing to the DB for Series {series.OriginalName}",

@@ -234,18 +201,25 @@ public class ProcessSeries : IProcessSeries
            return;
        }

        // Process reading list after commit as we need to commit per list
        await _readingListService.CreateReadingListsFromSeries(series, library);
        BackgroundJob.Enqueue(() => _readingListService.CreateReadingListsFromSeries(library.Id, series.Id));

        if (seriesAdded)
        {
            // See if any recommendations can link up to the series and pre-fetch external metadata for the series
            _logger.LogInformation("Linking up External Recommendations for new series (if applicable)");
            await _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type);
            await _unitOfWork.ExternalSeriesMetadataRepository.LinkRecommendationsToSeries(series);

            BackgroundJob.Enqueue(() =>
                _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));

            await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded,
                MessageFactory.SeriesAddedEvent(series.Id, series.Name, series.LibraryId), false);
        }
        else
        {
            await _unitOfWork.ExternalSeriesMetadataRepository.LinkRecommendationsToSeries(series);
        }

        _logger.LogInformation("[ScannerService] Finished series update on {SeriesName} in {Milliseconds} ms", seriesName, scanWatch.ElapsedMilliseconds);
    }

@@ -253,18 +227,47 @@ public class ProcessSeries : IProcessSeries
        catch (Exception ex)
        {
            _logger.LogError(ex, "[ScannerService] There was an exception updating series for {SeriesName}", series.Name);
            return;
        }

        var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
        await _metadataService.GenerateCoversForSeries(series, settings.EncodeMediaAs, settings.CoverImageSize);
        EnqueuePostSeriesProcessTasks(series.LibraryId, series.Id);
        BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
    }

    private async Task ReportDuplicateSeriesLookup(Library library, ParserInfo firstInfo, Exception ex)
    {
        var seriesCollisions = await _unitOfWork.SeriesRepository.GetAllSeriesByAnyName(firstInfo.LocalizedSeries, string.Empty, library.Id, firstInfo.Format);

        seriesCollisions = seriesCollisions.Where(collision =>
            collision.Name != firstInfo.Series || collision.LocalizedName != firstInfo.LocalizedSeries).ToList();

        if (seriesCollisions.Count > 1)
        {
            var firstCollision = seriesCollisions[0];
            var secondCollision = seriesCollisions[1];

            var tableRows = $"<tr><td>Name: {firstCollision.Name}</td><td>Name: {secondCollision.Name}</td></tr>" +
                $"<tr><td>Localized: {firstCollision.LocalizedName}</td><td>Localized: {secondCollision.LocalizedName}</td></tr>" +
                $"<tr><td>Filename: {Parser.Parser.NormalizePath(firstCollision.FolderPath)}</td><td>Filename: {Parser.Parser.NormalizePath(secondCollision.FolderPath)}</td></tr>";

            var htmlTable = $"<table class='table table-striped'><thead><tr><th>Series 1</th><th>Series 2</th></tr></thead><tbody>{string.Join(string.Empty, tableRows)}</tbody></table>";

            _logger.LogError(ex, "Scanner found a Series {SeriesName} which matched another Series {LocalizedName} in a different folder parallel to Library {LibraryName} root folder. This is not allowed. Please correct",
                firstInfo.Series, firstInfo.LocalizedSeries, library.Name);

            await _eventHub.SendMessageAsync(MessageFactory.Error,
                MessageFactory.ErrorEvent($"Library {library.Name} Series collision on {firstInfo.Series}",
                    htmlTable));
        }
    }

    private async Task UpdateSeriesFolderPath(IEnumerable<ParserInfo> parsedInfos, Library library, Series series)
    {
        var seriesDirs = _directoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(l => l.Path),
            parsedInfos.Select(f => f.FullFilePath).ToList());
        var libraryFolders = library.Folders.Select(l => Parser.Parser.NormalizePath(l.Path)).ToList();
        var seriesFiles = parsedInfos.Select(f => Parser.Parser.NormalizePath(f.FullFilePath)).ToList();
        var seriesDirs = _directoryService.FindHighestDirectoriesFromFiles(libraryFolders, seriesFiles);
        if (seriesDirs.Keys.Count == 0)
        {
            _logger.LogCritical(

@@ -278,18 +281,23 @@ public class ProcessSeries : IProcessSeries
        // Don't save FolderPath if it's a library Folder
        if (!library.Folders.Select(f => f.Path).Contains(seriesDirs.Keys.First()))
        {
            // BUG: FolderPath can be a level higher than it needs to be. I'm not sure why it's like this, but I thought it should be one level lower.
            // I think it's like this because higher level is checked or not checked. But I think we can do both
            series.FolderPath = Parser.Parser.NormalizePath(seriesDirs.Keys.First());
            _logger.LogDebug("Updating {Series} FolderPath to {FolderPath}", series.Name, series.FolderPath);
        }
    }

    var lowestFolder = _directoryService.FindLowestDirectoriesFromFiles(libraryFolders, seriesFiles);
    if (!string.IsNullOrEmpty(lowestFolder))
    {
        series.LowestFolderPath = lowestFolder;
        _logger.LogDebug("Updating {Series} LowestFolderPath to {FolderPath}", series.Name, series.LowestFolderPath);
    }
    }

    public void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false)
    {
        BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(libraryId, seriesId, forceUpdate));
    }

    public void UpdateSeriesMetadata(Series series, Library library)
    private async Task UpdateSeriesMetadata(Series series, Library library)
    {
        series.Metadata ??= new SeriesMetadataBuilder().Build();
        var firstChapter = SeriesService.GetFirstChapterForMetadata(series);

@@ -314,8 +322,8 @@ public class ProcessSeries : IProcessSeries
        // The actual number of Counts defined across all chapters' metadata
        series.Metadata.MaxCount = chapters.Max(chapter => chapter.Count);

        var maxVolume = series.Volumes.Max(v => (int) Parser.Parser.MaxNumberFromRange(v.Name));
        var maxChapter = chapters.Max(c => (int) Parser.Parser.MaxNumberFromRange(c.Range));
        var maxVolume = (int) series.Volumes.Max(v => v.MaxNumber);
        var maxChapter = (int) chapters.Max(c => c.MaxNumber);

        // Single books usually don't have a number in their Range (filename)
        if (series.Format == MangaFormat.Epub || series.Format == MangaFormat.Pdf && chapters.Count == 1)

@@ -363,14 +371,9 @@ public class ProcessSeries : IProcessSeries
            _logger.LogDebug("Collection tag(s) found for {SeriesName}, updating collections", series.Name);
            foreach (var collection in firstChapter.SeriesGroup.Split(',', StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries))
            {
                var normalizedName = Parser.Parser.Normalize(collection);
                if (!_collectionTags.TryGetValue(normalizedName, out var tag))
                {
                    tag = _collectionTagService.CreateTag(collection);
                    _collectionTags.Add(normalizedName, tag);
                }

                _collectionTagService.AddTagToSeriesMetadata(tag, series.Metadata);
                var t = await _tagManagerService.GetCollectionTag(collection);
                if (t == null) continue;
                _collectionTagService.AddTagToSeriesMetadata(t, series.Metadata);
            }
        }

@@ -445,6 +448,30 @@ public class ProcessSeries : IProcessSeries
            }
        }

        if (!series.Metadata.ImprintLocked)
        {
            foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Imprint))
            {
                PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
            }
        }

        if (!series.Metadata.TeamLocked)
        {
            foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Team))
            {
                PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
            }
        }

        if (!series.Metadata.LocationLocked)
        {
            foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Location))
            {
                PersonHelper.AddPersonIfNotExists(series.Metadata.People, person);
            }
        }

        if (!series.Metadata.LettererLocked)
        {
            foreach (var person in chapter.People.Where(p => p.Role == PersonRole.Letterer))

@@ -502,6 +529,9 @@ public class ProcessSeries : IProcessSeries
                case PersonRole.Inker:
                    if (!series.Metadata.InkerLocked) series.Metadata.People.Remove(person);
                    break;
                case PersonRole.Imprint:
                    if (!series.Metadata.ImprintLocked) series.Metadata.People.Remove(person);
                    break;
                case PersonRole.Colorist:
                    if (!series.Metadata.ColoristLocked) series.Metadata.People.Remove(person);
                    break;

@@ -534,7 +564,7 @@ public class ProcessSeries : IProcessSeries

    }

    public void UpdateVolumes(Series series, IList<ParserInfo> parsedInfos, bool forceUpdate = false)
    private async Task UpdateVolumes(Series series, IList<ParserInfo> parsedInfos, bool forceUpdate = false)
    {
        // Add new volumes and update chapters per volume
        var distinctVolumes = parsedInfos.DistinctVolumes();

@@ -544,10 +574,12 @@ public class ProcessSeries : IProcessSeries
            Volume? volume;
            try
            {
                volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
                // With the Name change to be formatted, matching on Name no longer works because Name returns "1" while volumeNumber is "1.0", so we use LookupName as the original
                volume = series.Volumes.SingleOrDefault(s => s.LookupName == volumeNumber);
            }
            catch (Exception ex)
            {
                // TODO: Push this to UI in some way
                if (!ex.Message.Equals("Sequence contains more than one matching element")) throw;
                _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
                throw new KavitaException(

@@ -561,7 +593,8 @@ public class ProcessSeries : IProcessSeries
                series.Volumes.Add(volume);
            }

            volume.Name = volumeNumber;
            volume.LookupName = volumeNumber;
            volume.Name = volume.GetNumberTitle();

            _logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
            var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();

@@ -576,7 +609,7 @@ public class ProcessSeries : IProcessSeries
                try
                {
                    var firstChapterInfo = infos.SingleOrDefault(i => i.FullFilePath.Equals(firstFile.FilePath));
                    UpdateChapterFromComicInfo(chapter, firstChapterInfo?.ComicInfo, forceUpdate);
                    await UpdateChapterFromComicInfo(chapter, firstChapterInfo?.ComicInfo, forceUpdate);
                }
                catch (Exception ex)
                {

@@ -586,7 +619,9 @@ public class ProcessSeries : IProcessSeries
        }

        // Remove existing volumes that aren't in parsedInfos
        var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList();
        var nonDeletedVolumes = series.Volumes
            .Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.LookupName))
            .ToList();
        if (series.Volumes.Count != nonDeletedVolumes.Count)
        {
            _logger.LogDebug("[ScannerService] Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",

@@ -597,8 +632,9 @@ public class ProcessSeries : IProcessSeries
                var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? string.Empty;
                if (!string.IsNullOrEmpty(file) && _directoryService.FileSystem.File.Exists(file))
                {
                    // This can happen when file is renamed and volume is removed
                    _logger.LogInformation(
                        "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}",
                        "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk (usually volume marker removed) File: {File}",
                        file);
                }

@@ -609,7 +645,7 @@ public class ProcessSeries : IProcessSeries
        }
    }

    public void UpdateChapters(Series series, Volume volume, IList<ParserInfo> parsedInfos, bool forceUpdate = false)
    private void UpdateChapters(Series series, Volume volume, IList<ParserInfo> parsedInfos, bool forceUpdate = false)
    {
        // Add new chapters
        foreach (var info in parsedInfos)

@@ -640,12 +676,19 @@ public class ProcessSeries : IProcessSeries
                chapter.UpdateFrom(info);
            }

            if (chapter == null) continue;
            if (chapter == null)
            {
                continue;
            }
            // Add files
            var specialTreatment = info.IsSpecialInfo();
            AddOrUpdateFileForChapter(chapter, info, forceUpdate);

            // TODO: Investigate using the ChapterBuilder here
            chapter.Number = Parser.Parser.MinNumberFromRange(info.Chapters).ToString(CultureInfo.InvariantCulture);
            chapter.Range = specialTreatment ? info.Filename : info.Chapters;
            chapter.MinNumber = Parser.Parser.MinNumberFromRange(info.Chapters);
            chapter.MaxNumber = Parser.Parser.MaxNumberFromRange(info.Chapters);
            chapter.SortOrder = info.IssueOrder;
            chapter.Range = chapter.GetNumberTitle();
        }
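A hedged sketch of how a parsed chapter range now populates the numeric fields — values follow the range helpers shown earlier; GetNumberTitle's exact display string is assumed, not taken from this commit:

// Illustrative only — not part of this commit. For info.Chapters == "10-12":
chapter.MinNumber = Parser.Parser.MinNumberFromRange("10-12"); // 10
chapter.MaxNumber = Parser.Parser.MaxNumberFromRange("10-12"); // 12
chapter.SortOrder = info.IssueOrder;                           // ComicInfo-driven order, falling back to the parsed chapter number
chapter.Range = chapter.GetNumberTitle();                      // display string derived from the numbers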

@@ -669,7 +712,7 @@ public class ProcessSeries : IProcessSeries
            }
        }

    public void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info, bool forceUpdate = false)
    private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info, bool forceUpdate = false)
    {
        chapter.Files ??= new List<MangaFile>();
        var existingFile = chapter.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);

@@ -680,6 +723,7 @@ public class ProcessSeries : IProcessSeries
            if (!forceUpdate && !_fileService.HasFileBeenModifiedSince(existingFile.FilePath, existingFile.LastModified) && existingFile.Pages != 0) return;
            existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format);
            existingFile.Extension = fileInfo.Extension.ToLowerInvariant();
            existingFile.FileName = Parser.Parser.RemoveExtensionIfSupported(existingFile.FilePath);
            existingFile.Bytes = fileInfo.Length;
            // We skip updating DB here with last modified time so that metadata refresh can do it
        }

@@ -694,7 +738,7 @@ public class ProcessSeries : IProcessSeries
        }
    }

    public void UpdateChapterFromComicInfo(Chapter chapter, ComicInfo? comicInfo, bool forceUpdate = false)
    private async Task UpdateChapterFromComicInfo(Chapter chapter, ComicInfo? comicInfo, bool forceUpdate = false)
    {
        if (comicInfo == null) return;
        var firstFile = chapter.Files.MinBy(x => x.Chapter);

@@ -753,9 +797,7 @@ public class ProcessSeries : IProcessSeries
        if (!string.IsNullOrEmpty(comicInfo.Web))
        {
            chapter.WebLinks = string.Join(",", comicInfo.Web
                .Split(",")
                .Where(s => !string.IsNullOrEmpty(s))
                .Select(s => s.Trim())
                .Split(",", StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
            );

            // For each weblink, try to parse out some MetadataIds and store in the Chapter directly for matching (CBL)
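A small, self-contained illustration of the tightened web-link split above (not from this commit):

// Illustrative only — the new split drops empty entries and trims each link in one pass.
var webLinks = string.Join(",", "https://anilist.co/manga/1, ,https://mangadex.org/title/x,"
    .Split(",", StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries));
// webLinks == "https://anilist.co/manga/1,https://mangadex.org/title/x"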

@@ -774,21 +816,6 @@ public class ProcessSeries : IProcessSeries
        // This needs to check against both Number and Volume to calculate Count
        chapter.Count = comicInfo.CalculatedCount();

        void AddPerson(Person person)
        {
            PersonHelper.AddPersonIfNotExists(chapter.People, person);
        }

        void AddGenre(Genre genre, bool newTag)
        {
            chapter.Genres.Add(genre);
        }

        void AddTag(Tag tag, bool added)
        {
            chapter.Tags.Add(tag);
        }

        if (comicInfo.Year > 0)
        {

@@ -797,148 +824,87 @@ public class ProcessSeries : IProcessSeries
            chapter.ReleaseDate = new DateTime(comicInfo.Year, month, day);
        }

        var people = GetTagValues(comicInfo.Colorist);
        var people = TagHelper.GetTagValues(comicInfo.Colorist);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Colorist);
        UpdatePeople(people, PersonRole.Colorist, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Colorist);

        people = GetTagValues(comicInfo.Characters);
        people = TagHelper.GetTagValues(comicInfo.Characters);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Character);
        UpdatePeople(people, PersonRole.Character, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Character);

        people = GetTagValues(comicInfo.Translator);
        people = TagHelper.GetTagValues(comicInfo.Translator);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Translator);
        UpdatePeople(people, PersonRole.Translator, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Translator);

        people = GetTagValues(comicInfo.Writer);
        people = TagHelper.GetTagValues(comicInfo.Writer);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Writer);
        UpdatePeople(people, PersonRole.Writer, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Writer);

        people = GetTagValues(comicInfo.Editor);
        people = TagHelper.GetTagValues(comicInfo.Editor);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Editor);
        UpdatePeople(people, PersonRole.Editor, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Editor);

        people = GetTagValues(comicInfo.Inker);
        people = TagHelper.GetTagValues(comicInfo.Inker);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Inker);
        UpdatePeople(people, PersonRole.Inker, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Inker);

        people = GetTagValues(comicInfo.Letterer);
        people = TagHelper.GetTagValues(comicInfo.Letterer);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Letterer);
        UpdatePeople(people, PersonRole.Letterer, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Letterer);

        people = GetTagValues(comicInfo.Penciller);
        people = TagHelper.GetTagValues(comicInfo.Penciller);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Penciller);
        UpdatePeople(people, PersonRole.Penciller, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Penciller);

        people = GetTagValues(comicInfo.CoverArtist);
        people = TagHelper.GetTagValues(comicInfo.CoverArtist);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.CoverArtist);
        UpdatePeople(people, PersonRole.CoverArtist, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.CoverArtist);

        people = GetTagValues(comicInfo.Publisher);
        people = TagHelper.GetTagValues(comicInfo.Publisher);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Publisher);
        UpdatePeople(people, PersonRole.Publisher, AddPerson);
        await UpdatePeople(chapter, people, PersonRole.Publisher);

        var genres = GetTagValues(comicInfo.Genre);
        people = TagHelper.GetTagValues(comicInfo.Imprint);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Imprint);
        await UpdatePeople(chapter, people, PersonRole.Imprint);

        people = TagHelper.GetTagValues(comicInfo.Teams);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Team);
        await UpdatePeople(chapter, people, PersonRole.Team);

        people = TagHelper.GetTagValues(comicInfo.Locations);
        PersonHelper.RemovePeople(chapter.People, people, PersonRole.Location);
        await UpdatePeople(chapter, people, PersonRole.Location);

        var genres = TagHelper.GetTagValues(comicInfo.Genre);
        GenreHelper.KeepOnlySameGenreBetweenLists(chapter.Genres,
            genres.Select(g => new GenreBuilder(g).Build()).ToList());
        UpdateGenre(genres, AddGenre);
        foreach (var genre in genres)
        {
            var g = await _tagManagerService.GetGenre(genre);
            if (g == null) continue;
            chapter.Genres.Add(g);
        }

        var tags = GetTagValues(comicInfo.Tags);
        var tags = TagHelper.GetTagValues(comicInfo.Tags);
        TagHelper.KeepOnlySameTagBetweenLists(chapter.Tags, tags.Select(t => new TagBuilder(t).Build()).ToList());
        UpdateTag(tags, AddTag);
    }

    private static IList<string> GetTagValues(string comicInfoTagSeparatedByComma)
    {
        // TODO: Move this to an extension and test it
        if (string.IsNullOrEmpty(comicInfoTagSeparatedByComma))
        foreach (var tag in tags)
        {
            return ImmutableList<string>.Empty;
        }

        return comicInfoTagSeparatedByComma.Split(",")
            .Select(s => s.Trim())
            .DistinctBy(Parser.Parser.Normalize)
            .ToList();
    }

    /// <summary>
    /// Given a list of all existing people, this will check the new names and roles and if it doesn't exist in allPeople, will create and
    /// add an entry. For each person in name, the callback will be executed.
    /// </summary>
    /// <remarks>This does not remove people if an empty list is passed into names</remarks>
    /// <remarks>This is used to add new people to a list without worrying about duplicating rows in the DB</remarks>
    /// <param name="names"></param>
    /// <param name="role"></param>
    /// <param name="action"></param>
    private void UpdatePeople(IEnumerable<string> names, PersonRole role, Action<Person> action)
    {
        var allPeopleTypeRole = _people.Where(p => p.Role == role).ToList();

        foreach (var name in names)
        {
            var normalizedName = name.ToNormalized();
            var person = allPeopleTypeRole.Find(p =>
                p.NormalizedName != null && p.NormalizedName.Equals(normalizedName));

            if (person == null)
            {
                person = new PersonBuilder(name, role).Build();
                _people.Add(person);
            }
            action(person);
            var t = await _tagManagerService.GetTag(tag);
            if (t == null) continue;
            chapter.Tags.Add(t);
        }
    }

    /// <summary>
    ///
    /// </summary>
    /// <param name="names"></param>
    /// <param name="action">Executes for each tag</param>
    private void UpdateGenre(IEnumerable<string> names, Action<Genre, bool> action)
    private async Task UpdatePeople(Chapter chapter, IList<string> people, PersonRole role)
    {
        foreach (var name in names)
        foreach (var person in people)
        {
            var normalizedName = name.ToNormalized();
            if (string.IsNullOrEmpty(normalizedName)) continue;

            _genres.TryGetValue(normalizedName, out var genre);
            var newTag = genre == null;
            if (newTag)
            {
                genre = new GenreBuilder(name).Build();
                _genres.Add(normalizedName, genre);
                _unitOfWork.GenreRepository.Attach(genre);
            }

            action(genre!, newTag);
            var p = await _tagManagerService.GetPerson(person, role);
            if (p == null) continue;
            chapter.People.Add(p);
        }
    }

    /// <summary>
    ///
    /// </summary>
    /// <param name="names"></param>
    /// <param name="action">Callback for every item. Will give said item back and a bool if item was added</param>
    private void UpdateTag(IEnumerable<string> names, Action<Tag, bool> action)
    {
        foreach (var name in names)
        {
            if (string.IsNullOrEmpty(name.Trim())) continue;

            var normalizedName = name.ToNormalized();
            _tags.TryGetValue(normalizedName, out var tag);

            var added = tag == null;
            if (tag == null)
            {
                tag = new TagBuilder(name).Build();
                _tags.Add(normalizedName, tag);
            }

            action(tag, added);
        }
    }

}
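A hedged sketch of the ITagManagerService lookup pattern that replaces the old in-class caches above — the method names and null-check flow follow the diff; the surrounding wiring is assumed:

// Illustrative only — not part of this commit.
var genre = await _tagManagerService.GetGenre("Action");
if (genre != null) chapter.Genres.Add(genre);

var tag = await _tagManagerService.GetTag("Favourites");
if (tag != null) chapter.Tags.Add(tag);

var person = await _tagManagerService.GetPerson("Paru Itagaki", PersonRole.Writer);
if (person != null) chapter.People.Add(person);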