Local Metadata Integration Part 1 (#817)

* Started with some basic plumbing for ComicInfo parsing that updates Series/Volume.

* We can now get the chapter title from ComicInfo.xml

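A minimal sketch of how that title can be read out of a ComicInfo.xml stream with XmlSerializer, mirroring the approach in ArchiveService.GetComicInfo further down. The ComicInfoSketch type and Read helper are illustrative stand-ins, not Kavita's actual classes.

```csharp
using System.IO;
using System.Xml.Serialization;

// Stand-in for Kavita's ComicInfo entity; only a few of the schema's fields shown.
[XmlRoot("ComicInfo")]
public class ComicInfoSketch
{
    public string Title { get; set; }
    public string Series { get; set; }
    public string Summary { get; set; }
}

public static class ComicInfoReaderSketch
{
    // Deserialize a ComicInfo.xml stream, e.g. an entry opened from a .cbz archive.
    public static ComicInfoSketch Read(Stream comicInfoXml)
    {
        var serializer = new XmlSerializer(typeof(ComicInfoSketch));
        return (ComicInfoSketch) serializer.Deserialize(comicInfoXml);
    }
}
```
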
* Hooked in the ability to store people in the chapter metadata.

* Removed no-longer-used imports and fixed up some foreign key constraints when deleting a series with a linked person.

* Refactored Summary out of the Series UI and into SeriesMetadata. Updated the application to .NET 6. There is a bug in the metadata update code.

* Replaced the Parallel.ForEach with a normal foreach, which lets us use async. For I/O-heavy code, this shouldn't change much.

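A rough sketch of the pattern (names are hypothetical, not the actual service code): trading Parallel.ForEach for a plain foreach so the loop body can await.

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

public static class ForeachMigrationSketch
{
    public static async Task ProcessAllAsync(IEnumerable<string> seriesNames)
    {
        // Before (no awaits possible inside the lambda):
        // Parallel.ForEach(seriesNames, name => Process(name));

        // After: sequential, but async-friendly; for I/O-heavy work the
        // throughput difference is usually small.
        foreach (var name in seriesNames)
        {
            await ProcessAsync(name);
        }
    }

    // Hypothetical I/O-bound worker.
    private static Task ProcessAsync(string name)
    {
        Console.WriteLine($"Processing {name}");
        return Task.CompletedTask;
    }
}
```
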
* Refactored scan code to only check extensions that can contain ComicInfo, fixed a bug where scan events were not using the correct method name, and removed the summary field (still buggy)

* Fixed a bug where, on cancelling a metadata request in the modal, the underlying button would get stuck in a disabled state.

* Changed how metadata selects the first volume to read summary info from. It will now select the first non-special volume rather than Volume 1.

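A hedged sketch of the new selection using a stand-in Volume type (not Kavita's entity): take the lowest-numbered non-special volume rather than assuming a "Volume 1" exists, which avoids breaking on series that start above Volume 1.

```csharp
using System.Collections.Generic;
using System.Linq;

// Stand-in type for illustration only.
public class VolumeSketch
{
    public int Number { get; set; }
    public bool IsSpecial { get; set; }
    public string Summary { get; set; }
}

public static class SummarySourcePicker
{
    // Returns the volume whose summary should be used, or null if only specials exist.
    public static VolumeSketch PickSummarySource(IReadOnlyCollection<VolumeSketch> volumes)
    {
        return volumes
            .Where(v => !v.IsSpecial)
            .OrderBy(v => v.Number)
            .FirstOrDefault();
    }
}
```
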
* More debugging and found more bugs to fix

* Redid all the migrations as a single one. Fixed a bug with GetChapterInfo returning null when ChapterMetadata didn't exist for that Chapter.

* Fixed an issue with the mapper failing on GetChapterMetadata. Started work on adding people and a design for people.

* Fixed a bug: checking if a file was modified now takes into account whether the file has been processed at least once. Introduced a bug in saving people to series.

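The gist of the fix as a hedged sketch, not Kavita's exact logic (the real check ends up in FileService/CacheHelper in the diff below): a file that has never been processed must be processed regardless of its write time.

```csharp
using System;
using System.IO.Abstractions;

public class FileChangeCheckSketch
{
    private readonly IFileSystem _fileSystem;

    public FileChangeCheckSketch(IFileSystem fileSystem)
    {
        _fileSystem = fileSystem;
    }

    // lastProcessed == default means the file has never been processed,
    // so it needs processing even if its write time looks "old".
    public bool NeedsProcessing(string filePath, DateTime lastProcessed)
    {
        if (lastProcessed == default) return true;
        return _fileSystem.File.GetLastWriteTime(filePath) > lastProcessed;
    }
}
```
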
* Just made code compilable again

* Fixed up the code. People for series and chapters now add correctly without any DB issues.

* Things are working, but I'm not happy with how Person management is handled. I need to take into account that one person needs to map to an image and that the role is arbitrary.

* Started adding UI code to showcase chapter metadata

* Updated workflow to be .NET 6

* WIP of updating card detail to show the information more clearly and without so many if statements

* Removed ChapterMetadata and now store the data on the Chapter itself. Much easier to use and fewer joins.

* Implemented Genre on SeriesMetadata level

* Genres and People are now removed at the Series level if they are no longer present in ComicInfo

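The diff below does this via GenreHelper.KeepOnlySameGenreBetweenLists and PersonHelper.KeepOnlySamePeopleBetweenLists; the generic helper here is a hypothetical illustration of the same idea, not Kavita's code.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

public static class KeepOnlySketch
{
    // Remove every item from `existing` whose key is no longer present in
    // `fromComicInfo`, invoking a removal callback such as
    // series.Metadata.Genres.Remove(genre) for each one.
    public static void KeepOnly<T, TKey>(ICollection<T> existing, IEnumerable<T> fromComicInfo,
        Func<T, TKey> keyOf, Action<T> onRemove)
    {
        var keep = fromComicInfo.Select(keyOf).ToHashSet();
        foreach (var item in existing.Where(i => !keep.Contains(keyOf(i))).ToList())
        {
            onRemove(item);
        }
    }
}
```
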
* PeopleHelper is done with unit tests. Everything is working.

* Unit tests in place for Genre Helper

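The kind of unit test this refers to, written here with xUnit against the hypothetical KeepOnly sketch above rather than Kavita's real GenreHelper API.

```csharp
using System.Collections.Generic;
using Xunit;

public class KeepOnlySketchTests
{
    [Fact]
    public void KeepOnly_RemovesGenresNoLongerInComicInfo()
    {
        var existing = new List<string> { "Action", "Comedy", "Drama" };
        var fromComicInfo = new List<string> { "Action" };

        // Genres missing from ComicInfo should be dropped from the existing list.
        KeepOnlySketch.KeepOnly(existing, fromComicInfo, g => g, g => existing.Remove(g));

        Assert.Equal(new[] { "Action" }, existing);
    }
}
```
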
* Starting on CacheHelper

* Finished tests for ShouldUpdateCoverImage. Fixed and added tests in ArchiveService/ScannerService.

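For reference, the decision being tested, paraphrased from the ShouldUpdateCoverImage code visible in the diff into a standalone function; the real method lives on CacheHelper and takes a MangaFile plus a creation date.

```csharp
public static class CoverUpdateRuleSketch
{
    public static bool ShouldUpdate(bool coverFileExists, bool coverLocked, bool forceUpdate,
        bool fileHasChanged, bool hasCoverImageSet)
    {
        // A locked cover whose file is still on disk is never regenerated.
        if (coverLocked && coverFileExists) return false;
        if (forceUpdate) return true;
        // Regenerate when the source changed or no valid cover exists yet.
        return fileHasChanged || !(hasCoverImageSet && coverFileExists);
    }
}
```
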
* CacheHelper is fully tested

* Some DI cleanup

* Scanner Service now calls GetComicInfo for books. Added the ability to update the Series sort name from metadata files (mainly epub, as ComicInfo doesn't have such a field)

* Forgot to move a line of code

* SortName now populates from metadata (epub only; ComicInfo has no such tag)

* Cards now show the chapter title name on hover if it's set, otherwise they default back to the title.

* Fixed a major issue with how MangaFiles were being updated with LastModified, which messed up our logic for avoiding refreshes.

* Woohoo, more tests and some refactors to be able to test more services with a mock filesystem. Fixed an issue where SortName was getting set from the first chapter even though the Series was in a group.

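A sketch of what testing through a mock filesystem looks like with System.IO.Abstractions.TestingHelpers, exercising the FileService added in this commit; the file paths are made up for the test.

```csharp
using System.Collections.Generic;
using System.IO.Abstractions.TestingHelpers;
using API.Services;
using Xunit;

public class FileServiceMockFsTests
{
    [Fact]
    public void Exists_ReturnsTrue_WhenFileIsInMockFileSystem()
    {
        // No real disk access: the filesystem is entirely in memory.
        var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
        {
            { "/manga/vol1.cbz", new MockFileData("fake archive bytes") }
        });
        var fileService = new FileService(fileSystem);

        Assert.True(fileService.Exists("/manga/vol1.cbz"));
        Assert.False(fileService.Exists("/manga/missing.cbz"));
    }
}
```
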
* Refactored the MangaFile creation code into the DbFactory, where we also set up the first LastModified update.

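A hypothetical sketch of the factory idea, centralizing construction so the first LastModified value is seeded in one place; the signature and MangaFileSketch type are illustrative, not Kavita's DbFactory.

```csharp
using System;
using System.IO;

// Stand-in for the MangaFile entity.
public class MangaFileSketch
{
    public string FilePath { get; set; }
    public int Pages { get; set; }
    public DateTime LastModified { get; set; }
}

public static class DbFactorySketch
{
    public static MangaFileSketch MangaFile(string filePath, int pages)
    {
        return new MangaFileSketch
        {
            FilePath = filePath,
            Pages = pages,
            // The first LastModified update happens here rather than being
            // scattered across callers.
            LastModified = File.GetLastWriteTime(filePath)
        };
    }
}
```
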
* The has-file-changed bug is now finally fixed

* Removed dead genres and refactored Genre to use Title instead of Name.

* Refactored out a directory from ShouldUpdateCoverImage() to keep the code clean

* Unit tests for ComicInfo on BookService.

* Refactored series detail into its own component

* Series-detail now receives refresh-metadata events to refresh what's on screen

* Removed references to Artist on PersonRole as it has no metadata mapping

* Security audit

* Fixed a benchmark

* Updated JWT Token generator to use new methods in .NET 6

* Updated all the docker and build commands to use net6.0

* Commented out the Sonar scan since it's not set up for net6.0 yet.
Joseph Milazzo 2021-12-02 11:02:34 -06:00 committed by GitHub
parent 10a6a3a544
commit e7619e6b0a
140 changed files with 9315 additions and 1545 deletions


@ -322,7 +322,12 @@ namespace API.Services
return null;
}
public ComicInfo GetComicInfo(string archivePath)
/// <summary>
/// This can be null if nothing is found or any errors occur during access
/// </summary>
/// <param name="archivePath"></param>
/// <returns></returns>
public ComicInfo? GetComicInfo(string archivePath)
{
if (!IsValidArchive(archivePath)) return null;
@ -336,7 +341,7 @@ namespace API.Services
case ArchiveLibrary.Default:
{
using var archive = ZipFile.OpenRead(archivePath);
var entry = archive.Entries.SingleOrDefault(x =>
var entry = archive.Entries.FirstOrDefault(x =>
!Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
&& Path.GetFileNameWithoutExtension(x.Name)?.ToLower() == ComicInfoFilename
&& !Path.GetFileNameWithoutExtension(x.Name)
@ -346,7 +351,18 @@ namespace API.Services
{
using var stream = entry.Open();
var serializer = new XmlSerializer(typeof(ComicInfo));
return (ComicInfo) serializer.Deserialize(stream);
var info = (ComicInfo) serializer.Deserialize(stream);
if (info != null)
{
info.Writer = Parser.Parser.CleanAuthor(info.Writer);
info.Colorist = Parser.Parser.CleanAuthor(info.Colorist);
info.Editor = Parser.Parser.CleanAuthor(info.Editor);
info.Inker = Parser.Parser.CleanAuthor(info.Inker);
info.Letterer = Parser.Parser.CleanAuthor(info.Letterer);
info.Penciller = Parser.Parser.CleanAuthor(info.Penciller);
info.Publisher = Parser.Parser.CleanAuthor(info.Publisher);
}
return info;
}
break;
@ -354,7 +370,7 @@ namespace API.Services
case ArchiveLibrary.SharpCompress:
{
using var archive = ArchiveFactory.Open(archivePath);
return FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
var info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
&& !Parser.Parser
.HasBlacklistedFolderInPath(
Path.GetDirectoryName(
@ -365,6 +381,18 @@ namespace API.Services
.Parser
.MacOsMetadataFileStartsWith)
&& Parser.Parser.IsXml(entry.Key)));
if (info != null)
{
info.Writer = Parser.Parser.CleanAuthor(info.Writer);
info.Colorist = Parser.Parser.CleanAuthor(info.Colorist);
info.Editor = Parser.Parser.CleanAuthor(info.Editor);
info.Inker = Parser.Parser.CleanAuthor(info.Inker);
info.Letterer = Parser.Parser.CleanAuthor(info.Letterer);
info.Penciller = Parser.Parser.CleanAuthor(info.Penciller);
info.Publisher = Parser.Parser.CleanAuthor(info.Publisher);
}
return info;
}
case ArchiveLibrary.NotSupported:
_logger.LogWarning("[GetComicInfo] This archive cannot be read: {ArchivePath}", archivePath);


@ -201,11 +201,15 @@ namespace API.Services
var info = new ComicInfo()
{
// TODO: Summary is in html, we need to turn it into string
Summary = epubBook.Schema.Package.Metadata.Description,
Writer = string.Join(",", epubBook.Schema.Package.Metadata.Creators),
Writer = string.Join(",", epubBook.Schema.Package.Metadata.Creators.Select(c => Parser.Parser.CleanAuthor(c.Creator))),
Publisher = string.Join(",", epubBook.Schema.Package.Metadata.Publishers),
Month = !string.IsNullOrEmpty(publicationDate) ? DateTime.Parse(publicationDate).Month : 0,
Year = !string.IsNullOrEmpty(publicationDate) ? DateTime.Parse(publicationDate).Year : 0,
Title = epubBook.Title,
Genre = string.Join(",", epubBook.Schema.Package.Metadata.Subjects.Select(s => s.ToLower().Trim())),
};
// Parse tags not exposed via Library
foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems)
@ -215,6 +219,9 @@ namespace API.Services
case "calibre:rating":
info.UserRating = float.Parse(metadataItem.Content);
break;
case "calibre:title_sort":
info.TitleSort = metadataItem.Content;
break;
}
}
@ -305,8 +312,6 @@ namespace API.Services
{
using var epubBook = EpubReader.OpenBook(filePath);
// If the epub has the following tags, we can group the books as Volumes
// <meta content="5.0" name="calibre:series_index"/>
// <meta content="The Dark Tower" name="calibre:series"/>
// <meta content="Wolves of the Calla" name="calibre:title_sort"/>
// If all three are present, we can take that over dc:title and format as:
@ -323,6 +328,7 @@ namespace API.Services
var series = string.Empty;
var specialName = string.Empty;
var groupPosition = string.Empty;
var titleSort = string.Empty;
foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems)
@ -338,6 +344,7 @@ namespace API.Services
break;
case "calibre:title_sort":
specialName = metadataItem.Content;
titleSort = metadataItem.Content;
break;
}
@ -363,18 +370,26 @@ namespace API.Services
{
specialName = epubBook.Title;
}
return new ParserInfo()
var info = new ParserInfo()
{
Chapters = Parser.Parser.DefaultChapter,
Edition = string.Empty,
Format = MangaFormat.Epub,
Filename = Path.GetFileName(filePath),
Title = specialName.Trim(),
Title = specialName?.Trim(),
FullFilePath = filePath,
IsSpecial = false,
Series = series.Trim(),
Volumes = seriesIndex
};
// Don't set titleSort if the book belongs to a group
if (!string.IsNullOrEmpty(titleSort) && string.IsNullOrEmpty(seriesIndex))
{
info.SeriesSort = titleSort;
}
return info;
}
}
catch (Exception)
@ -392,7 +407,7 @@ namespace API.Services
FullFilePath = filePath,
IsSpecial = false,
Series = epubBook.Title.Trim(),
Volumes = Parser.Parser.DefaultVolume
Volumes = Parser.Parser.DefaultVolume,
};
}
catch (Exception ex)
@ -494,6 +509,7 @@ namespace API.Services
private static void GetPdfPage(IDocReader docReader, int pageNumber, Stream stream)
{
// TODO: BUG: Most of this Bitmap code is only supported on Windows. Refactor.
using var pageReader = docReader.GetPageReader(pageNumber);
var rawBytes = pageReader.GetImage(new NaiveTransparencyRemover());
var width = pageReader.GetPageWidth();


@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.IO.Abstractions;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
@ -13,6 +14,8 @@ namespace API.Services
public class DirectoryService : IDirectoryService
{
private readonly ILogger<DirectoryService> _logger;
private readonly IFileSystem _fileSystem;
private static readonly Regex ExcludeDirectories = new Regex(
@"@eaDir|\.DS_Store",
RegexOptions.Compiled | RegexOptions.IgnoreCase);
@ -23,9 +26,10 @@ namespace API.Services
public static readonly string BackupDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "backups");
public static readonly string ConfigDirectory = Path.Join(Directory.GetCurrentDirectory(), "config");
public DirectoryService(ILogger<DirectoryService> logger)
public DirectoryService(ILogger<DirectoryService> logger, IFileSystem fileSystem)
{
_logger = logger;
_logger = logger;
_fileSystem = fileSystem;
}
/// <summary>
@ -91,6 +95,11 @@ namespace API.Services
return paths;
}
/// <summary>
/// Does Directory Exist
/// </summary>
/// <param name="directory"></param>
/// <returns></returns>
public bool Exists(string directory)
{
var di = new DirectoryInfo(directory);
@ -365,7 +374,7 @@ namespace API.Services
/// <param name="searchPattern">Regex pattern to search against</param>
/// <param name="logger"></param>
/// <exception cref="ArgumentException"></exception>
public static int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern, ILogger logger)
public int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern, ILogger logger)
{
//Count of files traversed and timer for diagnostic output
var fileCount = 0;


@ -0,0 +1,46 @@
using System;
using System.IO.Abstractions;
using API.Extensions;
namespace API.Services;
public interface IFileService
{
IFileSystem GetFileSystem();
bool HasFileBeenModifiedSince(string filePath, DateTime time);
bool Exists(string filePath);
}
public class FileService : IFileService
{
private readonly IFileSystem _fileSystem;
public FileService(IFileSystem fileSystem)
{
_fileSystem = fileSystem;
}
public FileService() : this(fileSystem: new FileSystem()) { }
public IFileSystem GetFileSystem()
{
return _fileSystem;
}
/// <summary>
/// If the File on disk's last modified time is after passed time
/// </summary>
/// <remarks>This has a resolution to the minute. Will ignore seconds and milliseconds</remarks>
/// <param name="filePath">Full qualified path of file</param>
/// <param name="time"></param>
/// <returns></returns>
public bool HasFileBeenModifiedSince(string filePath, DateTime time)
{
return !string.IsNullOrEmpty(filePath) && _fileSystem.File.GetLastWriteTime(filePath).Truncate(TimeSpan.TicksPerMinute) > time.Truncate(TimeSpan.TicksPerMinute);
}
public bool Exists(string filePath)
{
return _fileSystem.File.Exists(filePath);
}
}
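
The Truncate extension used above comes from API.Extensions and is not part of this diff; a common minute-resolution implementation looks like the following sketch (it may differ from Kavita's).

```csharp
using System;

public static class DateTimeExtensionsSketch
{
    // e.g. resolution = TimeSpan.TicksPerMinute drops seconds and milliseconds.
    public static DateTime Truncate(this DateTime dateTime, long resolution)
    {
        return dateTime.AddTicks(-(dateTime.Ticks % resolution));
    }
}
```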


@ -5,8 +5,10 @@ using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.Data;
using API.Data.Metadata;
using API.Data.Repositories;
using API.Data.Scanner;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
@ -17,317 +19,499 @@ using API.SignalR;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;
namespace API.Services
namespace API.Services;
public class MetadataService : IMetadataService
{
public class MetadataService : IMetadataService
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<MetadataService> _logger;
private readonly IArchiveService _archiveService;
private readonly IBookService _bookService;
private readonly IImageService _imageService;
private readonly IHubContext<MessageHub> _messageHub;
private readonly ICacheHelper _cacheHelper;
private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst();
public MetadataService(IUnitOfWork unitOfWork, ILogger<MetadataService> logger,
IArchiveService archiveService, IBookService bookService, IImageService imageService,
IHubContext<MessageHub> messageHub, ICacheHelper cacheHelper)
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<MetadataService> _logger;
private readonly IArchiveService _archiveService;
private readonly IBookService _bookService;
private readonly IImageService _imageService;
private readonly IHubContext<MessageHub> _messageHub;
private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst();
_unitOfWork = unitOfWork;
_logger = logger;
_archiveService = archiveService;
_bookService = bookService;
_imageService = imageService;
_messageHub = messageHub;
_cacheHelper = cacheHelper;
}
public MetadataService(IUnitOfWork unitOfWork, ILogger<MetadataService> logger,
IArchiveService archiveService, IBookService bookService, IImageService imageService, IHubContext<MessageHub> messageHub)
/// <summary>
/// Gets the cover image for the file
/// </summary>
/// <remarks>Has side effect of marking the file as updated</remarks>
/// <param name="file"></param>
/// <param name="volumeId"></param>
/// <param name="chapterId"></param>
/// <returns></returns>
private string GetCoverImage(MangaFile file, int volumeId, int chapterId)
{
//file.UpdateLastModified();
switch (file.Format)
{
_unitOfWork = unitOfWork;
_logger = logger;
_archiveService = archiveService;
_bookService = bookService;
_imageService = imageService;
_messageHub = messageHub;
case MangaFormat.Pdf:
case MangaFormat.Epub:
return _bookService.GetCoverImage(file.FilePath, ImageService.GetChapterFormat(chapterId, volumeId));
case MangaFormat.Image:
var coverImage = _imageService.GetCoverFile(file);
return _imageService.GetCoverImage(coverImage, ImageService.GetChapterFormat(chapterId, volumeId));
case MangaFormat.Archive:
return _archiveService.GetCoverImage(file.FilePath, ImageService.GetChapterFormat(chapterId, volumeId));
case MangaFormat.Unknown:
default:
return string.Empty;
}
/// <summary>
/// Determines whether an entity should regenerate cover image.
/// </summary>
/// <remarks>If a cover image is locked but the underlying file has been deleted, this will allow regenerating. </remarks>
/// <param name="coverImage"></param>
/// <param name="firstFile"></param>
/// <param name="forceUpdate"></param>
/// <param name="isCoverLocked"></param>
/// <param name="coverImageDirectory">Directory where cover images are. Defaults to <see cref="DirectoryService.CoverImageDirectory"/></param>
/// <returns></returns>
public static bool ShouldUpdateCoverImage(string coverImage, MangaFile firstFile, bool forceUpdate = false,
bool isCoverLocked = false, string coverImageDirectory = null)
{
if (string.IsNullOrEmpty(coverImageDirectory))
{
coverImageDirectory = DirectoryService.CoverImageDirectory;
}
}
var fileExists = File.Exists(Path.Join(coverImageDirectory, coverImage));
if (isCoverLocked && fileExists) return false;
if (forceUpdate) return true;
return (firstFile != null && firstFile.HasFileBeenModified()) || !HasCoverImage(coverImage, fileExists);
}
private static bool HasCoverImage(string coverImage)
{
return HasCoverImage(coverImage, File.Exists(coverImage));
}
private static bool HasCoverImage(string coverImage, bool fileExists)
{
return !string.IsNullOrEmpty(coverImage) && fileExists;
}
private string GetCoverImage(MangaFile file, int volumeId, int chapterId)
{
file.UpdateLastModified();
switch (file.Format)
{
case MangaFormat.Pdf:
case MangaFormat.Epub:
return _bookService.GetCoverImage(file.FilePath, ImageService.GetChapterFormat(chapterId, volumeId));
case MangaFormat.Image:
var coverImage = _imageService.GetCoverFile(file);
return _imageService.GetCoverImage(coverImage, ImageService.GetChapterFormat(chapterId, volumeId));
case MangaFormat.Archive:
return _archiveService.GetCoverImage(file.FilePath, ImageService.GetChapterFormat(chapterId, volumeId));
default:
return string.Empty;
}
}
/// <summary>
/// Updates the metadata for a Chapter
/// </summary>
/// <param name="chapter"></param>
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
public bool UpdateMetadata(Chapter chapter, bool forceUpdate)
{
var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (ShouldUpdateCoverImage(chapter.CoverImage, firstFile, forceUpdate, chapter.CoverImageLocked))
{
_logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile?.FilePath);
chapter.CoverImage = GetCoverImage(firstFile, chapter.VolumeId, chapter.Id);
return true;
}
/// <summary>
/// Updates the metadata for a Chapter
/// </summary>
/// <param name="chapter"></param>
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
private bool UpdateChapterCoverImage(Chapter chapter, bool forceUpdate)
{
var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (!_cacheHelper.ShouldUpdateCoverImage(Path.Join(DirectoryService.CoverImageDirectory, chapter.CoverImage), firstFile, chapter.Created, forceUpdate, chapter.CoverImageLocked))
return false;
_logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile?.FilePath);
chapter.CoverImage = GetCoverImage(firstFile, chapter.VolumeId, chapter.Id);
return true;
}
private void UpdateChapterMetadata(Chapter chapter, ICollection<Person> allPeople, bool forceUpdate)
{
var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (firstFile == null || _cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, forceUpdate, firstFile)) return;
UpdateChapterFromComicInfo(chapter, allPeople, firstFile);
firstFile.UpdateLastModified();
}
private void UpdateChapterFromComicInfo(Chapter chapter, ICollection<Person> allPeople, MangaFile firstFile)
{
var comicInfo = GetComicInfo(firstFile); // TODO: Think about letting the higher level loop have access for series to avoid duplicate IO operations
if (comicInfo == null) return;
if (!string.IsNullOrEmpty(comicInfo.Title))
{
chapter.TitleName = comicInfo.Title.Trim();
}
/// <summary>
/// Updates the metadata for a Volume
/// </summary>
/// <param name="volume"></param>
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
public bool UpdateMetadata(Volume volume, bool forceUpdate)
if (!string.IsNullOrEmpty(comicInfo.Colorist))
{
// We need to check if Volume coverImage matches first chapters if forceUpdate is false
if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate)) return false;
volume.Chapters ??= new List<Chapter>();
var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault();
if (firstChapter == null) return false;
volume.CoverImage = firstChapter.CoverImage;
return true;
var people = comicInfo.Colorist.Split(",");
PersonHelper.RemovePeople(chapter.People, people, PersonRole.Colorist);
PersonHelper.UpdatePeople(allPeople, people, PersonRole.Colorist,
person => PersonHelper.AddPersonIfNotExists(chapter.People, person));
}
/// <summary>
/// Updates metadata for Series
/// </summary>
/// <param name="series"></param>
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
public bool UpdateMetadata(Series series, bool forceUpdate)
if (!string.IsNullOrEmpty(comicInfo.Writer))
{
var madeUpdate = false;
if (series == null) return false;
var people = comicInfo.Writer.Split(",");
PersonHelper.RemovePeople(chapter.People, people, PersonRole.Writer);
PersonHelper.UpdatePeople(allPeople, people, PersonRole.Writer,
person => PersonHelper.AddPersonIfNotExists(chapter.People, person));
}
// NOTE: This will fail if we replace the cover of the first volume on a first scan. Because the series will already have a cover image
if (ShouldUpdateCoverImage(series.CoverImage, null, forceUpdate, series.CoverImageLocked))
if (!string.IsNullOrEmpty(comicInfo.Editor))
{
var people = comicInfo.Editor.Split(",");
PersonHelper.RemovePeople(chapter.People, people, PersonRole.Editor);
PersonHelper.UpdatePeople(allPeople, people, PersonRole.Editor,
person => PersonHelper.AddPersonIfNotExists(chapter.People, person));
}
if (!string.IsNullOrEmpty(comicInfo.Inker))
{
var people = comicInfo.Inker.Split(",");
PersonHelper.RemovePeople(chapter.People, people, PersonRole.Inker);
PersonHelper.UpdatePeople(allPeople, people, PersonRole.Inker,
person => PersonHelper.AddPersonIfNotExists(chapter.People, person));
}
if (!string.IsNullOrEmpty(comicInfo.Letterer))
{
var people = comicInfo.Letterer.Split(",");
PersonHelper.RemovePeople(chapter.People, people, PersonRole.Letterer);
PersonHelper.UpdatePeople(allPeople, people, PersonRole.Letterer,
person => PersonHelper.AddPersonIfNotExists(chapter.People, person));
}
if (!string.IsNullOrEmpty(comicInfo.Penciller))
{
var people = comicInfo.Penciller.Split(",");
PersonHelper.RemovePeople(chapter.People, people, PersonRole.Penciller);
PersonHelper.UpdatePeople(allPeople, people, PersonRole.Penciller,
person => PersonHelper.AddPersonIfNotExists(chapter.People, person));
}
if (!string.IsNullOrEmpty(comicInfo.CoverArtist))
{
var people = comicInfo.CoverArtist.Split(",");
PersonHelper.RemovePeople(chapter.People, people, PersonRole.CoverArtist);
PersonHelper.UpdatePeople(allPeople, people, PersonRole.CoverArtist,
person => PersonHelper.AddPersonIfNotExists(chapter.People, person));
}
if (!string.IsNullOrEmpty(comicInfo.Publisher))
{
var people = comicInfo.Publisher.Split(",");
PersonHelper.RemovePeople(chapter.People, people, PersonRole.Publisher);
PersonHelper.UpdatePeople(allPeople, people, PersonRole.Publisher,
person => PersonHelper.AddPersonIfNotExists(chapter.People, person));
}
}
/// <summary>
/// Updates the cover image for a Volume
/// </summary>
/// <param name="volume"></param>
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
private bool UpdateVolumeCoverImage(Volume volume, bool forceUpdate)
{
// We need to check if Volume coverImage matches first chapters if forceUpdate is false
if (volume == null || !_cacheHelper.ShouldUpdateCoverImage(Path.Join(DirectoryService.CoverImageDirectory, volume.CoverImage), null, volume.Created, forceUpdate)) return false;
volume.Chapters ??= new List<Chapter>();
var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault();
if (firstChapter == null) return false;
volume.CoverImage = firstChapter.CoverImage;
return true;
}
/// <summary>
/// Updates metadata for Series
/// </summary>
/// <param name="series"></param>
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
private void UpdateSeriesCoverImage(Series series, bool forceUpdate)
{
if (series == null) return;
// NOTE: This will fail if we replace the cover of the first volume on a first scan. Because the series will already have a cover image
if (!_cacheHelper.ShouldUpdateCoverImage(Path.Join(DirectoryService.CoverImageDirectory, series.CoverImage), null, series.Created, forceUpdate, series.CoverImageLocked))
return;
series.Volumes ??= new List<Volume>();
var firstCover = series.Volumes.GetCoverImage(series.Format);
string coverImage = null;
if (firstCover == null && series.Volumes.Any())
{
// If firstCover is null and one volume, the whole series is Chapters under Vol 0.
if (series.Volumes.Count == 1)
{
series.Volumes ??= new List<Volume>();
var firstCover = series.Volumes.GetCoverImage(series.Format);
string coverImage = null;
if (firstCover == null && series.Volumes.Any())
{
// If firstCover is null and one volume, the whole series is Chapters under Vol 0.
if (series.Volumes.Count == 1)
{
coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting)
.FirstOrDefault(c => !c.IsSpecial)?.CoverImage;
madeUpdate = true;
}
if (!HasCoverImage(coverImage))
{
coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting)
.FirstOrDefault()?.CoverImage;
madeUpdate = true;
}
}
series.CoverImage = firstCover?.CoverImage ?? coverImage;
coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting)
.FirstOrDefault(c => !c.IsSpecial)?.CoverImage;
}
return UpdateSeriesSummary(series, forceUpdate) || madeUpdate ;
}
private bool UpdateSeriesSummary(Series series, bool forceUpdate)
{
// NOTE: This can be problematic when the file changes and a summary already exists, but it is likely
// better to let the user kick off a refresh metadata on an individual Series than having overhead of
// checking File last write time.
if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return false;
var isBook = series.Library.Type == LibraryType.Book;
var firstVolume = series.Volumes.FirstWithChapters(isBook);
var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles();
var firstFile = firstChapter?.Files.FirstOrDefault();
if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return false;
if (Parser.Parser.IsPdf(firstFile.FilePath)) return false;
var comicInfo = GetComicInfo(series.Format, firstFile);
if (string.IsNullOrEmpty(comicInfo?.Summary)) return false;
series.Summary = comicInfo.Summary;
return true;
}
private ComicInfo GetComicInfo(MangaFormat format, MangaFile firstFile)
{
if (format is MangaFormat.Archive or MangaFormat.Epub)
if (!_cacheHelper.CoverImageExists(coverImage))
{
return Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetComicInfo(firstFile.FilePath) : _archiveService.GetComicInfo(firstFile.FilePath);
coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting)
.FirstOrDefault()?.CoverImage;
}
}
series.CoverImage = firstCover?.CoverImage ?? coverImage;
}
return null;
private void UpdateSeriesMetadata(Series series, ICollection<Person> allPeople, ICollection<Genre> allGenres, bool forceUpdate)
{
var isBook = series.Library.Type == LibraryType.Book;
var firstVolume = series.Volumes.OrderBy(c => c.Number, new ChapterSortComparer()).FirstWithChapters(isBook);
var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles();
var firstFile = firstChapter?.Files.FirstOrDefault();
if (firstFile == null || _cacheHelper.HasFileNotChangedSinceCreationOrLastScan(firstChapter, forceUpdate, firstFile)) return;
if (Parser.Parser.IsPdf(firstFile.FilePath)) return;
var comicInfo = GetComicInfo(firstFile);
if (comicInfo == null) return;
// Summary Info
if (!string.IsNullOrEmpty(comicInfo.Summary))
{
series.Metadata.Summary = comicInfo.Summary; // NOTE: I can move this to the bottom as I have a comicInfo selection, save me an extra read
}
/// <summary>
/// Refreshes Metadata for a whole library
/// </summary>
/// <remarks>This can be heavy on memory first run</remarks>
/// <param name="libraryId"></param>
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
public async Task RefreshMetadata(int libraryId, bool forceUpdate = false)
foreach (var chapter in series.Volumes.SelectMany(volume => volume.Chapters))
{
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
_logger.LogInformation("[MetadataService] Beginning metadata refresh of {LibraryName}", library.Name);
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.Writer).Select(p => p.Name), PersonRole.Writer,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
var stopwatch = Stopwatch.StartNew();
var totalTime = 0L;
_logger.LogInformation("[MetadataService] Refreshing Library {LibraryName}. Total Items: {TotalSize}. Total Chunks: {TotalChunks} with {ChunkSize} size", library.Name, chunkInfo.TotalSize, chunkInfo.TotalChunks, chunkInfo.ChunkSize);
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(library.Id, 0F));
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.CoverArtist).Select(p => p.Name), PersonRole.CoverArtist,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
var i = 0;
for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++, i++)
{
if (chunkInfo.TotalChunks == 0) continue;
totalTime += stopwatch.ElapsedMilliseconds;
stopwatch.Restart();
_logger.LogInformation("[MetadataService] Processing chunk {ChunkNumber} / {TotalChunks} with size {ChunkSize}. Series ({SeriesStart} - {SeriesEnd}",
chunk, chunkInfo.TotalChunks, chunkInfo.ChunkSize, chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize);
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.Publisher).Select(p => p.Name), PersonRole.Publisher,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
new UserParams()
{
PageNumber = chunk,
PageSize = chunkInfo.ChunkSize
});
_logger.LogDebug("[MetadataService] Fetched {SeriesCount} series for refresh", nonLibrarySeries.Count);
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.Character).Select(p => p.Name), PersonRole.Character,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
Parallel.ForEach(nonLibrarySeries, series =>
{
try
{
_logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName);
var volumeUpdated = false;
foreach (var volume in series.Volumes)
{
var chapterUpdated = false;
foreach (var chapter in volume.Chapters)
{
chapterUpdated = UpdateMetadata(chapter, forceUpdate);
}
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.Colorist).Select(p => p.Name), PersonRole.Colorist,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
}
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.Editor).Select(p => p.Name), PersonRole.Editor,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
UpdateMetadata(series, volumeUpdated || forceUpdate);
}
catch (Exception)
{
/* Swallow exception */
}
});
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.Inker).Select(p => p.Name), PersonRole.Inker,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
"[MetadataService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.Letterer).Select(p => p.Name), PersonRole.Letterer,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
foreach (var series in nonLibrarySeries)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(library.Id, series.Id));
}
}
else
{
_logger.LogInformation(
"[MetadataService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
}
var progress = Math.Max(0F, Math.Min(1F, i * 1F / chunkInfo.TotalChunks));
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(library.Id, progress));
}
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(library.Id, 1F));
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesNumber} series in library {LibraryName} in {ElapsedMilliseconds} milliseconds total", chunkInfo.TotalSize, library.Name, totalTime);
PersonHelper.UpdatePeople(allPeople, chapter.People.Where(p => p.Role == PersonRole.Penciller).Select(p => p.Name), PersonRole.Penciller,
person => PersonHelper.AddPersonIfNotExists(series.Metadata.People, person));
}
var comicInfos = series.Volumes
.SelectMany(volume => volume.Chapters)
.SelectMany(c => c.Files)
.Select(GetComicInfo)
.Where(ci => ci != null)
.ToList();
/// <summary>
/// Refreshes Metadata for a Series. Will always force updates.
/// </summary>
/// <param name="libraryId"></param>
/// <param name="seriesId"></param>
public async Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false)
var genres = comicInfos.SelectMany(i => i.Genre.Split(",")).Distinct().ToList();
var people = series.Volumes.SelectMany(volume => volume.Chapters).SelectMany(c => c.People).ToList();
PersonHelper.KeepOnlySamePeopleBetweenLists(series.Metadata.People,
people, person => series.Metadata.People.Remove(person));
GenreHelper.UpdateGenre(allGenres, genres, false, genre => GenreHelper.AddGenreIfNotExists(series.Metadata.Genres, genre));
GenreHelper.KeepOnlySameGenreBetweenLists(series.Metadata.Genres, genres.Select(g => DbFactory.Genre(g, false)).ToList(),
genre => series.Metadata.Genres.Remove(genre));
}
private ComicInfo GetComicInfo(MangaFile firstFile)
{
if (firstFile?.Format is MangaFormat.Archive or MangaFormat.Epub)
{
return Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetComicInfo(firstFile.FilePath) : _archiveService.GetComicInfo(firstFile.FilePath);
}
return null;
}
/// <summary>
///
/// </summary>
/// <remarks>This cannot have any Async code within. It is used within Parallel.ForEach</remarks>
/// <param name="series"></param>
/// <param name="forceUpdate"></param>
private void ProcessSeriesMetadataUpdate(Series series, IDictionary<int, IList<int>> chapterIds, ICollection<Person> allPeople, ICollection<Genre> allGenres, bool forceUpdate)
{
_logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName);
try
{
var sw = Stopwatch.StartNew();
var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
if (series == null)
{
_logger.LogError("[MetadataService] Series {SeriesId} was not found on Library {LibraryId}", seriesId, libraryId);
return;
}
_logger.LogInformation("[MetadataService] Beginning metadata refresh of {SeriesName}", series.Name);
var volumeUpdated = false;
foreach (var volume in series.Volumes)
{
var chapterUpdated = false;
foreach (var chapter in volume.Chapters)
{
chapterUpdated = UpdateMetadata(chapter, forceUpdate);
chapterUpdated = UpdateChapterCoverImage(chapter, forceUpdate);
UpdateChapterMetadata(chapter, allPeople, forceUpdate || chapterUpdated);
}
volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
volumeUpdated = UpdateVolumeCoverImage(volume, chapterUpdated || forceUpdate);
}
UpdateMetadata(series, volumeUpdated || forceUpdate);
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(series.LibraryId, series.Id));
}
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
UpdateSeriesCoverImage(series, volumeUpdated || forceUpdate);
UpdateSeriesMetadata(series, allPeople, allGenres, forceUpdate);
}
catch (Exception ex)
{
_logger.LogError(ex, "[MetadataService] There was an exception during updating metadata for {SeriesName} ", series.Name);
}
}
/// <summary>
/// Refreshes Metadata for a whole library
/// </summary>
/// <remarks>This can be heavy on memory first run</remarks>
/// <param name="libraryId"></param>
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
public async Task RefreshMetadata(int libraryId, bool forceUpdate = false)
{
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
_logger.LogInformation("[MetadataService] Beginning metadata refresh of {LibraryName}", library.Name);
var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
var stopwatch = Stopwatch.StartNew();
var totalTime = 0L;
_logger.LogInformation("[MetadataService] Refreshing Library {LibraryName}. Total Items: {TotalSize}. Total Chunks: {TotalChunks} with {ChunkSize} size", library.Name, chunkInfo.TotalSize, chunkInfo.TotalChunks, chunkInfo.ChunkSize);
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(library.Id, 0F));
for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
{
if (chunkInfo.TotalChunks == 0) continue;
totalTime += stopwatch.ElapsedMilliseconds;
stopwatch.Restart();
_logger.LogInformation("[MetadataService] Processing chunk {ChunkNumber} / {TotalChunks} with size {ChunkSize}. Series ({SeriesStart} - {SeriesEnd}",
chunk, chunkInfo.TotalChunks, chunkInfo.ChunkSize, chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize);
var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
new UserParams()
{
PageNumber = chunk,
PageSize = chunkInfo.ChunkSize
});
_logger.LogDebug("[MetadataService] Fetched {SeriesCount} series for refresh", nonLibrarySeries.Count);
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdWithSeriesIdForSeriesAsync(nonLibrarySeries.Select(s => s.Id).ToArray());
var allPeople = await _unitOfWork.PersonRepository.GetAllPeople();
var allGenres = await _unitOfWork.GenreRepository.GetAllGenres();
var seriesIndex = 0;
foreach (var series in nonLibrarySeries)
{
try
{
ProcessSeriesMetadataUpdate(series, chapterIds, allPeople, allGenres, forceUpdate);
}
catch (Exception ex)
{
_logger.LogError(ex, "[MetadataService] There was an exception during metadata refresh for {SeriesName}", series.Name);
}
var index = chunk * seriesIndex;
var progress = Math.Max(0F, Math.Min(1F, index * 1F / chunkInfo.TotalSize));
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(library.Id, progress));
seriesIndex++;
}
await _unitOfWork.CommitAsync();
foreach (var series in nonLibrarySeries)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(library.Id, series.Id));
}
_logger.LogInformation(
"[MetadataService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
}
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(library.Id, 1F));
// TODO: Remove any leftover People from DB
await _unitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated();
await _unitOfWork.GenreRepository.RemoveAllGenreNoLongerAssociated();
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesNumber} series in library {LibraryName} in {ElapsedMilliseconds} milliseconds total", chunkInfo.TotalSize, library.Name, totalTime);
}
// TODO: I can probably refactor RefreshMetadata and RefreshMetadataForSeries to be the same by utilizing chunk size of 1, so most of the code can be the same.
private async Task PerformScan(Library library, bool forceUpdate, Action<int, Chunk> action)
{
var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
var stopwatch = Stopwatch.StartNew();
var totalTime = 0L;
_logger.LogInformation("[MetadataService] Refreshing Library {LibraryName}. Total Items: {TotalSize}. Total Chunks: {TotalChunks} with {ChunkSize} size", library.Name, chunkInfo.TotalSize, chunkInfo.TotalChunks, chunkInfo.ChunkSize);
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(library.Id, 0F));
for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
{
if (chunkInfo.TotalChunks == 0) continue;
totalTime += stopwatch.ElapsedMilliseconds;
stopwatch.Restart();
action(chunk, chunkInfo);
// _logger.LogInformation("[MetadataService] Processing chunk {ChunkNumber} / {TotalChunks} with size {ChunkSize}. Series ({SeriesStart} - {SeriesEnd}",
// chunk, chunkInfo.TotalChunks, chunkInfo.ChunkSize, chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize);
// var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
// new UserParams()
// {
// PageNumber = chunk,
// PageSize = chunkInfo.ChunkSize
// });
// _logger.LogDebug("[MetadataService] Fetched {SeriesCount} series for refresh", nonLibrarySeries.Count);
//
// var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdWithSeriesIdForSeriesAsync(nonLibrarySeries.Select(s => s.Id).ToArray());
// var allPeople = await _unitOfWork.PersonRepository.GetAllPeople();
// var allGenres = await _unitOfWork.GenreRepository.GetAllGenres();
//
//
// var seriesIndex = 0;
// foreach (var series in nonLibrarySeries)
// {
// try
// {
// ProcessSeriesMetadataUpdate(series, chapterIds, allPeople, allGenres, forceUpdate);
// }
// catch (Exception ex)
// {
// _logger.LogError(ex, "[MetadataService] There was an exception during metadata refresh for {SeriesName}", series.Name);
// }
// var index = chunk * seriesIndex;
// var progress = Math.Max(0F, Math.Min(1F, index * 1F / chunkInfo.TotalSize));
//
// await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
// MessageFactory.RefreshMetadataProgressEvent(library.Id, progress));
// seriesIndex++;
// }
await _unitOfWork.CommitAsync();
}
}
/// <summary>
/// Refreshes Metadata for a Series. Will always force updates.
/// </summary>
/// <param name="libraryId"></param>
/// <param name="seriesId"></param>
public async Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = true)
{
var sw = Stopwatch.StartNew();
var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
if (series == null)
{
_logger.LogError("[MetadataService] Series {SeriesId} was not found on Library {LibraryId}", seriesId, libraryId);
return;
}
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(libraryId, 0F));
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdWithSeriesIdForSeriesAsync(new [] { seriesId });
var allPeople = await _unitOfWork.PersonRepository.GetAllPeople();
var allGenres = await _unitOfWork.GenreRepository.GetAllGenres();
ProcessSeriesMetadataUpdate(series, chapterIds, allPeople, allGenres, forceUpdate);
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress,
MessageFactory.RefreshMetadataProgressEvent(libraryId, 1F));
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(series.LibraryId, series.Id));
}
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
}
}


@ -1,5 +1,4 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using API.Entities.Enums;


@ -4,6 +4,7 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using API.Data.Metadata;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces.Services;
@ -25,6 +26,8 @@ namespace API.Services.Tasks.Scanner
private readonly ConcurrentDictionary<ParsedSeries, List<ParserInfo>> _scannedSeries;
private readonly IBookService _bookService;
private readonly ILogger _logger;
private readonly IArchiveService _archiveService;
private readonly IDirectoryService _directoryService;
/// <summary>
/// An instance of a pipeline for processing files and returning a Map of Series -> ParserInfos.
@ -32,10 +35,13 @@ namespace API.Services.Tasks.Scanner
/// </summary>
/// <param name="bookService"></param>
/// <param name="logger"></param>
public ParseScannedFiles(IBookService bookService, ILogger logger)
public ParseScannedFiles(IBookService bookService, ILogger logger, IArchiveService archiveService,
IDirectoryService directoryService)
{
_bookService = bookService;
_logger = logger;
_archiveService = archiveService;
_directoryService = directoryService;
_scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
}
@ -53,6 +59,20 @@ namespace API.Services.Tasks.Scanner
return existingKey != null ? parsedSeries[existingKey] : new List<ParserInfo>();
}
private ComicInfo GetComicInfo(string path)
{
if (Parser.Parser.IsEpub(path))
{
return _bookService.GetComicInfo(path);
}
if (Parser.Parser.IsComicInfoExtension(path))
{
return _archiveService.GetComicInfo(path);
}
return null;
}
/// <summary>
/// Processes files found during a library scan.
/// Populates a collection of <see cref="ParserInfo"/> for DB updates later.
@ -90,9 +110,32 @@ namespace API.Services.Tasks.Scanner
info.Merge(info2);
}
// TODO: Think about doing this before the Fallback code to speed up
info.ComicInfo = GetComicInfo(path);
if (info.ComicInfo != null)
{
var sw = Stopwatch.StartNew();
if (!string.IsNullOrEmpty(info.ComicInfo.Volume))
{
info.Volumes = info.ComicInfo.Volume;
}
if (!string.IsNullOrEmpty(info.ComicInfo.Series))
{
info.Series = info.ComicInfo.Series;
}
if (!string.IsNullOrEmpty(info.ComicInfo.Number))
{
info.Chapters = info.ComicInfo.Number;
}
_logger.LogDebug("ComicInfo read added {Time} ms to processing", sw.ElapsedMilliseconds);
}
TrackSeries(info);
}
/// <summary>
/// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing.
/// This will check if the name matches an existing series name (multiple fields) <see cref="MergeName"/>
@ -161,12 +204,12 @@ namespace API.Services.Tasks.Scanner
{
var sw = Stopwatch.StartNew();
totalFiles = 0;
var searchPattern = GetLibrarySearchPattern();
var searchPattern = Parser.Parser.SupportedExtensions;
foreach (var folderPath in folders)
{
try
{
totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath, (f) =>
totalFiles += _directoryService.TraverseTreeParallelForEach(folderPath, (f) =>
{
try
{
@ -191,11 +234,6 @@ namespace API.Services.Tasks.Scanner
return SeriesWithInfos();
}
private static string GetLibrarySearchPattern()
{
return Parser.Parser.SupportedExtensions;
}
/// <summary>
/// Returns any series where there were parsed infos
/// </summary>

File diff suppressed because it is too large.