Book Feedback and small bugs (#183)

* Remove automatic retry for ScanLibraries: if a scan fails, it won't magically pass on a retry. Catch exceptions when opening books for parsing and swallow them so the offending file is ignored.

* Delete extra attempts

* Switched to using FirstOrDefault for finding an existing series. This helps avoid pointless crashes when duplicates are present (see the sketch after this list).

* Updated message when duplicate series are found (not sure how this happens)

* Fixed a negation in the volume-deletion check for files that still exist on disk.

* Implemented the ability to automatically scale the manga reader based on screen size.
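
As context for the FirstOrDefault bullet, a minimal sketch of the lookup, assuming the same predicate as the SingleOrDefault call in the scanner diff below (the final shape of the code may differ):

    // SingleOrDefault throws InvalidOperationException when more than one series
    // matches the normalized key; FirstOrDefault just takes the first match, so a
    // duplicate series no longer crashes the whole scan.
    var existingSeries = library.Series
        .FirstOrDefault(s => s.NormalizedName == key
                             || Parser.Parser.Normalize(s.OriginalName) == key);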
Joseph Milazzo 2021-04-29 17:52:34 -05:00 committed by GitHub
parent a01613f80f
commit 35a47f5d88
8 changed files with 65 additions and 23 deletions


@@ -187,22 +187,37 @@ namespace API.Services
return dict;
}
public static ParserInfo ParseInfo(string filePath)
/// <summary>
/// Parses out Title from book. Chapters and Volumes will always be "0". If there is any exception reading book (malformed books)
/// then null is returned.
/// </summary>
/// <param name="filePath"></param>
/// <returns></returns>
public ParserInfo ParseInfo(string filePath)
{
var epubBook = EpubReader.OpenBook(filePath);
return new ParserInfo()
try
{
Chapters = "0",
Edition = "",
Format = MangaFormat.Book,
Filename = Path.GetFileName(filePath),
Title = epubBook.Title,
FullFilePath = filePath,
IsSpecial = false,
Series = epubBook.Title,
Volumes = "0"
};
var epubBook = EpubReader.OpenBook(filePath);
return new ParserInfo()
{
Chapters = "0",
Edition = "",
Format = MangaFormat.Book,
Filename = Path.GetFileName(filePath),
Title = epubBook.Title,
FullFilePath = filePath,
IsSpecial = false,
Series = epubBook.Title,
Volumes = "0"
};
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when opening epub book: {FileName}", filePath);
}
return null;
}
public byte[] GetCoverImage(string fileFilePath, bool createThumbnail = true)
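
Because ParseInfo now returns null for malformed books, call sites are expected to guard the result before using it. A minimal, hypothetical sketch of such a guard (the skip behaviour shown here is illustrative, not taken verbatim from this commit):

    var info = _bookService.ParseInfo(path);
    if (info == null)
    {
        // Malformed epub: the failure was already logged inside BookService, so skip the file.
        return;
    }
    // ... continue processing the ParserInfo as usual ...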


@@ -42,12 +42,12 @@ namespace API.Services.Tasks
[DisableConcurrentExecution(timeoutInSeconds: 360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibraries()
{
var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
foreach (var lib in libraries)
{
// BUG?: I think we need to keep _scannedSeries within the ScanLibrary instance since this is multithreaded.
ScanLibrary(lib.Id, false);
}
}
@@ -68,6 +68,7 @@ namespace API.Services.Tasks
}
[DisableConcurrentExecution(360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibrary(int libraryId, bool forceUpdate)
{
var sw = Stopwatch.StartNew();
@@ -203,7 +204,27 @@ namespace API.Services.Tasks
foreach (var (key, infos) in parsedSeries)
{
// Key is normalized already
var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key);
Series existingSeries = null;
try
{
existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key);
}
catch (Exception e)
{
_logger.LogCritical(e, "There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it", key);
var duplicateSeries = library.Series.Where(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key).ToList();
//var firstSeries = duplicateSeries.First();
//duplicateSeries.
foreach (var series in duplicateSeries)
{
_logger.LogCritical("{Key} maps with {Series}", key, series.OriginalName);
}
// Merge them together?
//_unitOfWork.AppUserProgressRepository.MapSeriesProgressFromTo(firstSeries.Id, );
continue;
}
if (existingSeries == null)
{
existingSeries = DbFactory.Series(infos[0].Series);
@@ -292,7 +313,7 @@ namespace API.Services.Tasks
foreach (var volume in deletedVolumes)
{
var file = volume.Chapters.FirstOrDefault()?.Files.FirstOrDefault()?.FilePath ?? "no files";
if (!new FileInfo(file).Exists)
if (new FileInfo(file).Exists)
{
_logger.LogError("Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}", file);
}
@@ -435,7 +456,7 @@ namespace API.Services.Tasks
if (type == LibraryType.Book && Parser.Parser.IsEpub(path))
{
info = BookService.ParseInfo(path);
info = _bookService.ParseInfo(path);
}
else
{
@@ -451,7 +472,7 @@ namespace API.Services.Tasks
if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != "0")
{
info = Parser.Parser.Parse(path, rootPath, type);
var info2 = BookService.ParseInfo(path);
var info2 = _bookService.ParseInfo(path);
info.Merge(info2);
}
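
The [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] attributes on ScanLibraries and ScanLibrary above are standard Hangfire annotations: zero attempts disables re-queuing a failed scan, and Delete drops the job instead of parking it in the failed set. A minimal sketch of the same pattern on a hypothetical job class (only the attribute usage mirrors this commit):

    using Hangfire;

    public class ExampleScanJobs
    {
        // A scan that failed once will not magically succeed on retry, so retries are
        // disabled and an exceeded job is deleted rather than left in the failed queue.
        [DisableConcurrentExecution(timeoutInSeconds: 360)]
        [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
        public void ScanOnce()
        {
            // ... long-running scan work ...
        }
    }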