v0.6.1 Hotfix RC (#1635)
* Swapped out the SQLite Hangfire storage for Hangfire's own in-memory storage, and added DisableConcurrentExecution on ProcessChange to avoid duplicate work when multiple threads execute it at once (a configuration sketch follows this list).
* Fixed the Hangfire SQLite issues with CPU/RAM utilization that some users were facing.
* Fixed a case in the SharpCompress fallback where an invalid ComicInfo wasn't picked up.
* When parsing epubs, if there is a volume in the epub title, try to parse it and group on the remaining series name. This is beneficial for Light Novels, which are generally tagged this way (exercised by the new BookServiceTests below).
* Fixed delete series in series detail not triggering.
* Fixed some parsing logic for how we treat specials, like Annual and Omnibus.
* When scanning files, if the file is the cover image (loose leaf image), we reject it more quickly than before.
* Added a potential bug marker.
* Fixed a bug where the Info log level was only showing Error-level entries.
* Code smells.
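As a rough illustration of the first item, here is a minimal sketch (not the actual Kavita code) of wiring up Hangfire's in-memory storage and guarding a job method with DisableConcurrentExecution. The class and method shapes below are assumptions; only the Hangfire calls themselves (AddHangfire, UseInMemoryStorage from the Hangfire.InMemory package, and the DisableConcurrentExecution attribute) are real APIs.

using System.Threading.Tasks;
using Hangfire;
using Microsoft.Extensions.DependencyInjection;

public class HangfireSetupSketch
{
    // Sketch only: register Hangfire with its own in-memory storage
    // (Hangfire.InMemory package) instead of a SQLite-backed storage.
    public static void ConfigureHangfire(IServiceCollection services)
    {
        services.AddHangfire(config => config.UseInMemoryStorage());
        services.AddHangfireServer();
    }

    // DisableConcurrentExecution takes a lock timeout in seconds; while one
    // ProcessChange job holds the lock, overlapping invocations wait instead
    // of running the same work in parallel.
    [DisableConcurrentExecution(timeoutInSeconds: 60)]
    public Task ProcessChange(string filePath)
    {
        // The real change-processing logic would live here.
        return Task.CompletedTask;
    }
}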
parent 6dd79d8c6a
commit 3f51cb2a02
21 changed files with 96 additions and 15 deletions
@@ -281,6 +281,17 @@ public class ArchiveServiceTests
         Assert.Equal("BTOOOM! - Duplicate", comicInfo.Series);
     }
 
+    [Fact]
+    public void ShouldHaveComicInfo_OutsideRoot_SharpCompress()
+    {
+        var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
+        var archive = Path.Join(testDirectory, "ComicInfo_outside_root_SharpCompress.cb7");
+
+        var comicInfo = _archiveService.GetComicInfo(archive);
+        Assert.NotNull(comicInfo);
+        Assert.Equal("Fire Punch", comicInfo.Series);
+    }
+
     #endregion
 
     #region CanParseComicInfo
@@ -54,4 +54,28 @@ public class BookServiceTests
         Assert.Equal("Roger Starbuck,Junya Inoue", comicInfo.Writer);
     }
 
+    [Fact]
+    public void ShouldParseAsVolumeGroup_WithoutSeriesIndex()
+    {
+        var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+        var archive = Path.Join(testDirectory, "TitleWithVolume_NoSeriesOrSeriesIndex.epub");
+
+        var comicInfo = _bookService.GetComicInfo(archive);
+        Assert.NotNull(comicInfo);
+        Assert.Equal("1", comicInfo.Volume);
+        Assert.Equal("Accel World", comicInfo.Series);
+    }
+
+    [Fact]
+    public void ShouldParseAsVolumeGroup_WithSeriesIndex()
+    {
+        var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+        var archive = Path.Join(testDirectory, "TitleWithVolume.epub");
+
+        var comicInfo = _bookService.GetComicInfo(archive);
+        Assert.NotNull(comicInfo);
+        Assert.Equal("1.0", comicInfo.Volume);
+        Assert.Equal("Accel World", comicInfo.Series);
+    }
+
 }
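The two BookServiceTests cases above exercise the new volume-in-title grouping for epubs. Purely as an illustration of that kind of parsing (a hypothetical regex, not Kavita's actual parser), a title such as "Accel World, Vol. 1" could be split into a series name and volume number like this:

using System.Text.RegularExpressions;

public static class TitleVolumeSketch
{
    // Hypothetical pattern: "<series>, Vol. <n>" or "<series> Volume <n>".
    private static readonly Regex VolumeInTitle = new Regex(
        @"(?<series>.+?)[,\s]+(?:vol\.?|volume)\s*(?<volume>\d+)",
        RegexOptions.IgnoreCase | RegexOptions.Compiled);

    // Returns (series, volume) when the title carries a volume marker, otherwise null.
    public static (string Series, string Volume)? TryParse(string title)
    {
        var match = VolumeInTitle.Match(title);
        if (!match.Success) return null;
        return (match.Groups["series"].Value.Trim(), match.Groups["volume"].Value);
    }
}

// Example: TitleVolumeSketch.TryParse("Accel World, Vol. 1") yields ("Accel World", "1"),
// roughly what ShouldParseAsVolumeGroup_WithoutSeriesIndex expects from GetComicInfo
// (the exact title inside the test epub is an assumption here).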
@@ -603,7 +603,7 @@ public class DirectoryServiceTests
         var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
         ds.CopyFilesToDirectory(new []{MockUnixSupport.Path($"{testDirectory}file.zip")}, "/manga/output/", new [] {"01"});
         var outputFiles = ds.GetFiles("/manga/output/").Select(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath).ToList();
-        Assert.Equal(1, outputFiles.Count()); // we have 2 already there and 2 copies
+        Assert.Single(outputFiles);
         // For some reason, this has C:/ on directory even though everything is emulated (System.IO.Abstractions issue, not changing)
         // https://github.com/TestableIO/System.IO.Abstractions/issues/831
         Assert.True(outputFiles.Contains(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath("/manga/output/01.zip"))
BIN  API.Tests/Services/Test Data/BookService/TitleWithVolume.epub  (new file; binary file not shown)