Logging Enhancements (#1521)

* Recreated Kavita Logging with Serilog instead of Default. This needs to be moved out of appsettings now, to allow the auto updater to patch.

* Refactored the code to be completely configured via Code rather than appsettings.json. This is a required step for Auto Updating.

* Added in the ability to send logs directly to the UI only for users on the log route. Stopping implementation as Alerts page will handle the rest of the implementation.

* Fixed up the backup service to not rely on Config from appsettings.json

* Tweaked the Logging levels available

* Moved everything over to File-scoped namespaces

* Moved everything over to File-scoped namespaces

* Code cleanup, removed an old migration and changed so debug logging doesn't print sensitive db data

* Removed dead code
This commit is contained in:
Joseph Milazzo 2022-09-12 19:25:48 -05:00 committed by GitHub
parent 9f715cc35f
commit d1a14f7e68
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
212 changed files with 16599 additions and 16834 deletions

View file

@@ -14,317 +14,316 @@ using NSubstitute.Extensions;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services
namespace API.Tests.Services;
public class ArchiveServiceTests
{
public class ArchiveServiceTests
private readonly ITestOutputHelper _testOutputHelper;
private readonly ArchiveService _archiveService;
private readonly ILogger<ArchiveService> _logger = Substitute.For<ILogger<ArchiveService>>();
private readonly ILogger<DirectoryService> _directoryServiceLogger = Substitute.For<ILogger<DirectoryService>>();
private readonly IDirectoryService _directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
public ArchiveServiceTests(ITestOutputHelper testOutputHelper)
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly ArchiveService _archiveService;
private readonly ILogger<ArchiveService> _logger = Substitute.For<ILogger<ArchiveService>>();
private readonly ILogger<DirectoryService> _directoryServiceLogger = Substitute.For<ILogger<DirectoryService>>();
private readonly IDirectoryService _directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
public ArchiveServiceTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
_archiveService = new ArchiveService(_logger, _directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService));
}
// Verifies ArchiveNeedsFlattening: a zip whose entries sit at the root ("flat file.zip")
// needs no flattening, while any fixture with entries nested in folders does.
[Theory]
[InlineData("flat file.zip", false)]
[InlineData("file in folder in folder.zip", true)]
[InlineData("file in folder.zip", true)]
[InlineData("file in folder_alt.zip", true)]
public void ArchiveNeedsFlatteningTest(string archivePath, bool expected)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
var file = Path.Join(testDirectory, archivePath);
// Open with the BCL ZipArchive directly; disposed at end of scope via using.
using ZipArchive archive = ZipFile.OpenRead(file);
Assert.Equal(expected, _archiveService.ArchiveNeedsFlattening(archive));
}
// Verifies IsValidArchive accepts any archive that exists and can be opened
// (including rar and an empty zip) and rejects only a missing file.
[Theory]
[InlineData("non existent file.zip", false)]
[InlineData("winrar.rar", true)]
[InlineData("empty.zip", true)]
[InlineData("flat file.zip", true)]
[InlineData("file in folder in folder.zip", true)]
[InlineData("file in folder.zip", true)]
[InlineData("file in folder_alt.zip", true)]
public void IsValidArchiveTest(string archivePath, bool expected)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
Assert.Equal(expected, _archiveService.IsValidArchive(Path.Join(testDirectory, archivePath)));
}
// Verifies GetNumberOfPagesFromArchive: missing/empty/unsupported archives count as 0.
// The macos_* fixtures exercise handling of Finder metadata entries (__MACOSX, "._" files)
// — presumably those entries are excluded from the page count; confirm against fixtures.
[Theory]
[InlineData("non existent file.zip", 0)]
[InlineData("winrar.rar", 0)]
[InlineData("empty.zip", 0)]
[InlineData("flat file.zip", 1)]
[InlineData("file in folder in folder.zip", 1)]
[InlineData("file in folder.zip", 1)]
[InlineData("file in folder_alt.zip", 1)]
[InlineData("macos_none.zip", 0)]
[InlineData("macos_one.zip", 1)]
[InlineData("macos_native.zip", 21)]
[InlineData("macos_withdotunder_one.zip", 1)]
public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
// Timing is informational only; written to the xUnit output, not asserted on.
var sw = Stopwatch.StartNew();
Assert.Equal(expected, _archiveService.GetNumberOfPagesFromArchive(Path.Join(testDirectory, archivePath)));
_testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
}
// Verifies CanOpen selects the right backing library per archive:
// Default (BCL ZipArchive) for zips, SharpCompress for rar, NotSupported for missing files.
[Theory]
[InlineData("non existent file.zip", ArchiveLibrary.NotSupported)]
[InlineData("winrar.rar", ArchiveLibrary.SharpCompress)]
[InlineData("empty.zip", ArchiveLibrary.Default)]
[InlineData("flat file.zip", ArchiveLibrary.Default)]
[InlineData("file in folder in folder.zip", ArchiveLibrary.Default)]
[InlineData("file in folder.zip", ArchiveLibrary.Default)]
[InlineData("file in folder_alt.zip", ArchiveLibrary.Default)]
public void CanOpenArchive(string archivePath, ArchiveLibrary expected)
{
// Timing is informational only; written to the xUnit output, not asserted on.
var sw = Stopwatch.StartNew();
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
Assert.Equal(expected, _archiveService.CanOpen(Path.Join(testDirectory, archivePath)));
_testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
}
// Verifies ExtractArchive writes the expected number of files to the extraction directory.
// Note: winrar.rar expects 0 here even though CanOpen reports SharpCompress — presumably
// extraction of that fixture yields nothing; confirm against ExtractArchive's behavior.
[Theory]
[InlineData("non existent file.zip", 0)]
[InlineData("winrar.rar", 0)]
[InlineData("empty.zip", 0)]
[InlineData("flat file.zip", 1)]
[InlineData("file in folder in folder.zip", 1)]
[InlineData("file in folder.zip", 1)]
[InlineData("file in folder_alt.zip", 1)]
public void CanExtractArchive(string archivePath, int expectedFileCount)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
var extractDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives/Extraction");
// Clear before extracting so counts from a previous (possibly failed) run don't leak in.
_directoryService.ClearAndDeleteDirectory(extractDirectory);
var sw = Stopwatch.StartNew();
_archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory);
var di1 = new DirectoryInfo(extractDirectory);
// If extraction produced nothing the directory may not exist; treat that as 0 files.
Assert.Equal(expectedFileCount, di1.Exists ? _directoryService.GetFiles(extractDirectory, searchOption:SearchOption.AllDirectories).Count() : 0)
;
_testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
// Clean up after ourselves so the fixture directory stays pristine.
_directoryService.ClearAndDeleteDirectory(extractDirectory);
}
// Verifies the static FindFolderEntry helper: picks a folder/cover style image from a file
// list ("" when none applies, e.g. directory-only or __MACOSX-prefixed entries).
[Theory]
[InlineData(new [] {"folder.jpg"}, "folder.jpg")]
[InlineData(new [] {"vol1/"}, "")]
[InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")]
[InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")]
[InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "")]
[InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "folder.jpg")]
public void FindFolderEntry(string[] files, string expected)
{
var foundFile = ArchiveService.FindFolderEntry(files);
// Normalize a null/empty result to "" so InlineData can express "no match".
Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile);
}
// Verifies the static FirstFileEntry helper: returns the first image by natural sort
// ("page 2" before "page 10"), preferring root-level files over files inside folders
// and skipping __MACOSX entries.
[Theory]
[InlineData(new [] {"folder.jpg"}, "folder.jpg")]
[InlineData(new [] {"vol1/"}, "")]
[InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")]
[InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")]
[InlineData(new [] {"page 2.jpg", "page 10.jpg"}, "page 2.jpg")]
[InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "vol1/page 01.jpg")]
[InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg")]
[InlineData(new [] {"001.jpg", "001 - chapter 1/001.jpg"}, "001.jpg")]
[InlineData(new [] {"chapter 1/001.jpg", "chapter 2/002.jpg", "somefile.jpg"}, "somefile.jpg")]
public void FindFirstEntry(string[] files, string expected)
{
var foundFile = ArchiveService.FirstFileEntry(files, string.Empty);
// Normalize a null/empty result to "" so InlineData can express "no match".
Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile);
}
// Verifies cover extraction via the Default (BCL ZipArchive) code path by comparing the
// produced thumbnail bytes against a pre-rendered expected image.
[Theory]
[InlineData("v10.cbz", "v10.expected.png")]
[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")]
[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")]
[InlineData("macos_native.zip", "macos_native.png")]
[InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")]
[InlineData("sorting.zip", "sorting.expected.png")]
[InlineData("test.zip", "test.expected.jpg")]
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
{
// Partial substitute over the real ArchiveService: only CanOpen is stubbed below so the
// real GetCoverImage runs but is forced down the Default branch.
var ds = Substitute.For<DirectoryService>(_directoryServiceLogger, new FileSystem());
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds);
var archiveService = Substitute.For<ArchiveService>(_logger, ds, imageService);
var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"));
// Render the expected fixture through the same thumbnail pipeline (width 320, png).
var expectedBytes = Image.Thumbnail(Path.Join(testDirectory, expectedOutputFile), 320).WriteToBuffer(".png");
archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.Default);
var outputDir = Path.Join(testDirectory, "output");
_directoryService.ClearDirectory(outputDir);
_directoryService.ExistOrCreate(outputDir);
var coverImagePath = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile),
Path.GetFileNameWithoutExtension(inputFile) + "_output", outputDir);
var actual = File.ReadAllBytes(Path.Join(outputDir, coverImagePath));
Assert.Equal(expectedBytes, actual);
// Remove generated output so reruns start clean.
_directoryService.ClearAndDeleteDirectory(outputDir);
}
// Verifies cover extraction via the SharpCompress code path; CanOpen is stubbed to force
// that branch, and output bytes are compared against the expected fixture file directly.
[Theory]
[InlineData("v10.cbz", "v10.expected.png")]
[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")]
[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")]
[InlineData("macos_native.zip", "macos_native.png")]
[InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")]
[InlineData("sorting.zip", "sorting.expected.png")]
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
{
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
// Partial substitute: real service with only CanOpen overridden (see Configure() below).
var archiveService = Substitute.For<ArchiveService>(_logger,
new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService);
var testDirectory = API.Services.Tasks.Scanner.Parser.Parser.NormalizePath(Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages")));
var outputDir = Path.Join(testDirectory, "output");
_directoryService.ClearDirectory(outputDir);
_directoryService.ExistOrCreate(outputDir);
archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.SharpCompress);
var coverOutputFile = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile),
Path.GetFileNameWithoutExtension(inputFile), outputDir);
var actualBytes = File.ReadAllBytes(Path.Join(outputDir, coverOutputFile));
var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile));
Assert.Equal(expectedBytes, actualBytes);
// Remove generated output so reruns start clean.
_directoryService.ClearAndDeleteDirectory(outputDir);
}
// Verifies GetCoverImage hands a stream to IImageService.WriteCoverThumbnail and returns
// whatever filename the image service reports ("cover.jpg" via the stub here).
[Theory]
[InlineData("Archives/macos_native.zip")]
[InlineData("Formats/One File with DB_Supported.zip")]
public void CanParseCoverImage(string inputFile)
{
// Stub the image service so no real thumbnailing happens; we only assert the plumbing.
var imageService = Substitute.For<IImageService>();
imageService.WriteCoverThumbnail(Arg.Any<Stream>(), Arg.Any<string>(), Arg.Any<string>()).Returns(x => "cover.jpg");
var archiveService = new ArchiveService(_logger, _directoryService, imageService);
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/");
var inputPath = Path.GetFullPath(Path.Join(testDirectory, inputFile));
var outputPath = Path.Join(testDirectory, Path.GetFileNameWithoutExtension(inputFile) + "_output");
new DirectoryInfo(outputPath).Create();
var expectedImage = archiveService.GetCoverImage(inputPath, inputFile, outputPath);
Assert.Equal("cover.jpg", expectedImage);
// Delete the (empty) scratch directory created above.
new DirectoryInfo(outputPath).Delete();
}
#region ShouldHaveComicInfo
// Verifies GetComicInfo parses ComicInfo.xml out of an archive and surfaces the Summary.
[Fact]
public void ShouldHaveComicInfo()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo.zip");
// Expected Summary text exactly as embedded in the fixture's ComicInfo.xml.
const string summaryInfo = "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?";
var comicInfo = _archiveService.GetComicInfo(archive);
Assert.NotNull(comicInfo);
Assert.Equal(summaryInfo, comicInfo.Summary);
}
// Verifies GetComicInfo surfaces author metadata (Writer) from the fixture's ComicInfo.xml.
[Fact]
public void ShouldHaveComicInfo_WithAuthors()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo_authors.zip");
var comicInfo = _archiveService.GetComicInfo(archive);
Assert.NotNull(comicInfo);
Assert.Equal("Junya Inoue", comicInfo.Writer);
}
// When an archive contains multiple ComicInfo.xml files, the top-level one should win
// (the fixture name suggests the nested duplicate carries different data — verify fixture).
[Fact]
public void ShouldHaveComicInfo_TopLevelFileOnly()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo_duplicateInfos.zip");
var comicInfo = _archiveService.GetComicInfo(archive);
Assert.NotNull(comicInfo);
Assert.Equal("BTOOOM!", comicInfo.Series);
}
#endregion
#region CanParseComicInfo
// Full-object comparison of the parsed ComicInfo against the expected metadata for the
// fixture. NotStrictEqual compares via the type's equality semantics rather than identity.
[Fact]
public void CanParseComicInfo()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo.zip");
var actual = _archiveService.GetComicInfo(archive);
var expected = new ComicInfo()
{
Publisher = "Yen Press",
Genre = "Manga, Movies & TV",
Summary =
"By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?",
PageCount = 194,
LanguageISO = "en",
Notes = "Scraped metadata from Comixology [CMXDB450184]",
Series = "BTOOOM!",
Title = "v01",
Web = "https://www.comixology.com/BTOOOM/digital-comic/450184"
};
Assert.NotStrictEqual(expected, actual);
}
#endregion
#region FindCoverImageFilename
// Verifies the static FindCoverImageFilename helper: prefers "!"-prefixed and cover-named
// images, skips "._" AppleDouble entries and non-images, and returns null for no files.
[Theory]
[InlineData(new string[] {}, "", null)]
[InlineData(new [] {"001.jpg", "002.jpg"}, "Test.zip", "001.jpg")]
[InlineData(new [] {"001.jpg", "!002.jpg"}, "Test.zip", "!002.jpg")]
[InlineData(new [] {"001.jpg", "!001.jpg"}, "Test.zip", "!001.jpg")]
[InlineData(new [] {"001.jpg", "cover.jpg"}, "Test.zip", "cover.jpg")]
[InlineData(new [] {"001.jpg", "Chapter 20/cover.jpg", "Chapter 21/0001.jpg"}, "Test.zip", "Chapter 20/cover.jpg")]
[InlineData(new [] {"._/001.jpg", "._/cover.jpg", "010.jpg"}, "Test.zip", "010.jpg")]
[InlineData(new [] {"001.txt", "002.txt", "a.jpg"}, "Test.zip", "a.jpg")]
public void FindCoverImageFilename(string[] filenames, string archiveName, string expected)
{
Assert.Equal(expected, ArchiveService.FindCoverImageFilename(archiveName, filenames));
}
#endregion
#region CreateZipForDownload
// Disabled stub: the [Fact] attribute and the actual call are commented out, so this
// never runs and asserts nothing. Kept as a placeholder for a future CreateZipForDownload test.
//[Fact]
public void CreateZipForDownloadTest()
{
var fileSystem = new MockFileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
//_archiveService.CreateZipForDownload(new []{}, outputDirectory)
}
#endregion
_testOutputHelper = testOutputHelper;
_archiveService = new ArchiveService(_logger, _directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService));
}
[Theory]
[InlineData("flat file.zip", false)]
[InlineData("file in folder in folder.zip", true)]
[InlineData("file in folder.zip", true)]
[InlineData("file in folder_alt.zip", true)]
public void ArchiveNeedsFlatteningTest(string archivePath, bool expected)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
var file = Path.Join(testDirectory, archivePath);
using ZipArchive archive = ZipFile.OpenRead(file);
Assert.Equal(expected, _archiveService.ArchiveNeedsFlattening(archive));
}
[Theory]
[InlineData("non existent file.zip", false)]
[InlineData("winrar.rar", true)]
[InlineData("empty.zip", true)]
[InlineData("flat file.zip", true)]
[InlineData("file in folder in folder.zip", true)]
[InlineData("file in folder.zip", true)]
[InlineData("file in folder_alt.zip", true)]
public void IsValidArchiveTest(string archivePath, bool expected)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
Assert.Equal(expected, _archiveService.IsValidArchive(Path.Join(testDirectory, archivePath)));
}
[Theory]
[InlineData("non existent file.zip", 0)]
[InlineData("winrar.rar", 0)]
[InlineData("empty.zip", 0)]
[InlineData("flat file.zip", 1)]
[InlineData("file in folder in folder.zip", 1)]
[InlineData("file in folder.zip", 1)]
[InlineData("file in folder_alt.zip", 1)]
[InlineData("macos_none.zip", 0)]
[InlineData("macos_one.zip", 1)]
[InlineData("macos_native.zip", 21)]
[InlineData("macos_withdotunder_one.zip", 1)]
public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
var sw = Stopwatch.StartNew();
Assert.Equal(expected, _archiveService.GetNumberOfPagesFromArchive(Path.Join(testDirectory, archivePath)));
_testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
}
[Theory]
[InlineData("non existent file.zip", ArchiveLibrary.NotSupported)]
[InlineData("winrar.rar", ArchiveLibrary.SharpCompress)]
[InlineData("empty.zip", ArchiveLibrary.Default)]
[InlineData("flat file.zip", ArchiveLibrary.Default)]
[InlineData("file in folder in folder.zip", ArchiveLibrary.Default)]
[InlineData("file in folder.zip", ArchiveLibrary.Default)]
[InlineData("file in folder_alt.zip", ArchiveLibrary.Default)]
public void CanOpenArchive(string archivePath, ArchiveLibrary expected)
{
var sw = Stopwatch.StartNew();
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
Assert.Equal(expected, _archiveService.CanOpen(Path.Join(testDirectory, archivePath)));
_testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
}
[Theory]
[InlineData("non existent file.zip", 0)]
[InlineData("winrar.rar", 0)]
[InlineData("empty.zip", 0)]
[InlineData("flat file.zip", 1)]
[InlineData("file in folder in folder.zip", 1)]
[InlineData("file in folder.zip", 1)]
[InlineData("file in folder_alt.zip", 1)]
public void CanExtractArchive(string archivePath, int expectedFileCount)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
var extractDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives/Extraction");
_directoryService.ClearAndDeleteDirectory(extractDirectory);
var sw = Stopwatch.StartNew();
_archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory);
var di1 = new DirectoryInfo(extractDirectory);
Assert.Equal(expectedFileCount, di1.Exists ? _directoryService.GetFiles(extractDirectory, searchOption:SearchOption.AllDirectories).Count() : 0);
_testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
_directoryService.ClearAndDeleteDirectory(extractDirectory);
}
[Theory]
[InlineData(new [] {"folder.jpg"}, "folder.jpg")]
[InlineData(new [] {"vol1/"}, "")]
[InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")]
[InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")]
[InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "")]
[InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "folder.jpg")]
public void FindFolderEntry(string[] files, string expected)
{
var foundFile = ArchiveService.FindFolderEntry(files);
Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile);
}
[Theory]
[InlineData(new [] {"folder.jpg"}, "folder.jpg")]
[InlineData(new [] {"vol1/"}, "")]
[InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")]
[InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")]
[InlineData(new [] {"page 2.jpg", "page 10.jpg"}, "page 2.jpg")]
[InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "vol1/page 01.jpg")]
[InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg")]
[InlineData(new [] {"001.jpg", "001 - chapter 1/001.jpg"}, "001.jpg")]
[InlineData(new [] {"chapter 1/001.jpg", "chapter 2/002.jpg", "somefile.jpg"}, "somefile.jpg")]
public void FindFirstEntry(string[] files, string expected)
{
var foundFile = ArchiveService.FirstFileEntry(files, string.Empty);
Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile);
}
[Theory]
[InlineData("v10.cbz", "v10.expected.png")]
[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")]
[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")]
[InlineData("macos_native.zip", "macos_native.png")]
[InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")]
[InlineData("sorting.zip", "sorting.expected.png")]
[InlineData("test.zip", "test.expected.jpg")]
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
{
var ds = Substitute.For<DirectoryService>(_directoryServiceLogger, new FileSystem());
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds);
var archiveService = Substitute.For<ArchiveService>(_logger, ds, imageService);
var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"));
var expectedBytes = Image.Thumbnail(Path.Join(testDirectory, expectedOutputFile), 320).WriteToBuffer(".png");
archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.Default);
var outputDir = Path.Join(testDirectory, "output");
_directoryService.ClearDirectory(outputDir);
_directoryService.ExistOrCreate(outputDir);
var coverImagePath = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile),
Path.GetFileNameWithoutExtension(inputFile) + "_output", outputDir);
var actual = File.ReadAllBytes(Path.Join(outputDir, coverImagePath));
Assert.Equal(expectedBytes, actual);
_directoryService.ClearAndDeleteDirectory(outputDir);
}
[Theory]
[InlineData("v10.cbz", "v10.expected.png")]
[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")]
[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")]
[InlineData("macos_native.zip", "macos_native.png")]
[InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")]
[InlineData("sorting.zip", "sorting.expected.png")]
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
{
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
var archiveService = Substitute.For<ArchiveService>(_logger,
new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService);
var testDirectory = API.Services.Tasks.Scanner.Parser.Parser.NormalizePath(Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages")));
var outputDir = Path.Join(testDirectory, "output");
_directoryService.ClearDirectory(outputDir);
_directoryService.ExistOrCreate(outputDir);
archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.SharpCompress);
var coverOutputFile = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile),
Path.GetFileNameWithoutExtension(inputFile), outputDir);
var actualBytes = File.ReadAllBytes(Path.Join(outputDir, coverOutputFile));
var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile));
Assert.Equal(expectedBytes, actualBytes);
_directoryService.ClearAndDeleteDirectory(outputDir);
}
[Theory]
[InlineData("Archives/macos_native.zip")]
[InlineData("Formats/One File with DB_Supported.zip")]
public void CanParseCoverImage(string inputFile)
{
var imageService = Substitute.For<IImageService>();
imageService.WriteCoverThumbnail(Arg.Any<Stream>(), Arg.Any<string>(), Arg.Any<string>()).Returns(x => "cover.jpg");
var archiveService = new ArchiveService(_logger, _directoryService, imageService);
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/");
var inputPath = Path.GetFullPath(Path.Join(testDirectory, inputFile));
var outputPath = Path.Join(testDirectory, Path.GetFileNameWithoutExtension(inputFile) + "_output");
new DirectoryInfo(outputPath).Create();
var expectedImage = archiveService.GetCoverImage(inputPath, inputFile, outputPath);
Assert.Equal("cover.jpg", expectedImage);
new DirectoryInfo(outputPath).Delete();
}
#region ShouldHaveComicInfo
[Fact]
public void ShouldHaveComicInfo()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo.zip");
const string summaryInfo = "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?";
var comicInfo = _archiveService.GetComicInfo(archive);
Assert.NotNull(comicInfo);
Assert.Equal(summaryInfo, comicInfo.Summary);
}
[Fact]
public void ShouldHaveComicInfo_WithAuthors()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo_authors.zip");
var comicInfo = _archiveService.GetComicInfo(archive);
Assert.NotNull(comicInfo);
Assert.Equal("Junya Inoue", comicInfo.Writer);
}
[Fact]
public void ShouldHaveComicInfo_TopLevelFileOnly()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo_duplicateInfos.zip");
var comicInfo = _archiveService.GetComicInfo(archive);
Assert.NotNull(comicInfo);
Assert.Equal("BTOOOM!", comicInfo.Series);
}
#endregion
#region CanParseComicInfo
[Fact]
public void CanParseComicInfo()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo.zip");
var actual = _archiveService.GetComicInfo(archive);
var expected = new ComicInfo()
{
Publisher = "Yen Press",
Genre = "Manga, Movies & TV",
Summary =
"By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?",
PageCount = 194,
LanguageISO = "en",
Notes = "Scraped metadata from Comixology [CMXDB450184]",
Series = "BTOOOM!",
Title = "v01",
Web = "https://www.comixology.com/BTOOOM/digital-comic/450184"
};
Assert.NotStrictEqual(expected, actual);
}
#endregion
#region FindCoverImageFilename
[Theory]
[InlineData(new string[] {}, "", null)]
[InlineData(new [] {"001.jpg", "002.jpg"}, "Test.zip", "001.jpg")]
[InlineData(new [] {"001.jpg", "!002.jpg"}, "Test.zip", "!002.jpg")]
[InlineData(new [] {"001.jpg", "!001.jpg"}, "Test.zip", "!001.jpg")]
[InlineData(new [] {"001.jpg", "cover.jpg"}, "Test.zip", "cover.jpg")]
[InlineData(new [] {"001.jpg", "Chapter 20/cover.jpg", "Chapter 21/0001.jpg"}, "Test.zip", "Chapter 20/cover.jpg")]
[InlineData(new [] {"._/001.jpg", "._/cover.jpg", "010.jpg"}, "Test.zip", "010.jpg")]
[InlineData(new [] {"001.txt", "002.txt", "a.jpg"}, "Test.zip", "a.jpg")]
public void FindCoverImageFilename(string[] filenames, string archiveName, string expected)
{
Assert.Equal(expected, ArchiveService.FindCoverImageFilename(archiveName, filenames));
}
#endregion
#region CreateZipForDownload
//[Fact]
public void CreateZipForDownloadTest()
{
var fileSystem = new MockFileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
//_archiveService.CreateZipForDownload(new []{}, outputDirectory)
}
#endregion
}

View file

@@ -135,17 +135,9 @@ public class BackupServiceTests
filesystem.AddFile($"{LogDirectory}kavita1.log", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var inMemorySettings = new Dictionary<string, string> {
{"Logging:File:Path", "config/logs/kavita.log"},
{"Logging:File:MaxRollingFiles", "0"},
};
IConfiguration configuration = new ConfigurationBuilder()
.AddInMemoryCollection(inMemorySettings)
.Build();
var backupService = new BackupService(_logger, _unitOfWork, ds, _messageHub);
var backupService = new BackupService(_logger, _unitOfWork, ds, configuration, _messageHub);
var backupLogFiles = backupService.GetLogFiles(0, LogDirectory).ToList();
var backupLogFiles = backupService.GetLogFiles(false).ToList();
Assert.Single(backupLogFiles);
Assert.Equal(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita.log"), API.Services.Tasks.Scanner.Parser.Parser.NormalizePath(backupLogFiles.First()));
}
@@ -155,20 +147,12 @@ public class BackupServiceTests
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{LogDirectory}kavita.log", new MockFileData(""));
filesystem.AddFile($"{LogDirectory}kavita1.log", new MockFileData(""));
filesystem.AddFile($"{LogDirectory}kavita20200213.log", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var inMemorySettings = new Dictionary<string, string> {
{"Logging:File:Path", "config/logs/kavita.log"},
{"Logging:File:MaxRollingFiles", "1"},
};
IConfiguration configuration = new ConfigurationBuilder()
.AddInMemoryCollection(inMemorySettings)
.Build();
var backupService = new BackupService(_logger, _unitOfWork, ds, _messageHub);
var backupService = new BackupService(_logger, _unitOfWork, ds, configuration, _messageHub);
var backupLogFiles = backupService.GetLogFiles(1, LogDirectory).Select(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath).ToList();
var backupLogFiles = backupService.GetLogFiles().Select(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath).ToList();
Assert.NotEmpty(backupLogFiles.Where(file => file.Equals(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita.log")) || file.Equals(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita1.log"))));
}

View file

@ -5,54 +5,53 @@ using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services
namespace API.Tests.Services;
public class BookServiceTests
{
public class BookServiceTests
private readonly IBookService _bookService;
private readonly ILogger<BookService> _logger = Substitute.For<ILogger<BookService>>();
public BookServiceTests()
{
private readonly IBookService _bookService;
private readonly ILogger<BookService> _logger = Substitute.For<ILogger<BookService>>();
public BookServiceTests()
{
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
_bookService = new BookService(_logger, directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService));
}
[Theory]
[InlineData("The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub", 16)]
[InlineData("Non-existent file.epub", 0)]
[InlineData("Non an ebub.pdf", 0)]
[InlineData("test_ſ.pdf", 1)] // This is dependent on Docnet bug https://github.com/GowenGit/docnet/issues/80
[InlineData("test.pdf", 1)]
public void GetNumberOfPagesTest(string filePath, int expectedPages)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
Assert.Equal(expectedPages, _bookService.GetNumberOfPages(Path.Join(testDirectory, filePath)));
}
[Fact]
public void ShouldHaveComicInfo()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
var archive = Path.Join(testDirectory, "The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub");
const string summaryInfo = "Book Description";
var comicInfo = _bookService.GetComicInfo(archive);
Assert.NotNull(comicInfo);
Assert.Equal(summaryInfo, comicInfo.Summary);
Assert.Equal("genre1, genre2", comicInfo.Genre);
}
[Fact]
public void ShouldHaveComicInfo_WithAuthors()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
var archive = Path.Join(testDirectory, "The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub");
var comicInfo = _bookService.GetComicInfo(archive);
Assert.NotNull(comicInfo);
Assert.Equal("Roger Starbuck,Junya Inoue", comicInfo.Writer);
}
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
_bookService = new BookService(_logger, directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService));
}
[Theory]
[InlineData("The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub", 16)]
[InlineData("Non-existent file.epub", 0)]
[InlineData("Non an ebub.pdf", 0)]
[InlineData("test_ſ.pdf", 1)] // This is dependent on Docnet bug https://github.com/GowenGit/docnet/issues/80
[InlineData("test.pdf", 1)]
public void GetNumberOfPagesTest(string filePath, int expectedPages)
{
    // Page counts come from the book file itself; files that are missing or not a
    // readable book format report 0 pages.
    var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
    var fullPath = Path.Join(testDirectory, filePath);

    var actualPages = _bookService.GetNumberOfPages(fullPath);

    Assert.Equal(expectedPages, actualPages);
}
[Fact]
public void ShouldHaveComicInfo()
{
    // The test epub carries a description and two genres; GetComicInfo should surface
    // both on the returned ComicInfo.
    const string expectedSummary = "Book Description";
    var bookDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
    var epubPath = Path.Join(bookDirectory, "The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub");

    var comicInfo = _bookService.GetComicInfo(epubPath);

    Assert.NotNull(comicInfo);
    Assert.Equal(expectedSummary, comicInfo.Summary);
    Assert.Equal("genre1, genre2", comicInfo.Genre);
}
[Fact]
public void ShouldHaveComicInfo_WithAuthors()
{
    // The Writer field should carry the book's authors as a comma-separated list.
    var bookDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
    var epubPath = Path.Join(bookDirectory, "The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub");

    var comicInfo = _bookService.GetComicInfo(epubPath);

    Assert.NotNull(comicInfo);
    Assert.Equal("Roger Starbuck,Junya Inoue", comicInfo.Writer);
}
}

View file

@ -20,501 +20,500 @@ using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services
namespace API.Tests.Services;
internal class MockReadingItemServiceForCacheService : IReadingItemService
{
internal class MockReadingItemServiceForCacheService : IReadingItemService
private readonly DirectoryService _directoryService;
public MockReadingItemServiceForCacheService(DirectoryService directoryService)
{
private readonly DirectoryService _directoryService;
public MockReadingItemServiceForCacheService(DirectoryService directoryService)
{
_directoryService = directoryService;
}
public ComicInfo GetComicInfo(string filePath)
{
return null;
}
public int GetNumberOfPages(string filePath, MangaFormat format)
{
return 1;
}
public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format)
{
return string.Empty;
}
public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1)
{
throw new System.NotImplementedException();
}
public ParserInfo Parse(string path, string rootPath, LibraryType type)
{
throw new System.NotImplementedException();
}
public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
{
throw new System.NotImplementedException();
}
_directoryService = directoryService;
}
public class CacheServiceTests
public ComicInfo GetComicInfo(string filePath)
{
private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
private readonly DbConnection _connection;
private readonly DataContext _context;
private const string CacheDirectory = "C:/kavita/config/cache/";
private const string CoverImageDirectory = "C:/kavita/config/covers/";
private const string BackupDirectory = "C:/kavita/config/backups/";
private const string DataDirectory = "C:/data/";
public CacheServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new Library()
{
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Path = "C:/data/"
}
}
});
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
private static MockFileSystem CreateFileSystem()
{
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
fileSystem.AddDirectory("C:/kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory(DataDirectory);
return fileSystem;
}
#endregion
#region Ensure
[Fact]
public async Task Ensure_DirectoryAlreadyExists_DontExtractAnything()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
filesystem.AddDirectory($"{CacheDirectory}1/");
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
await ResetDB();
var s = DbFactory.Series("Test");
var v = DbFactory.Volume("1");
var c = new Chapter()
{
Number = "1",
Files = new List<MangaFile>()
{
new MangaFile()
{
Format = MangaFormat.Archive,
FilePath = $"{DataDirectory}Test v1.zip",
}
}
};
v.Chapters.Add(c);
s.Volumes.Add(v);
s.LibraryId = 1;
_context.Series.Add(s);
await _context.SaveChangesAsync();
await cleanupService.Ensure(1);
Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
}
// [Fact]
// public async Task Ensure_DirectoryAlreadyExists_ExtractsImages()
// {
// // TODO: Figure out a way to test this
// var filesystem = CreateFileSystem();
// filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
// filesystem.AddDirectory($"{CacheDirectory}1/");
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// var archiveService = Substitute.For<IArchiveService>();
// archiveService.ExtractArchive($"{DataDirectory}Test v1.zip",
// filesystem.Path.Join(CacheDirectory, "1"));
// var cleanupService = new CacheService(_logger, _unitOfWork, ds,
// new ReadingItemService(archiveService, Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
//
// await ResetDB();
// var s = DbFactory.Series("Test");
// var v = DbFactory.Volume("1");
// var c = new Chapter()
// {
// Number = "1",
// Files = new List<MangaFile>()
// {
// new MangaFile()
// {
// Format = MangaFormat.Archive,
// FilePath = $"{DataDirectory}Test v1.zip",
// }
// }
// };
// v.Chapters.Add(c);
// s.Volumes.Add(v);
// s.LibraryId = 1;
// _context.Series.Add(s);
//
// await _context.SaveChangesAsync();
//
// await cleanupService.Ensure(1);
// Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
// }
#endregion
#region CleanupChapters
[Fact]
public void CleanupChapters_AllFilesShouldBeDeleted()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{CacheDirectory}1/001.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}1/002.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}3/003.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
cleanupService.CleanupChapters(new []{1, 3});
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
}
#endregion
#region GetCachedEpubFile
[Fact]
public void GetCachedEpubFile_ShouldReturnFirstEpub()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.epub", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.epub", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
var c = new Chapter()
{
Files = new List<MangaFile>()
{
new MangaFile()
{
FilePath = $"{DataDirectory}1.epub"
},
new MangaFile()
{
FilePath = $"{DataDirectory}2.epub"
}
}
};
cs.GetCachedFile(c);
Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c));
}
#endregion
#region GetCachedPagePath
[Fact]
public void GetCachedPagePath_ReturnNullIfNoFiles()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Files = new List<MangaFile>()
};
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages - 1; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
var path = cs.GetCachedPagePath(c, 11);
Assert.Equal(string.Empty, path);
}
[Fact]
public void GetCachedPagePath_GetFileFromFirstFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
},
new MangaFile()
{
Id = 2,
FilePath = $"{DataDirectory}2.zip",
Pages = 5
}
}
};
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/00{fileIndex}_00{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c, 0)));
}
[Fact]
public void GetCachedPagePath_GetLastPageFromSingleFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
}
}
};
c.Pages = c.Files.Sum(f => f.Pages);
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
// Remember that we start at 0, so this is the 10th file
var path = cs.GetCachedPagePath(c, c.Pages);
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_0{c.Pages}.jpg"), ds.FileSystem.Path.GetFullPath(path));
}
[Fact]
public void GetCachedPagePath_GetFileFromSecondFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
},
new MangaFile()
{
Id = 2,
FilePath = $"{DataDirectory}2.zip",
Pages = 5
}
}
};
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
// Remember that we start at 0, so this is the page + 1 file
var path = cs.GetCachedPagePath(c, 10);
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/001_001.jpg"), ds.FileSystem.Path.GetFullPath(path));
}
#endregion
#region ExtractChapterFiles
// [Fact]
// public void ExtractChapterFiles_ShouldExtractOnlyImages()
// {
// const string testDirectory = "/manga/";
// var fileSystem = new MockFileSystem();
// for (var i = 0; i < 10; i++)
// {
// fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
// }
//
// fileSystem.AddDirectory(CacheDirectory);
//
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
// var cs = new CacheService(_logger, _unitOfWork, ds,
// new MockReadingItemServiceForCacheService(ds));
//
//
// cs.ExtractChapterFiles(CacheDirectory, new List<MangaFile>()
// {
// new MangaFile()
// {
// ChapterId = 1,
// Format = MangaFormat.Archive,
// Pages = 2,
// FilePath =
// }
// })
// }
#endregion
return null;
}
public int GetNumberOfPages(string filePath, MangaFormat format)
{
return 1;
}
public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format)
{
return string.Empty;
}
public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1)
{
throw new System.NotImplementedException();
}
public ParserInfo Parse(string path, string rootPath, LibraryType type)
{
throw new System.NotImplementedException();
}
public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
{
throw new System.NotImplementedException();
}
}
/// <summary>
/// Tests for <see cref="CacheService"/> against an in-memory SQLite database and a mock
/// filesystem. Implements IDisposable so xUnit tears down the SQLite connection after the
/// test class runs — without the interface the Dispose method was never invoked and the
/// in-memory connection leaked.
/// </summary>
public class CacheServiceTests : System.IDisposable
{
    private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
    private readonly IUnitOfWork _unitOfWork;
    private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
    private readonly DbConnection _connection;
    private readonly DataContext _context;

    // Canonical mock-filesystem layout shared by every test.
    private const string CacheDirectory = "C:/kavita/config/cache/";
    private const string CoverImageDirectory = "C:/kavita/config/covers/";
    private const string BackupDirectory = "C:/kavita/config/backups/";
    private const string DataDirectory = "C:/data/";

    public CacheServiceTests()
    {
        var contextOptions = new DbContextOptionsBuilder()
            .UseSqlite(CreateInMemoryDatabase())
            .Options;
        _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;

        _context = new DataContext(contextOptions);
        // xUnit constructors cannot be async; block once here to seed the database.
        Task.Run(SeedDb).GetAwaiter().GetResult();

        _unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
    }

    #region Setup

    /// <summary>
    /// Opens a shared in-memory SQLite connection; it must stay open for the lifetime of
    /// the test class or the database vanishes.
    /// </summary>
    private static DbConnection CreateInMemoryDatabase()
    {
        var connection = new SqliteConnection("Filename=:memory:");
        connection.Open();

        return connection;
    }

    // Invoked by xUnit after the test class completes (now that the class implements IDisposable).
    public void Dispose() => _connection.Dispose();

    /// <summary>
    /// Migrates the in-memory database, points the cache/backup settings at the mock
    /// directories, and registers a single Manga library rooted at C:/data/.
    /// </summary>
    private async Task<bool> SeedDb()
    {
        await _context.Database.MigrateAsync();
        var filesystem = CreateFileSystem();

        await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));

        var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
        setting.Value = CacheDirectory;

        setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
        setting.Value = BackupDirectory;

        // Both entities are tracked, so a single Update call is sufficient for EF to persist both edits.
        _context.ServerSetting.Update(setting);

        _context.Library.Add(new Library()
        {
            Name = "Manga",
            Folders = new List<FolderPath>()
            {
                new FolderPath()
                {
                    Path = "C:/data/"
                }
            }
        });
        return await _context.SaveChangesAsync() > 0;
    }

    // Removes all Series rows so each test starts from a clean slate.
    private async Task ResetDB()
    {
        _context.Series.RemoveRange(_context.Series.ToList());

        await _context.SaveChangesAsync();
    }

    // Builds the standard mock filesystem with the config/cache/cover/backup/data directories.
    private static MockFileSystem CreateFileSystem()
    {
        var fileSystem = new MockFileSystem();
        fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
        fileSystem.AddDirectory("C:/kavita/config/");
        fileSystem.AddDirectory(CacheDirectory);
        fileSystem.AddDirectory(CoverImageDirectory);
        fileSystem.AddDirectory(BackupDirectory);
        fileSystem.AddDirectory(DataDirectory);

        return fileSystem;
    }

    #endregion

    #region Ensure

    [Fact]
    public async Task Ensure_DirectoryAlreadyExists_DontExtractAnything()
    {
        // If the chapter's cache directory already exists, Ensure should treat it as
        // cached and not extract the archive again.
        var filesystem = CreateFileSystem();
        filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
        filesystem.AddDirectory($"{CacheDirectory}1/");
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var cleanupService = new CacheService(_logger, _unitOfWork, ds,
            new ReadingItemService(Substitute.For<IArchiveService>(),
                Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());

        await ResetDB();
        var s = DbFactory.Series("Test");
        var v = DbFactory.Volume("1");
        var c = new Chapter()
        {
            Number = "1",
            Files = new List<MangaFile>()
            {
                new MangaFile()
                {
                    Format = MangaFormat.Archive,
                    FilePath = $"{DataDirectory}Test v1.zip",
                }
            }
        };
        v.Chapters.Add(c);
        s.Volumes.Add(v);
        s.LibraryId = 1;
        _context.Series.Add(s);

        await _context.SaveChangesAsync();

        await cleanupService.Ensure(1);
        Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
    }

    // [Fact]
    // public async Task Ensure_DirectoryAlreadyExists_ExtractsImages()
    // {
    //     // TODO: Figure out a way to test this
    //     var filesystem = CreateFileSystem();
    //     filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
    //     filesystem.AddDirectory($"{CacheDirectory}1/");
    //     var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
    //     var archiveService = Substitute.For<IArchiveService>();
    //     archiveService.ExtractArchive($"{DataDirectory}Test v1.zip",
    //         filesystem.Path.Join(CacheDirectory, "1"));
    //     var cleanupService = new CacheService(_logger, _unitOfWork, ds,
    //         new ReadingItemService(archiveService, Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
    //
    //     await ResetDB();
    //     var s = DbFactory.Series("Test");
    //     var v = DbFactory.Volume("1");
    //     var c = new Chapter()
    //     {
    //         Number = "1",
    //         Files = new List<MangaFile>()
    //         {
    //             new MangaFile()
    //             {
    //                 Format = MangaFormat.Archive,
    //                 FilePath = $"{DataDirectory}Test v1.zip",
    //             }
    //         }
    //     };
    //     v.Chapters.Add(c);
    //     s.Volumes.Add(v);
    //     s.LibraryId = 1;
    //     _context.Series.Add(s);
    //
    //     await _context.SaveChangesAsync();
    //
    //     await cleanupService.Ensure(1);
    //     Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
    // }

    #endregion

    #region CleanupChapters

    [Fact]
    public void CleanupChapters_AllFilesShouldBeDeleted()
    {
        // CleanupChapters deletes the cache directory for every passed chapter id.
        var filesystem = CreateFileSystem();
        filesystem.AddDirectory($"{CacheDirectory}1/");
        filesystem.AddFile($"{CacheDirectory}1/001.jpg", new MockFileData(""));
        filesystem.AddFile($"{CacheDirectory}1/002.jpg", new MockFileData(""));
        filesystem.AddFile($"{CacheDirectory}3/003.jpg", new MockFileData(""));
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var cleanupService = new CacheService(_logger, _unitOfWork, ds,
            new ReadingItemService(Substitute.For<IArchiveService>(),
                Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());

        cleanupService.CleanupChapters(new []{1, 3});
        Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
    }

    #endregion

    #region GetCachedEpubFile

    [Fact]
    public void GetCachedEpubFile_ShouldReturnFirstEpub()
    {
        var filesystem = CreateFileSystem();
        filesystem.AddDirectory($"{CacheDirectory}1/");
        filesystem.AddFile($"{DataDirectory}1.epub", new MockFileData(""));
        filesystem.AddFile($"{DataDirectory}2.epub", new MockFileData(""));
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var cs = new CacheService(_logger, _unitOfWork, ds,
            new ReadingItemService(Substitute.For<IArchiveService>(),
                Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());

        var c = new Chapter()
        {
            Files = new List<MangaFile>()
            {
                new MangaFile()
                {
                    FilePath = $"{DataDirectory}1.epub"
                },
                new MangaFile()
                {
                    FilePath = $"{DataDirectory}2.epub"
                }
            }
        };
        cs.GetCachedFile(c);
        // NOTE(review): Assert.Same is reference equality; this appears to rely on the
        // constant-folded interpolated string being interned — confirm, or switch to Assert.Equal.
        Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c));
    }

    #endregion

    #region GetCachedPagePath

    [Fact]
    public void GetCachedPagePath_ReturnNullIfNoFiles()
    {
        // With no files on the chapter, the lookup yields string.Empty (not null,
        // despite the test name).
        var filesystem = CreateFileSystem();
        filesystem.AddDirectory($"{CacheDirectory}1/");
        filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
        filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));

        var c = new Chapter()
        {
            Id = 1,
            Files = new List<MangaFile>()
        };

        var fileIndex = 0;
        foreach (var file in c.Files)
        {
            for (var i = 0; i < file.Pages - 1; i++)
            {
                filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
            }

            fileIndex++;
        }

        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var cs = new CacheService(_logger, _unitOfWork, ds,
            new ReadingItemService(Substitute.For<IArchiveService>(),
                Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());

        // Flatten to prepare for how GetFullPath expects
        ds.Flatten($"{CacheDirectory}1/");

        var path = cs.GetCachedPagePath(c, 11);
        Assert.Equal(string.Empty, path);
    }

    [Fact]
    public void GetCachedPagePath_GetFileFromFirstFile()
    {
        var filesystem = CreateFileSystem();
        filesystem.AddDirectory($"{CacheDirectory}1/");
        filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
        filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));

        var c = new Chapter()
        {
            Id = 1,
            Files = new List<MangaFile>()
            {
                new MangaFile()
                {
                    Id = 1,
                    FilePath = $"{DataDirectory}1.zip",
                    Pages = 10

                },
                new MangaFile()
                {
                    Id = 2,
                    FilePath = $"{DataDirectory}2.zip",
                    Pages = 5
                }
            }
        };

        var fileIndex = 0;
        foreach (var file in c.Files)
        {
            for (var i = 0; i < file.Pages; i++)
            {
                filesystem.AddFile($"{CacheDirectory}1/00{fileIndex}_00{i+1}.jpg", new MockFileData(""));
            }

            fileIndex++;
        }

        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var cs = new CacheService(_logger, _unitOfWork, ds,
            new ReadingItemService(Substitute.For<IArchiveService>(),
                Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());

        // Flatten to prepare for how GetFullPath expects
        ds.Flatten($"{CacheDirectory}1/");

        Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c, 0)));
    }

    [Fact]
    public void GetCachedPagePath_GetLastPageFromSingleFile()
    {
        var filesystem = CreateFileSystem();
        filesystem.AddDirectory($"{CacheDirectory}1/");
        filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));

        var c = new Chapter()
        {
            Id = 1,
            Files = new List<MangaFile>()
            {
                new MangaFile()
                {
                    Id = 1,
                    FilePath = $"{DataDirectory}1.zip",
                    Pages = 10

                }
            }
        };
        c.Pages = c.Files.Sum(f => f.Pages);

        var fileIndex = 0;
        foreach (var file in c.Files)
        {
            for (var i = 0; i < file.Pages; i++)
            {
                filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
            }

            fileIndex++;
        }

        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var cs = new CacheService(_logger, _unitOfWork, ds,
            new ReadingItemService(Substitute.For<IArchiveService>(),
                Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());

        // Flatten to prepare for how GetFullPath expects
        ds.Flatten($"{CacheDirectory}1/");

        // Remember that we start at 0, so this is the 10th file
        var path = cs.GetCachedPagePath(c, c.Pages);
        Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_0{c.Pages}.jpg"), ds.FileSystem.Path.GetFullPath(path));
    }

    [Fact]
    public void GetCachedPagePath_GetFileFromSecondFile()
    {
        var filesystem = CreateFileSystem();
        filesystem.AddDirectory($"{CacheDirectory}1/");
        filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
        filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));

        var c = new Chapter()
        {
            Id = 1,
            Files = new List<MangaFile>()
            {
                new MangaFile()
                {
                    Id = 1,
                    FilePath = $"{DataDirectory}1.zip",
                    Pages = 10

                },
                new MangaFile()
                {
                    Id = 2,
                    FilePath = $"{DataDirectory}2.zip",
                    Pages = 5
                }
            }
        };

        var fileIndex = 0;
        foreach (var file in c.Files)
        {
            for (var i = 0; i < file.Pages; i++)
            {
                filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
            }

            fileIndex++;
        }

        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var cs = new CacheService(_logger, _unitOfWork, ds,
            new ReadingItemService(Substitute.For<IArchiveService>(),
                Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());

        // Flatten to prepare for how GetFullPath expects
        ds.Flatten($"{CacheDirectory}1/");

        // Remember that we start at 0, so this is the page + 1 file
        var path = cs.GetCachedPagePath(c, 10);
        Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/001_001.jpg"), ds.FileSystem.Path.GetFullPath(path));
    }

    #endregion

    #region ExtractChapterFiles

    // [Fact]
    // public void ExtractChapterFiles_ShouldExtractOnlyImages()
    // {
    //     const string testDirectory = "/manga/";
    //     var fileSystem = new MockFileSystem();
    //     for (var i = 0; i < 10; i++)
    //     {
    //         fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
    //     }
    //
    //     fileSystem.AddDirectory(CacheDirectory);
    //
    //     var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
    //     var cs = new CacheService(_logger, _unitOfWork, ds,
    //         new MockReadingItemServiceForCacheService(ds));
    //
    //
    //     cs.ExtractChapterFiles(CacheDirectory, new List<MangaFile>()
    //     {
    //         new MangaFile()
    //         {
    //             ChapterId = 1,
    //             Format = MangaFormat.Archive,
    //             Pages = 2,
    //             FilePath =
    //         }
    //     })
    // }

    #endregion
}

File diff suppressed because it is too large Load diff

View file

@ -5,38 +5,37 @@ using System.IO.Abstractions.TestingHelpers;
using API.Helpers;
using API.Services;
namespace API.Tests.Services
namespace API.Tests.Services;
public class MetadataServiceTests
{
public class MetadataServiceTests
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
private const string TestCoverImageFile = "thumbnail.jpg";
private const string TestCoverArchive = @"c:\file in folder.zip";
private readonly string _testCoverImageDirectory = Path.Join(Directory.GetCurrentDirectory(), @"../../../Services/Test Data/ArchiveService/CoverImages");
//private readonly MetadataService _metadataService;
// private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>();
// private readonly IImageService _imageService = Substitute.For<IImageService>();
// private readonly IBookService _bookService = Substitute.For<IBookService>();
// private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
// private readonly ILogger<MetadataService> _logger = Substitute.For<ILogger<MetadataService>>();
// private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
private readonly ICacheHelper _cacheHelper;
public MetadataServiceTests()
{
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
private const string TestCoverImageFile = "thumbnail.jpg";
private const string TestCoverArchive = @"c:\file in folder.zip";
private readonly string _testCoverImageDirectory = Path.Join(Directory.GetCurrentDirectory(), @"../../../Services/Test Data/ArchiveService/CoverImages");
//private readonly MetadataService _metadataService;
// private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>();
// private readonly IImageService _imageService = Substitute.For<IImageService>();
// private readonly IBookService _bookService = Substitute.For<IBookService>();
// private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
// private readonly ILogger<MetadataService> _logger = Substitute.For<ILogger<MetadataService>>();
// private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
private readonly ICacheHelper _cacheHelper;
public MetadataServiceTests()
//_metadataService = new MetadataService(_unitOfWork, _logger, _archiveService, _bookService, _imageService, _messageHub);
var file = new MockFileData("")
{
//_metadataService = new MetadataService(_unitOfWork, _logger, _archiveService, _bookService, _imageService, _messageHub);
var file = new MockFileData("")
{
LastWriteTime = DateTimeOffset.Now.Subtract(TimeSpan.FromMinutes(1))
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
{ TestCoverArchive, file }
});
LastWriteTime = DateTimeOffset.Now.Subtract(TimeSpan.FromMinutes(1))
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
{ TestCoverArchive, file }
});
var fileService = new FileService(fileSystem);
_cacheHelper = new CacheHelper(fileService);
}
var fileService = new FileService(fileSystem);
_cacheHelper = new CacheHelper(fileService);
}
}

View file

@ -9,124 +9,123 @@ using API.Services.Tasks.Scanner;
using API.Tests.Helpers;
using Xunit;
namespace API.Tests.Services
namespace API.Tests.Services;
public class ScannerServiceTests
{
public class ScannerServiceTests
[Fact]
public void FindSeriesNotOnDisk_Should_Remove1()
{
[Fact]
public void FindSeriesNotOnDisk_Should_Remove1()
var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive});
//AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub});
var existingSeries = new List<Series>
{
var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive});
//AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub});
var existingSeries = new List<Series>
new Series()
{
new Series()
Name = "Darker Than Black",
LocalizedName = "Darker Than Black",
OriginalName = "Darker Than Black",
Volumes = new List<Volume>()
{
Name = "Darker Than Black",
LocalizedName = "Darker Than Black",
OriginalName = "Darker Than Black",
Volumes = new List<Volume>()
new Volume()
{
new Volume()
{
Number = 1,
Name = "1"
}
},
NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"),
Metadata = new SeriesMetadata(),
Format = MangaFormat.Epub
}
};
Assert.Equal(1, ScannerService.FindSeriesNotOnDisk(existingSeries, infos).Count());
}
[Fact]
public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
{
var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive});
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive});
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive});
var existingSeries = new List<Series>
{
new Series()
{
Name = "Cage of Eden",
LocalizedName = "Cage of Eden",
OriginalName = "Cage of Eden",
NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Cage of Eden"),
Metadata = new SeriesMetadata(),
Format = MangaFormat.Archive
Number = 1,
Name = "1"
}
},
new Series()
{
Name = "Darker Than Black",
LocalizedName = "Darker Than Black",
OriginalName = "Darker Than Black",
NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"),
Metadata = new SeriesMetadata(),
Format = MangaFormat.Archive
}
};
Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
}
// TODO: Figure out how to do this with ParseScannedFiles
// [Theory]
// [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")]
// [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")]
// [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")]
// [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")]
// public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected)
// {
// var collectedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
// foreach (var seriesName in existingSeriesNames)
// {
// AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName, Format = MangaFormat.Archive});
// }
//
// var actualName = new ParseScannedFiles(_bookService, _logger).MergeName(collectedSeries, new ParserInfo()
// {
// Series = parsedInfoName,
// Format = MangaFormat.Archive
// });
//
// Assert.Equal(expected, actualName);
// }
// [Fact]
// public void RemoveMissingSeries_Should_RemoveSeries()
// {
// var existingSeries = new List<Series>()
// {
// EntityFactory.CreateSeries("Darker than Black Vol 1"),
// EntityFactory.CreateSeries("Darker than Black"),
// EntityFactory.CreateSeries("Beastars"),
// };
// var missingSeries = new List<Series>()
// {
// EntityFactory.CreateSeries("Darker than Black Vol 1"),
// };
// existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList();
//
// Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name));
// Assert.Equal(missingSeries.Count, removeCount);
// }
// TODO: I want a test for UpdateSeries where if I have chapter 10 and now it's mapping into Vol 2 Chapter 10,
// if I can do it without deleting the underlying chapter (aka id change)
NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"),
Metadata = new SeriesMetadata(),
Format = MangaFormat.Epub
}
};
Assert.Equal(1, ScannerService.FindSeriesNotOnDisk(existingSeries, infos).Count());
}
[Fact]
public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
{
var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive});
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive});
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive});
var existingSeries = new List<Series>
{
new Series()
{
Name = "Cage of Eden",
LocalizedName = "Cage of Eden",
OriginalName = "Cage of Eden",
NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Cage of Eden"),
Metadata = new SeriesMetadata(),
Format = MangaFormat.Archive
},
new Series()
{
Name = "Darker Than Black",
LocalizedName = "Darker Than Black",
OriginalName = "Darker Than Black",
NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"),
Metadata = new SeriesMetadata(),
Format = MangaFormat.Archive
}
};
Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
}
// TODO: Figure out how to do this with ParseScannedFiles
// [Theory]
// [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")]
// [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")]
// [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")]
// [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")]
// public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected)
// {
// var collectedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
// foreach (var seriesName in existingSeriesNames)
// {
// AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName, Format = MangaFormat.Archive});
// }
//
// var actualName = new ParseScannedFiles(_bookService, _logger).MergeName(collectedSeries, new ParserInfo()
// {
// Series = parsedInfoName,
// Format = MangaFormat.Archive
// });
//
// Assert.Equal(expected, actualName);
// }
// [Fact]
// public void RemoveMissingSeries_Should_RemoveSeries()
// {
// var existingSeries = new List<Series>()
// {
// EntityFactory.CreateSeries("Darker than Black Vol 1"),
// EntityFactory.CreateSeries("Darker than Black"),
// EntityFactory.CreateSeries("Beastars"),
// };
// var missingSeries = new List<Series>()
// {
// EntityFactory.CreateSeries("Darker than Black Vol 1"),
// };
// existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList();
//
// Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name));
// Assert.Equal(missingSeries.Count, removeCount);
// }
// TODO: I want a test for UpdateSeries where if I have chapter 10 and now it's mapping into Vol 2 Chapter 10,
// if I can do it without deleting the underlying chapter (aka id change)
}