EPUB Support (#178)
* Added book filetype detection and reorganized tests due to file size.
* Added the ability to get basic parse info from a book and its pages.
* We can now scan books and get them into a library with cover images.
* Take the first image in the epub if the cover isn't set.
* Implemented the ability to unzip the epub to cache. Implemented a test API to load HTML files.
* Just some test code to figure out how to approach this.
* Fixed some merge conflicts.
* Removed some dead code from the merge.
* Snapshot: I can now load everything properly into the UI by rewriting the URLs before I send them back. I don't notice any lag from this method. It can be optimized further.
* Implemented a way to load the content in the browser rather than via an iframe.
* Added a note.
* Anchor mapping is complete. New anchors are updated so references now resolve to javascript:void() for the UI to handle internal loading, with the appropriate page mapped to each one. Anchors that are external have target="_blank" added so they don't force you out of the app, and styles are of course inlined. (A hedged sketch of this rewriting follows the list.)
* Oops, I need this.
* Table of contents API implemented (rough) and some small enhancements to the codebase for books.
* GetBookPageResources now only loads files from within the book. Added nested chapter list support, and images now use HTML parsing instead of string parsing.
* Fonts are now remapped to load from an endpoint. (A sketch of the @font-face handling also follows the list.)
* book-resources now uses a key, ensuring the file is in the proper format for lookup. Changed the chapter list to be based on a structure with one HEADER and nested chapters.
* Properly handle SVG resource requests, and when there are clickable part anchors, make sure we handle them in the UI by adding a kavita-page handler.
* Add a Chapter group page even if one isn't set, by using the first page (without a part) from nestedChildren.
* Added extra debug code for issue #163.
* Added new user preferences for books and updated the CSS so it is scoped to our reading section.
* Cleaned up style code.
* Implemented the ability to save book preferences and some cleanup of existing APIs.
* Added an API for checking if a user has read something in a library type before.
* Forgot to make sure the has-reading-progress check is against a user.
* Removed CacheService code for books, since we use an in-memory method.
* Handle SVG images as well.
* Enhanced cover image extraction to check for a "cover" image if the cover image wasn't set in the OPF, before falling back to the first image.
* Fixed an issue with special books not properly generating metadata due to not having a filename set.
* Cleanup: removed the warmup task code from startup/Program and changed TaskScheduler to schedule tasks on startup only (or when tasks are changed from the UI).
* Code cleanup.
* Code cleanup.
* So much code. Lots of refactoring to try to test ScannerService. Moved a lot of the queries into Extensions to allow easier testing, even though it's hacky. Support @font-face src:url swaps with ' and ". Source summary information from epubs.
* Well... baseURL needs to come from the BE and not from the UI.
* Adjusted migrations so default values match the Entity.
* Removed a comment.
* I think I finally fixed #163! The issue was that when I checked whether it had a ParserInfo, I wasn't considering that the chapter range might have a - in it (0-6), so when the code that checks whether the range could parse out a number failed, it treated it like a special and checked the range against the info's filename.
* Some bugfixes.
* Lots of testing, extracting code to make it easier to test. This code is buggy, but it fixed a bug where 1) if we changed the normalization code, we would remove the whole DB during a scan, and 2) we weren't actually removing series properly. Other than that, code is being extracted to remove duplication and centralize logic.
* More code cleanup and test cleanup to ensure the scan loop is working as expected and matches expectations from the tests.
* Cleaned up the code and made it so that if I change normalization, which I do in this branch, it won't break existing DBs.
* Some comic parser changes for partial chapter support.
* Added some code for DirectoryService and ScannerService, along with Python code to generate test files (not used yet). Fixed up all the tests.
* Code smells.
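To make the anchor-mapping bullet above more concrete, here is a minimal sketch of that kind of rewriting. It assumes an HtmlAgilityPack-style HTML parser and a hypothetical pageMap from epub hrefs to reader page numbers; it illustrates the idea rather than reproducing the actual BookService code.

```csharp
using System.Collections.Generic;
using HtmlAgilityPack; // assumption: an HtmlAgilityPack-style parser is available

public static class BookAnchorRewriter
{
    // Hypothetical helper: internal links are neutralized with javascript:void(0)
    // and tagged with their mapped page so the UI's kavita-page handler can load
    // the page in-app, while external links open in a new tab instead of
    // navigating the reader out of the application.
    public static string RewriteAnchors(string html, IReadOnlyDictionary<string, int> pageMap)
    {
        var doc = new HtmlDocument();
        doc.LoadHtml(html);

        var anchors = doc.DocumentNode.SelectNodes("//a[@href]");
        if (anchors == null) return html;

        foreach (var anchor in anchors)
        {
            var href = anchor.GetAttributeValue("href", string.Empty);
            if (pageMap.TryGetValue(href, out var page))
            {
                // Internal reference: let the UI intercept the click.
                anchor.SetAttributeValue("href", "javascript:void(0)");
                anchor.SetAttributeValue("kavita-page", page.ToString());
            }
            else
            {
                // External reference: don't force the reader out of the app.
                anchor.SetAttributeValue("target", "_blank");
            }
        }

        return doc.DocumentNode.OuterHtml;
    }
}
```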
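Likewise, the @font-face remapping described above could be approximated with a single regex that tolerates both quote styles around url(...). The apiBase parameter and the rewritten URL shape are assumptions for illustration only; the real target is the book-resources endpoint mentioned in the list.

```csharp
using System.Text.RegularExpressions;

public static class FontFaceRemapper
{
    // Hypothetical illustration of remapping @font-face sources to an API endpoint.
    // The pattern accepts url(...) wrapped in single quotes, double quotes, or no
    // quotes, which is the ' and " handling called out in the commit message.
    public static string RemapFontSrc(string css, string apiBase)
    {
        return Regex.Replace(
            css,
            @"src\s*:\s*url\(\s*['""]?(?<path>[^'"")]+)['""]?\s*\)",
            match => $"src: url('{apiBase}{match.Groups["path"].Value}')");
    }
}
```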
Parent: 2b99c8abfa
Commit: a01613f80f
103 changed files with 5017 additions and 2480 deletions
@@ -1,69 +1,102 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Data.Common;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services;
using API.Services.Tasks;
using API.Tests.Helpers;
using AutoMapper;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using NSubstitute.Extensions;
using Xunit;
using Xunit.Abstractions;

namespace API.Tests.Services
{
    public class ScannerServiceTests
    public class ScannerServiceTests : IDisposable
    {
        private readonly ITestOutputHelper _testOutputHelper;
        private readonly ScannerService _scannerService;
        private readonly ILogger<ScannerService> _logger = Substitute.For<ILogger<ScannerService>>();
        private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>();
        private readonly IUnitOfWork _unitOfWork;
        private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
        private readonly IBookService _bookService = Substitute.For<IBookService>();
        private readonly IMetadataService _metadataService;
        private readonly ILogger<MetadataService> _metadataLogger = Substitute.For<ILogger<MetadataService>>();
        private Library _libraryMock;

        private readonly DbConnection _connection;
        private readonly DataContext _context;


        public ScannerServiceTests(ITestOutputHelper testOutputHelper)
        {
            _testOutputHelper = testOutputHelper;
            _scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService);
            _metadataService= Substitute.For<MetadataService>(_unitOfWork, _metadataLogger, _archiveService);
            // _libraryMock = new Library()
            // {
            //     Id = 1,
            //     Name = "Manga",
            //     Folders = new List<FolderPath>()
            //     {
            //         new FolderPath()
            //         {
            //             Id = 1,
            //             LastScanned = DateTime.Now,
            //             LibraryId = 1,
            //             Path = "E:/Manga"
            //         }
            //     },
            //     LastModified = DateTime.Now,
            //     Series = new List<Series>()
            //     {
            //         new Series()
            //         {
            //             Id = 0,
            //             Name = "Darker Than Black"
            //         }
            //     }
            // };
            var contextOptions = new DbContextOptionsBuilder()
                .UseSqlite(CreateInMemoryDatabase())
                .Options;
            _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;

            _context = new DataContext(contextOptions);
            Task.Run(SeedDb).GetAwaiter().GetResult();


            //BackgroundJob.Enqueue is what I need to mock or something (it's static...)
            // ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
            // IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
            // IBackgroundJobClient jobClient
            //var taskScheduler = new TaskScheduler(Substitute.For<ICacheService>(), Substitute.For<ILogger<TaskScheduler>>(), Substitute.For<)


            // Substitute.For<UserManager<AppUser>>() - Not needed because only for UserService
            _unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null,
                Substitute.For<ILogger<UnitOfWork>>());


            _testOutputHelper = testOutputHelper;
            _metadataService= Substitute.For<MetadataService>(_unitOfWork, _metadataLogger, _archiveService, _bookService);
            _scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService, _bookService);
        }

        private async Task<bool> SeedDb()
        {
            await _context.Database.MigrateAsync();
            await Seed.SeedSettings(_context);

            _context.Library.Add(new Library()
            {
                Name = "Manga",
                Folders = new List<FolderPath>()
                {
                    new FolderPath()
                    {
                        Path = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga")
                    }
                }
            });
            return await _context.SaveChangesAsync() > 0;
        }

        // [Fact]
        // public void Test()
        // {
        //     _scannerService.ScanLibrary(1, false);
        //
        //     var series = _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1).Result.Series;
        // }

        [Fact]
        public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
        {
            var scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService);
            var infos = new Dictionary<string, List<ParserInfo>>();

            AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"});
@@ -76,38 +109,36 @@ namespace API.Tests.Services
                Name = "Cage of Eden",
                LocalizedName = "Cage of Eden",
                OriginalName = "Cage of Eden",
                NormalizedName = Parser.Parser.Normalize("Cage of Eden")
                NormalizedName = API.Parser.Parser.Normalize("Cage of Eden")
            });
            existingSeries.Add(new Series()
            {
                Name = "Darker Than Black",
                LocalizedName = "Darker Than Black",
                OriginalName = "Darker Than Black",
                NormalizedName = Parser.Parser.Normalize("Darker Than Black")
                NormalizedName = API.Parser.Parser.Normalize("Darker Than Black")
            });
            var expectedSeries = new List<Series>();



            Assert.Empty(scannerService.FindSeriesNotOnDisk(existingSeries, infos));
            Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos));
        }

        [Theory]
        [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")]
        [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")]
        [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker Than Black!")]
        [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")]
        [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")]
        public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected)
        {
            var scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService);

            var collectedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
            foreach (var seriesName in existingSeriesNames)
            {
                AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName});
            }

            var actualName = scannerService.MergeName(collectedSeries, new ParserInfo()
            var actualName = _scannerService.MergeName(collectedSeries, new ParserInfo()
            {
                Series = parsedInfoName
            });
@@ -115,6 +146,25 @@ namespace API.Tests.Services
            Assert.Equal(expected, actualName);
        }

        [Fact]
        public void RemoveMissingSeries_Should_RemoveSeries()
        {
            var existingSeries = new List<Series>()
            {
                EntityFactory.CreateSeries("Darker than Black Vol 1"),
                EntityFactory.CreateSeries("Darker than Black"),
                EntityFactory.CreateSeries("Beastars"),
            };
            var missingSeries = new List<Series>()
            {
                EntityFactory.CreateSeries("Darker than Black Vol 1"),
            };
            existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList();

            Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name));
            Assert.Equal(missingSeries.Count, removeCount);
        }

        private void AddToParsedInfo(IDictionary<string, List<ParserInfo>> collectedSeries, ParserInfo info)
        {
            if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>))
@@ -209,5 +259,16 @@ namespace API.Tests.Services
            // _testOutputHelper.WriteLine(_libraryMock.ToString());
            Assert.True(true);
        }

        private static DbConnection CreateInMemoryDatabase()
        {
            var connection = new SqliteConnection("Filename=:memory:");

            connection.Open();

            return connection;
        }

        public void Dispose() => _connection.Dispose();
    }
}