Kavita/API.Tests/Services/ScannerServiceTests.cs
Joseph Milazzo 56cf7be799
Scan Chunking (#604)
* Some performance refactoring around fetching the Library, and avoided a byte[] copy when getting cover images for epubs.

* Initial commit. Rewrote the main series scan loop to use chunks of data at a time. Not fully shaken out.
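
A rough shape of the chunked loop: page through the library's series and process one chunk at a time. A minimal sketch, assuming a Series entity and stand-in delegates (getSeriesPage, processSeries) rather than Kavita's exact repository API:

    // Sketch of chunk-based scanning; the delegate names are stand-ins.
    public static async Task ScanInChunks(
        Func<int, int, Task<IReadOnlyList<Series>>> getSeriesPage, // (pageNumber, pageSize)
        Func<Series, Task> processSeries,
        int totalSeries, int pageSize)
    {
        // Round up so the final partial chunk is included; iterating with the
        // wrong bound can silently process only the first chunk.
        var totalChunks = (int) Math.Ceiling(totalSeries / (float) pageSize);
        for (var pageNumber = 1; pageNumber <= totalChunks; pageNumber++)
        {
            foreach (var series in await getSeriesPage(pageNumber, pageSize))
            {
                await processSeries(series);
            }
        }
    }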

* Hooked in the ability for the UI to react to series being added or removed from the DB.

* Cleaned up the messaging in the scan loop to be more clear.

* Metadata scan and scan work as expected and populate data to the UI. There is a slowdown in overall operation speed.

Scan series and refresh series metadata does not work fully.

* Fixed a bug where MangaFiles were not having LastModified updated correctly, meaning archives were being reopened on every scan.
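
The fix hinges on comparing the stored timestamp against the file on disk before doing any expensive archive work. A minimal sketch of the idea; the member names (HasFileBeenModified, UpdateLastModified) are illustrative assumptions, not confirmed entity API:

    // Sketch: skip reprocessing when the file on disk has not changed.
    public class MangaFile
    {
        public string FilePath { get; set; }
        public DateTime LastModified { get; set; }

        // True when the file was written to after the last recorded scan.
        public bool HasFileBeenModified() =>
            !File.GetLastWriteTime(FilePath).Equals(LastModified);

        // Record the on-disk timestamp so the next scan can skip this file.
        public void UpdateLastModified() =>
            LastModified = File.GetLastWriteTime(FilePath);
    }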

* Modified the code to reflect the underlying file more accurately.

* Updated ScanService to properly handle deleted files and not result in a higher-level scan.

* Moved volume-related repository APIs into the volume repo rather than the series repo.

* Rewrote scan series to be much cleaner and more concise in its flow. Fixed an issue in UpdateVolumes where debug code logging removed volumes could throw an exception and break updating volumes.

* Refactored the code to set MangaFile last modified timestamp into the MangaFile entity.

* Added Series Name to ScanSeries event

* Added additional checks in ScanSeries to ensure we never go outside the library folder.

Added extra debug messages for when a metadata refresh doesn't actually make changes and for when we regen cover images.
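
The folder check could take roughly this shape; a sketch under assumed path normalization, not necessarily the exact guard:

    // Sketch: only accept files that resolve to somewhere under a library folder.
    public static bool IsWithinLibrary(IEnumerable<string> libraryFolders, string filePath)
    {
        var fullPath = Path.GetFullPath(filePath);
        return libraryFolders
            // Trailing separator so "/library2" cannot match "/library" by prefix.
            .Select(f => Path.GetFullPath(f).TrimEnd(Path.DirectorySeparatorChar) + Path.DirectorySeparatorChar)
            .Any(root => fullPath.StartsWith(root, StringComparison.OrdinalIgnoreCase));
    }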

* More logging statements indicating where they originate from. Fixed a critical bug which caused only one chunk to ever be processed.

* Fixed a concurrency issue with the natural sorter which could cause issues in ArchiveService.cs.
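
A comparer that memoizes tokenized strings in a shared table is unsafe once sorts run in parallel. One thread-safe shape, as a sketch rather than Kavita's actual implementation:

    // Sketch: ConcurrentDictionary makes the memo table safe for parallel sorts.
    public class NaturalSortComparer : IComparer<string>
    {
        private readonly ConcurrentDictionary<string, string[]> _table = new();

        public int Compare(string x, string y)
        {
            var xParts = _table.GetOrAdd(x, Tokenize);
            var yParts = _table.GetOrAdd(y, Tokenize);
            for (var i = 0; i < Math.Min(xParts.Length, yParts.Length); i++)
            {
                if (xParts[i] == yParts[i]) continue;
                // Digit runs compare numerically, everything else lexicographically.
                return int.TryParse(xParts[i], out var xv) && int.TryParse(yParts[i], out var yv)
                    ? xv.CompareTo(yv)
                    : string.Compare(xParts[i], yParts[i], StringComparison.OrdinalIgnoreCase);
            }
            return xParts.Length.CompareTo(yParts.Length);
        }

        // "file 10.cbz" -> ["file ", "10", ".cbz"] so numbers sort by value.
        private static string[] Tokenize(string s) =>
            Regex.Split(s, "([0-9]+)").Where(p => p.Length > 0).ToArray();
    }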

* Log cleanups

* Fixed an issue with logging the total time of a scan.

* Only show 'added' toastrs for admins. When kicking off a metadata refresh for a series, make sure we regenerate all cover images.

* Fixed code smells on the benchmark despite it being ignored.
2021-09-30 06:08:05 -07:00


using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Data.Common;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services;
using API.Services.Tasks;
using API.Services.Tasks.Scanner;
using API.SignalR;
using API.Tests.Helpers;
using AutoMapper;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

namespace API.Tests.Services
{
    public class ScannerServiceTests : IDisposable
    {
        private readonly ScannerService _scannerService;
        private readonly ILogger<ScannerService> _logger = Substitute.For<ILogger<ScannerService>>();
        private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
        private readonly IBookService _bookService = Substitute.For<IBookService>();
        private readonly IImageService _imageService = Substitute.For<IImageService>();
        private readonly ILogger<MetadataService> _metadataLogger = Substitute.For<ILogger<MetadataService>>();
        private readonly ICacheService _cacheService = Substitute.For<ICacheService>();
        private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();

        private readonly DbConnection _connection;
        private readonly DataContext _context;
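
        // The fixture runs against a real DataContext backed by an in-memory
        // SQLite database; everything around it is an NSubstitute mock, so the
        // tests exercise scanner logic plus EF Core and nothing else.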
        public ScannerServiceTests()
        {
            var contextOptions = new DbContextOptionsBuilder()
                .UseSqlite(CreateInMemoryDatabase())
                .Options;
            _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;

            _context = new DataContext(contextOptions);
            Task.Run(SeedDb).GetAwaiter().GetResult();

            IUnitOfWork unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
            IMetadataService metadataService = Substitute.For<MetadataService>(unitOfWork, _metadataLogger, _archiveService, _bookService, _imageService, _messageHub);
            _scannerService = new ScannerService(unitOfWork, _logger, _archiveService, metadataService, _bookService, _cacheService, _messageHub);
        }

        private async Task<bool> SeedDb()
        {
            await _context.Database.MigrateAsync();
            await Seed.SeedSettings(_context);

            _context.Library.Add(new Library()
            {
                Name = "Manga",
                Folders = new List<FolderPath>()
                {
                    new FolderPath()
                    {
                        Path = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga")
                    }
                }
            });
            return await _context.SaveChangesAsync() > 0;
        }
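
        // FindSeriesNotOnDisk should flag nothing when every existing series
        // still has a matching ParserInfo (by format and normalized name).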
        [Fact]
        public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
        {
            var infos = new Dictionary<ParsedSeries, List<ParserInfo>>();

            AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive});
            AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive});
            AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive});

            var existingSeries = new List<Series>
            {
                new Series()
                {
                    Name = "Cage of Eden",
                    LocalizedName = "Cage of Eden",
                    OriginalName = "Cage of Eden",
                    NormalizedName = API.Parser.Parser.Normalize("Cage of Eden"),
                    Metadata = new SeriesMetadata(),
                    Format = MangaFormat.Archive
                },
                new Series()
                {
                    Name = "Darker Than Black",
                    LocalizedName = "Darker Than Black",
                    OriginalName = "Darker Than Black",
                    NormalizedName = API.Parser.Parser.Normalize("Darker Than Black"),
                    Metadata = new SeriesMetadata(),
                    Format = MangaFormat.Archive
                }
            };

            Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
        }

        // TODO: Figure out how to do this with ParseScannedFiles
        // [Theory]
        // [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")]
        // [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")]
        // [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")]
        // [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")]
        // public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected)
        // {
        //     var collectedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
        //     foreach (var seriesName in existingSeriesNames)
        //     {
        //         AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName, Format = MangaFormat.Archive});
        //     }
        //
        //     var actualName = new ParseScannedFiles(_bookService, _logger).MergeName(collectedSeries, new ParserInfo()
        //     {
        //         Series = parsedInfoName,
        //         Format = MangaFormat.Archive
        //     });
        //
        //     Assert.Equal(expected, actualName);
        // }

        [Fact]
        public void RemoveMissingSeries_Should_RemoveSeries()
        {
            var existingSeries = new List<Series>()
            {
                EntityFactory.CreateSeries("Darker than Black Vol 1"),
                EntityFactory.CreateSeries("Darker than Black"),
                EntityFactory.CreateSeries("Beastars"),
            };
            var missingSeries = new List<Series>()
            {
                EntityFactory.CreateSeries("Darker than Black Vol 1"),
            };
            existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList();

            Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name));
            Assert.Equal(missingSeries.Count, removeCount);
        }
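
        // Test helper mirroring the scanner's series grouping: ParserInfos are
        // collected under a ParsedSeries key matched on format + normalized name.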
        private void AddToParsedInfo(IDictionary<ParsedSeries, List<ParserInfo>> collectedSeries, ParserInfo info)
        {
            var existingKey = collectedSeries.Keys.FirstOrDefault(ps =>
                ps.Format == info.Format && ps.NormalizedName == API.Parser.Parser.Normalize(info.Series));
            existingKey ??= new ParsedSeries()
            {
                Format = info.Format,
                Name = info.Series,
                NormalizedName = API.Parser.Parser.Normalize(info.Series)
            };

            // A type pattern is required here: comparing GetType() against the open
            // generic typeof(ConcurrentDictionary<,>) never matches a constructed type,
            // which silently forced every caller down the plain-dictionary path.
            if (collectedSeries is ConcurrentDictionary<ParsedSeries, List<ParserInfo>> concurrentSeries)
            {
                concurrentSeries.AddOrUpdate(existingKey, new List<ParserInfo>() {info}, (_, oldValue) =>
                {
                    oldValue ??= new List<ParserInfo>();
                    if (!oldValue.Contains(info))
                    {
                        oldValue.Add(info);
                    }

                    return oldValue;
                });
            }
            else
            {
                if (!collectedSeries.ContainsKey(existingKey))
                {
                    collectedSeries.Add(existingKey, new List<ParserInfo>() {info});
                }
                else
                {
                    var list = collectedSeries[existingKey];
                    if (!list.Contains(info))
                    {
                        list.Add(info);
                    }

                    collectedSeries[existingKey] = list;
                }
            }
        }
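
        // SQLite's :memory: database only lives as long as the connection that
        // created it, so the connection stays open and is disposed with the class.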
        private static DbConnection CreateInMemoryDatabase()
        {
            var connection = new SqliteConnection("Filename=:memory:");
            connection.Open();

            return connection;
        }

        public void Dispose() => _connection.Dispose();
    }
}