.NET 6 Coding Patterns + Unit Tests (#823)

* Refactored all files to have their interfaces within the same file. Started moving over to file-scoped namespaces.

* Refactored the common methods for getting an underlying file's cover and pages, and for extracting its contents, into a single interface (sketched below).
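
A minimal sketch of what that consolidated interface could look like, based on the IReadingItemService calls that appear in the diffs below (Parse, GetComicInfo, GetNumberOfPages); the cover and extraction members are assumptions inferred from this commit message, not confirmed signatures:

using API.Data.Metadata;
using API.Entities.Enums;
using API.Parser;

namespace API.Services;

public interface IReadingItemService
{
    // Members visible in the diffs below
    ParserInfo Parse(string path, string rootPath, LibraryType type);
    ComicInfo GetComicInfo(string filePath, MangaFormat format);
    int GetNumberOfPages(string filePath, MangaFormat format);

    // Assumed members covering the cover and extraction responsibilities
    string GetCoverImage(string filePath, string fileName, MangaFormat format);
    void Extract(string fileName, string targetDirectory, MangaFormat format);
}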

* More refactoring to remove the dependence on explicit file-type checks when getting file information.

* Code is buildable; tests are broken. Huge refactor (not yet complete) that makes most of DirectoryService testable with a mock filesystem (and thus the services that utilize it); see the test sketch below.
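
The mock filesystem is presumably System.IO.Abstractions (the refactored code below resolves everything through _directoryService.FileSystem), which lets tests run against an in-memory tree instead of the real disk. A minimal sketch of the pattern, assuming DirectoryService now accepts an IFileSystem in its constructor:

using System.IO.Abstractions.TestingHelpers;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

public class DirectoryServiceTests
{
    [Fact]
    public void ExistOrCreate_ShouldCreateDirectory()
    {
        // In-memory filesystem: nothing on the real disk is touched
        var fileSystem = new MockFileSystem();
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

        ds.ExistOrCreate("/config/backups");

        Assert.True(fileSystem.Directory.Exists("/config/backups"));
    }
}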

* Finished porting DirectoryService to use the mocked filesystem implementation.

* Added a null check

* Added a null check

* Finished all unit tests for DirectoryService.

* Some miscellaneous code cleanup.

* Fixed some bugs introduced by refactoring the scan loop.

* Implemented CleanupService testing and refactored more of DirectoryService to be non-static.

Fixed a bug where cover-file cleanup wasn't finding files due to a faulty regex.

* Fixed an issue in CleanupBackup() where we weren't properly selecting database files older than 30 days (see the sketch below). Finished the CleanupService tests.
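
The essence of that fix as a standalone sketch (the helper name is hypothetical; compare the corrected Where clause in CleanupBackups() further down):

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

public static class BackupExpiry
{
    // A backup is expired when it was created BEFORE the cutoff date;
    // the pre-fix code compared with '>' and selected the newest files instead.
    public static List<FileInfo> GetExpiredBackups(IEnumerable<string> backupFiles, int dayThreshold = 30)
    {
        var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold));
        return backupFiles
            .Select(f => new FileInfo(f))
            .Where(f => f.CreationTime < deltaTime)
            .ToList();
    }
}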

* Refactored Flatten and RemoveNonImages into DirectoryService to make CacheService testable.

* Finally have CacheService tested. Rewrote GetCachedPagePath() to be much more straightforward and performant.

* Updated DefaultParserTests.cs to contain all existing tests and follow the new test layout format.

* All tests fixed up

Commit bbe8f800f6 (parent bf1876ff44)
Joseph Milazzo, 2021-12-05 10:58:53 -06:00, committed via GitHub
115 changed files with 6734 additions and 5370 deletions

BackupService.cs

@@ -4,204 +4,166 @@ using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities.Enums;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.SignalR;
using Hangfire;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks
namespace API.Services.Tasks;
public interface IBackupService
{
public class BackupService : IBackupService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<BackupService> _logger;
private readonly IDirectoryService _directoryService;
private readonly IHubContext<MessageHub> _messageHub;
private readonly IList<string> _backupFiles;
public BackupService(IUnitOfWork unitOfWork, ILogger<BackupService> logger,
IDirectoryService directoryService, IConfiguration config, IHubContext<MessageHub> messageHub)
{
_unitOfWork = unitOfWork;
_logger = logger;
_directoryService = directoryService;
_messageHub = messageHub;
var maxRollingFiles = config.GetMaxRollingFiles();
var loggingSection = config.GetLoggingFileName();
var files = LogFiles(maxRollingFiles, loggingSection);
_backupFiles = new List<string>()
{
"appsettings.json",
"Hangfire.db", // This is not used atm
"Hangfire-log.db", // This is not used atm
"kavita.db",
"kavita.db-shm", // This wont always be there
"kavita.db-wal" // This wont always be there
};
foreach (var file in files.Select(f => (new FileInfo(f)).Name).ToList())
{
_backupFiles.Add(file);
}
}
public IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName)
{
var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
var fi = new FileInfo(logFileName);
var files = maxRollingFiles > 0
? DirectoryService.GetFiles(DirectoryService.LogDirectory, $@"{Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log")
: new[] {"kavita.log"};
return files;
}
/// <summary>
/// Will back up anything that needs to be backed up. This includes logs, settings files, and the bare minimum of cover images (just locked and first cover).
/// </summary>
[AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)]
public async Task BackupDatabase()
{
_logger.LogInformation("Beginning backup of Database at {BackupTime}", DateTime.Now);
var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
_logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory);
if (!DirectoryService.ExistOrCreate(backupDirectory))
{
_logger.LogCritical("Could not write to {BackupDirectory}; aborting backup", backupDirectory);
return;
}
await SendProgress(0F);
var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}".Replace("/", "_").Replace(":", "_");
var zipPath = Path.Join(backupDirectory, $"kavita_backup_{dateString}.zip");
if (File.Exists(zipPath))
{
_logger.LogInformation("{ZipFile} already exists, aborting", zipPath);
return;
}
var tempDirectory = Path.Join(DirectoryService.TempDirectory, dateString);
DirectoryService.ExistOrCreate(tempDirectory);
DirectoryService.ClearDirectory(tempDirectory);
_directoryService.CopyFilesToDirectory(
_backupFiles.Select(file => Path.Join(DirectoryService.ConfigDirectory, file)).ToList(), tempDirectory);
await SendProgress(0.25F);
await CopyCoverImagesToBackupDirectory(tempDirectory);
await SendProgress(0.75F);
try
{
ZipFile.CreateFromDirectory(tempDirectory, zipPath);
}
catch (AggregateException ex)
{
_logger.LogError(ex, "There was an issue when archiving library backup");
}
DirectoryService.ClearAndDeleteDirectory(tempDirectory);
_logger.LogInformation("Database backup completed");
await SendProgress(1F);
}
private async Task CopyCoverImagesToBackupDirectory(string tempDirectory)
{
var outputTempDir = Path.Join(tempDirectory, "covers");
DirectoryService.ExistOrCreate(outputTempDir);
try
{
var seriesImages = await _unitOfWork.SeriesRepository.GetLockedCoverImagesAsync();
_directoryService.CopyFilesToDirectory(
seriesImages.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir);
var collectionTags = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync();
_directoryService.CopyFilesToDirectory(
collectionTags.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir);
var chapterImages = await _unitOfWork.ChapterRepository.GetCoverImagesForLockedChaptersAsync();
_directoryService.CopyFilesToDirectory(
chapterImages.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir);
}
catch (IOException)
{
// Swallow exception. This can be a duplicate cover being copied, as chapters and volumes can share the same file.
}
if (!DirectoryService.GetFiles(outputTempDir).Any())
{
DirectoryService.ClearAndDeleteDirectory(outputTempDir);
}
}
private async Task SendProgress(float progress)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.BackupDatabaseProgress,
MessageFactory.BackupDatabaseProgressEvent(progress));
}
/// <summary>
/// Removes Database backups older than 30 days. If all backups are older than 30 days, the latest is kept.
/// </summary>
public void CleanupBackups()
{
const int dayThreshold = 30;
_logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now);
var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
if (!_directoryService.Exists(backupDirectory)) return;
var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold));
var allBackups = DirectoryService.GetFiles(backupDirectory).ToList();
var expiredBackups = allBackups.Select(filename => new FileInfo(filename))
.Where(f => f.CreationTime > deltaTime)
.ToList();
if (expiredBackups.Count == allBackups.Count)
{
_logger.LogInformation("All expired backups are older than {Threshold} days. Removing all but last backup", dayThreshold);
var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList();
for (var i = 1; i < toDelete.Count; i++)
{
try
{
toDelete[i].Delete();
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue deleting {FileName}", toDelete[i].Name);
}
}
}
else
{
foreach (var file in expiredBackups)
{
try
{
file.Delete();
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue deleting {FileName}", file.Name);
}
}
}
_logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now);
}
}
Task BackupDatabase();
/// <summary>
/// Returns a list of full paths of the logs files detailed in <see cref="IConfiguration"/>.
/// </summary>
/// <param name="maxRollingFiles"></param>
/// <param name="logFileName"></param>
/// <returns></returns>
IEnumerable<string> GetLogFiles(int maxRollingFiles, string logFileName);
}
public class BackupService : IBackupService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<BackupService> _logger;
private readonly IDirectoryService _directoryService;
private readonly IHubContext<MessageHub> _messageHub;
private readonly IList<string> _backupFiles;
public BackupService(ILogger<BackupService> logger, IUnitOfWork unitOfWork,
IDirectoryService directoryService, IConfiguration config, IHubContext<MessageHub> messageHub)
{
_unitOfWork = unitOfWork;
_logger = logger;
_directoryService = directoryService;
_messageHub = messageHub;
var maxRollingFiles = config.GetMaxRollingFiles();
var loggingSection = config.GetLoggingFileName();
var files = GetLogFiles(maxRollingFiles, loggingSection);
_backupFiles = new List<string>()
{
"appsettings.json",
"Hangfire.db", // This is not used atm
"Hangfire-log.db", // This is not used atm
"kavita.db",
"kavita.db-shm", // This wont always be there
"kavita.db-wal" // This wont always be there
};
foreach (var file in files.Select(f => (_directoryService.FileSystem.FileInfo.FromFileName(f)).Name).ToList())
{
_backupFiles.Add(file);
}
}
public IEnumerable<string> GetLogFiles(int maxRollingFiles, string logFileName)
{
var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
var fi = _directoryService.FileSystem.FileInfo.FromFileName(logFileName);
var files = maxRollingFiles > 0
? _directoryService.GetFiles(_directoryService.LogDirectory,
$@"{_directoryService.FileSystem.Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log")
: new[] {"kavita.log"};
return files;
}
/// <summary>
/// Will back up anything that needs to be backed up. This includes logs, settings files, and the bare minimum of cover images (just locked and first cover).
/// </summary>
[AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)]
public async Task BackupDatabase()
{
_logger.LogInformation("Beginning backup of Database at {BackupTime}", DateTime.Now);
var backupDirectory = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Value;
_logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory);
if (!_directoryService.ExistOrCreate(backupDirectory))
{
_logger.LogCritical("Could not write to {BackupDirectory}; aborting backup", backupDirectory);
return;
}
await SendProgress(0F);
var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}".Replace("/", "_").Replace(":", "_");
var zipPath = _directoryService.FileSystem.Path.Join(backupDirectory, $"kavita_backup_{dateString}.zip");
if (File.Exists(zipPath))
{
_logger.LogInformation("{ZipFile} already exists, aborting", zipPath);
return;
}
var tempDirectory = Path.Join(_directoryService.TempDirectory, dateString);
_directoryService.ExistOrCreate(tempDirectory);
_directoryService.ClearDirectory(tempDirectory);
_directoryService.CopyFilesToDirectory(
_backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)).ToList(), tempDirectory);
await SendProgress(0.25F);
await CopyCoverImagesToBackupDirectory(tempDirectory);
await SendProgress(0.75F);
try
{
ZipFile.CreateFromDirectory(tempDirectory, zipPath);
}
catch (AggregateException ex)
{
_logger.LogError(ex, "There was an issue when archiving library backup");
}
_directoryService.ClearAndDeleteDirectory(tempDirectory);
_logger.LogInformation("Database backup completed");
await SendProgress(1F);
}
private async Task CopyCoverImagesToBackupDirectory(string tempDirectory)
{
var outputTempDir = Path.Join(tempDirectory, "covers");
_directoryService.ExistOrCreate(outputTempDir);
try
{
var seriesImages = await _unitOfWork.SeriesRepository.GetLockedCoverImagesAsync();
_directoryService.CopyFilesToDirectory(
seriesImages.Select(s => _directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, s)), outputTempDir);
var collectionTags = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync();
_directoryService.CopyFilesToDirectory(
collectionTags.Select(s => _directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, s)), outputTempDir);
var chapterImages = await _unitOfWork.ChapterRepository.GetCoverImagesForLockedChaptersAsync();
_directoryService.CopyFilesToDirectory(
chapterImages.Select(s => _directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, s)), outputTempDir);
}
catch (IOException)
{
// Swallow exception. This can be a duplicate cover being copied, as chapters and volumes can share the same file.
}
if (!_directoryService.GetFiles(outputTempDir).Any())
{
_directoryService.ClearAndDeleteDirectory(outputTempDir);
}
}
private async Task SendProgress(float progress)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.BackupDatabaseProgress,
MessageFactory.BackupDatabaseProgressEvent(progress));
}
}
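
With everything routed through IDirectoryService, lookups like GetLogFiles() can now be verified against an in-memory tree. A hedged sketch of the rolling-log lookup the method performs (the /logs path and the DirectoryService constructor are assumptions):

using System.Collections.Generic;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

public class LogFileLookupTests
{
    [Fact]
    public void GetFiles_WithRollingLogRegex_MatchesNumberedLogs()
    {
        var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
        {
            { "/logs/kavita.log", new MockFileData(string.Empty) },
            { "/logs/kavita1.log", new MockFileData(string.Empty) },
            { "/logs/other.txt", new MockFileData(string.Empty) }
        });
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

        // Same shape of pattern GetLogFiles() builds: base name + optional digits + ".log"
        var files = ds.GetFiles("/logs", @"kavita\d*\.log");

        Assert.Equal(2, files.Count());
    }
}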

CleanupService.cs

@@ -1,7 +1,8 @@
using System.IO;
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Interfaces;
using API.Interfaces.Services;
using API.Data;
using API.Entities.Enums;
using API.SignalR;
using Hangfire;
using Microsoft.AspNetCore.SignalR;
@@ -9,32 +10,35 @@ using Microsoft.Extensions.Logging;
namespace API.Services.Tasks
{
public interface ICleanupService
{
Task Cleanup();
void CleanupCacheDirectory();
Task DeleteSeriesCoverImages();
Task DeleteChapterCoverImages();
Task DeleteTagCoverImages();
Task CleanupBackups();
}
/// <summary>
/// Cleans up after operations on a recurring basis
/// </summary>
public class CleanupService : ICleanupService
{
private readonly ICacheService _cacheService;
private readonly ILogger<CleanupService> _logger;
private readonly IBackupService _backupService;
private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub;
private readonly IDirectoryService _directoryService;
public CleanupService(ICacheService cacheService, ILogger<CleanupService> logger,
IBackupService backupService, IUnitOfWork unitOfWork, IHubContext<MessageHub> messageHub)
public CleanupService(ILogger<CleanupService> logger,
IUnitOfWork unitOfWork, IHubContext<MessageHub> messageHub,
IDirectoryService directoryService)
{
_cacheService = cacheService;
_logger = logger;
_backupService = backupService;
_unitOfWork = unitOfWork;
_messageHub = messageHub;
_directoryService = directoryService;
}
public void CleanupCacheDirectory()
{
_logger.LogInformation("Cleaning cache directory");
_cacheService.Cleanup();
}
/// <summary>
/// Cleans up Temp, cache, deleted cover images, and old database backups
@@ -45,12 +49,12 @@ namespace API.Services.Tasks
_logger.LogInformation("Starting Cleanup");
await SendProgress(0F);
_logger.LogInformation("Cleaning temp directory");
DirectoryService.ClearDirectory(DirectoryService.TempDirectory);
_directoryService.ClearDirectory(_directoryService.TempDirectory);
await SendProgress(0.1F);
CleanupCacheDirectory();
await SendProgress(0.25F);
_logger.LogInformation("Cleaning old database backups");
_backupService.CleanupBackups();
await CleanupBackups();
await SendProgress(0.50F);
_logger.LogInformation("Cleaning deleted cover images");
await DeleteSeriesCoverImages();
@@ -68,40 +72,84 @@ namespace API.Services.Tasks
MessageFactory.CleanupProgressEvent(progress));
}
private async Task DeleteSeriesCoverImages()
/// <summary>
/// Removes all series images that are not in the database. They must follow the <see cref="ImageService.SeriesCoverImageRegex"/> filename pattern.
/// </summary>
public async Task DeleteSeriesCoverImages()
{
var images = await _unitOfWork.SeriesRepository.GetAllCoverImagesAsync();
var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.SeriesCoverImageRegex);
foreach (var file in files)
{
if (images.Contains(Path.GetFileName(file))) continue;
File.Delete(file);
}
var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.SeriesCoverImageRegex);
_directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file))));
}
private async Task DeleteChapterCoverImages()
/// <summary>
/// Removes all chapter/volume images that are not in the database. They must follow the <see cref="ImageService.ChapterCoverImageRegex"/> filename pattern.
/// </summary>
public async Task DeleteChapterCoverImages()
{
var images = await _unitOfWork.ChapterRepository.GetAllCoverImagesAsync();
var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.ChapterCoverImageRegex);
foreach (var file in files)
{
if (images.Contains(Path.GetFileName(file))) continue;
File.Delete(file);
}
var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.ChapterCoverImageRegex);
_directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file))));
}
private async Task DeleteTagCoverImages()
/// <summary>
/// Removes all collection tag images that are not in the database. They must follow the <see cref="ImageService.CollectionTagCoverImageRegex"/> filename pattern.
/// </summary>
public async Task DeleteTagCoverImages()
{
var images = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync();
var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.CollectionTagCoverImageRegex);
foreach (var file in files)
{
if (images.Contains(Path.GetFileName(file))) continue;
File.Delete(file);
var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.CollectionTagCoverImageRegex);
_directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file))));
}
/// <summary>
/// Removes all files and directories in the cache directory
/// </summary>
public void CleanupCacheDirectory()
{
_logger.LogInformation("Performing cleanup of Cache directory");
_directoryService.ExistOrCreate(_directoryService.CacheDirectory);
try
{
_directoryService.ClearDirectory(_directoryService.CacheDirectory);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup");
}
_logger.LogInformation("Cache directory purged");
}
/// <summary>
/// Removes Database backups older than 30 days. If all backups are older than 30 days, the latest is kept.
/// </summary>
public async Task CleanupBackups()
{
const int dayThreshold = 30;
_logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now);
var backupDirectory =
(await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Value;
if (!_directoryService.Exists(backupDirectory)) return;
var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold));
var allBackups = _directoryService.GetFiles(backupDirectory).ToList();
var expiredBackups = allBackups.Select(filename => _directoryService.FileSystem.FileInfo.FromFileName(filename))
.Where(f => f.CreationTime < deltaTime)
.ToList();
if (expiredBackups.Count == allBackups.Count)
{
_logger.LogInformation("All expired backups are older than {Threshold} days. Removing all but last backup", dayThreshold);
var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList();
_directoryService.DeleteFiles(toDelete.Take(toDelete.Count - 1).Select(f => f.FullName));
}
else
{
_directoryService.DeleteFiles(expiredBackups.Select(f => f.FullName));
}
_logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now);
}
}
}
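
A hedged sketch of the kind of CleanupService test this refactor enables; the cover directory, the regex match, and the NSubstitute-style repository stubbing are assumptions, not the actual test code:

[Fact]
public async Task DeleteSeriesCoverImages_ShouldRemoveOrphanedCovers()
{
    // Assumes the service's CoverImageDirectory resolves to /covers here
    var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
    {
        { "/covers/series_1.jpg", new MockFileData(string.Empty) },
        { "/covers/series_2.jpg", new MockFileData(string.Empty) }
    });
    var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

    var unitOfWork = Substitute.For<IUnitOfWork>();
    // Only series_1 is still referenced in the database
    unitOfWork.SeriesRepository.GetAllCoverImagesAsync().Returns(new[] { "series_1.jpg" });

    var sut = new CleanupService(Substitute.For<ILogger<CleanupService>>(),
        unitOfWork, Substitute.For<IHubContext<MessageHub>>(), ds);

    await sut.DeleteSeriesCoverImages();

    Assert.True(fileSystem.FileExists("/covers/series_1.jpg"));
    Assert.False(fileSystem.FileExists("/covers/series_2.jpg"));
}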

ParseScannedFiles.cs

@@ -7,7 +7,6 @@ using System.Linq;
using API.Data.Metadata;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces.Services;
using API.Parser;
using Microsoft.Extensions.Logging;
@@ -24,25 +23,26 @@ namespace API.Services.Tasks.Scanner
public class ParseScannedFiles
{
private readonly ConcurrentDictionary<ParsedSeries, List<ParserInfo>> _scannedSeries;
private readonly IBookService _bookService;
private readonly ILogger _logger;
private readonly IArchiveService _archiveService;
private readonly IDirectoryService _directoryService;
private readonly IReadingItemService _readingItemService;
private readonly DefaultParser _defaultParser;
/// <summary>
/// An instance of a pipeline for processing files and returning a Map of Series -> ParserInfos.
/// Each instance is independent of other threads, so there is no crossover.
/// </summary>
/// <param name="bookService"></param>
/// <param name="logger"></param>
public ParseScannedFiles(IBookService bookService, ILogger logger, IArchiveService archiveService,
IDirectoryService directoryService)
/// <param name="logger">Logger of the parent class that invokes this</param>
/// <param name="directoryService">Directory Service</param>
/// <param name="readingItemService">ReadingItemService Service for extracting information on a number of formats</param>
public ParseScannedFiles(ILogger logger, IDirectoryService directoryService,
IReadingItemService readingItemService)
{
_bookService = bookService;
_logger = logger;
_archiveService = archiveService;
_directoryService = directoryService;
_readingItemService = readingItemService;
_scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
_defaultParser = new DefaultParser(_directoryService);
}
/// <summary>
@@ -63,12 +63,12 @@ namespace API.Services.Tasks.Scanner
{
if (Parser.Parser.IsEpub(path))
{
return _bookService.GetComicInfo(path);
return _readingItemService.GetComicInfo(path, MangaFormat.Epub);
}
if (Parser.Parser.IsComicInfoExtension(path))
{
return _archiveService.GetComicInfo(path);
return _readingItemService.GetComicInfo(path, MangaFormat.Archive);
}
return null;
}
@@ -82,15 +82,15 @@ namespace API.Services.Tasks.Scanner
/// <param name="type">Library type to determine parsing to perform</param>
private void ProcessFile(string path, string rootPath, LibraryType type)
{
ParserInfo info;
ParserInfo info = null;
if (Parser.Parser.IsEpub(path))
{
info = _bookService.ParseInfo(path);
info = _readingItemService.Parse(path, rootPath, type);
}
else
{
info = Parser.Parser.Parse(path, rootPath, type);
info = _readingItemService.Parse(path, rootPath, type);
}
// If we couldn't match, log. But don't log if the file parses as a cover image
@@ -105,8 +105,8 @@ namespace API.Services.Tasks.Scanner
if (Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume)
{
info = Parser.Parser.Parse(path, rootPath, type);
var info2 = _bookService.ParseInfo(path);
info = _defaultParser.Parse(path, rootPath, LibraryType.Book); // TODO: Why do I reparse?
var info2 = _readingItemService.Parse(path, rootPath, type);
info.Merge(info2);
}

ScannerService.cs

@@ -12,8 +12,6 @@ using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services.Tasks.Scanner;
using API.SignalR;
@@ -22,33 +20,42 @@ using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks;
public interface IScannerService
{
/// <summary>
/// Given a library id, scans folders for said library. Parses files and generates DB updates. Will overwrite
/// cover images if forceUpdate is true.
/// </summary>
/// <param name="libraryId">Library to scan against</param>
Task ScanLibrary(int libraryId);
Task ScanLibraries();
Task ScanSeries(int libraryId, int seriesId, CancellationToken token);
}
public class ScannerService : IScannerService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<ScannerService> _logger;
private readonly IArchiveService _archiveService;
private readonly IMetadataService _metadataService;
private readonly IBookService _bookService;
private readonly ICacheService _cacheService;
private readonly IHubContext<MessageHub> _messageHub;
private readonly IFileService _fileService;
private readonly IDirectoryService _directoryService;
private readonly IReadingItemService _readingItemService;
private readonly NaturalSortComparer _naturalSort = new ();
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService,
IMetadataService metadataService, IBookService bookService, ICacheService cacheService, IHubContext<MessageHub> messageHub,
IFileService fileService, IDirectoryService directoryService)
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger,
IMetadataService metadataService, ICacheService cacheService, IHubContext<MessageHub> messageHub,
IFileService fileService, IDirectoryService directoryService, IReadingItemService readingItemService)
{
_unitOfWork = unitOfWork;
_logger = logger;
_archiveService = archiveService;
_metadataService = metadataService;
_bookService = bookService;
_cacheService = cacheService;
_messageHub = messageHub;
_fileService = fileService;
_directoryService = directoryService;
_readingItemService = readingItemService;
}
[DisableConcurrentExecution(timeoutInSeconds: 360)]
@@ -63,16 +70,16 @@ public class ScannerService : IScannerService
var folderPaths = library.Folders.Select(f => f.Path).ToList();
// Check if any of the folder roots are not available (i.e. disconnected from the network, etc.) and fail if any of them are
if (folderPaths.Any(f => !DirectoryService.IsDriveMounted(f)))
if (folderPaths.Any(f => !_directoryService.IsDriveMounted(f)))
{
_logger.LogError("Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted");
return;
}
var dirs = DirectoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList());
var dirs = _directoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList());
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
var scanner = new ParseScannedFiles(_bookService, _logger, _archiveService, _directoryService);
var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService);
var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime);
// Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
@@ -120,7 +127,7 @@ public class ScannerService : IScannerService
}
_logger.LogInformation("{SeriesName} has bad naming convention, forcing rescan at a higher directory", series.OriginalName);
scanner = new ParseScannedFiles(_bookService, _logger, _archiveService, _directoryService);
scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService);
parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
totalFiles += totalFiles2;
scanElapsedTime += scanElapsedTime2;
@@ -208,7 +215,7 @@ public class ScannerService : IScannerService
}
// Check if any of the folder roots are not available (i.e. disconnected from the network, etc.) and fail if any of them are
if (library.Folders.Any(f => !DirectoryService.IsDriveMounted(f.Path)))
if (library.Folders.Any(f => !_directoryService.IsDriveMounted(f.Path)))
{
_logger.LogError("Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted");
return;
@@ -218,7 +225,7 @@ public class ScannerService : IScannerService
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(libraryId, 0));
var scanner = new ParseScannedFiles(_bookService, _logger, _archiveService, _directoryService);
var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService);
var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime);
foreach (var folderPath in library.Folders)
@@ -618,28 +625,7 @@ public class ScannerService : IScannerService
private MangaFile CreateMangaFile(ParserInfo info)
{
var pages = 0;
switch (info.Format)
{
case MangaFormat.Archive:
{
pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
break;
}
case MangaFormat.Pdf:
case MangaFormat.Epub:
{
pages = _bookService.GetNumberOfPages(info.FullFilePath);
break;
}
case MangaFormat.Image:
{
pages = 1;
break;
}
}
return DbFactory.MangaFile(info.FullFilePath, info.Format, pages);
return DbFactory.MangaFile(info.FullFilePath, info.Format, _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format));
}
private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info)
@@ -650,23 +636,7 @@ public class ScannerService : IScannerService
{
existingFile.Format = info.Format;
if (!_fileService.HasFileBeenModifiedSince(existingFile.FilePath, existingFile.LastModified) && existingFile.Pages != 0) return;
switch (existingFile.Format)
{
case MangaFormat.Epub:
case MangaFormat.Pdf:
existingFile.Pages = _bookService.GetNumberOfPages(info.FullFilePath);
break;
case MangaFormat.Image:
existingFile.Pages = 1;
break;
case MangaFormat.Unknown:
existingFile.Pages = 0;
break;
case MangaFormat.Archive:
existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
break;
}
//existingFile.LastModified = File.GetLastWriteTime(info.FullFilePath); // This is messing up our logic on when last modified
existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format);
}
else
{

StatsService.cs

@@ -2,108 +2,111 @@
using System.Net.Http;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using API.Data;
using API.DTOs.Stats;
using API.Entities.Enums;
using API.Interfaces;
using API.Interfaces.Services;
using Flurl.Http;
using Kavita.Common.EnvironmentInfo;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks
namespace API.Services.Tasks;
public interface IStatsService
{
public class StatsService : IStatsService
Task Send();
Task<ServerInfoDto> GetServerInfo();
}
public class StatsService : IStatsService
{
private readonly ILogger<StatsService> _logger;
private readonly IUnitOfWork _unitOfWork;
private const string ApiUrl = "https://stats.kavitareader.com";
public StatsService(ILogger<StatsService> logger, IUnitOfWork unitOfWork)
{
private readonly ILogger<StatsService> _logger;
private readonly IUnitOfWork _unitOfWork;
private const string ApiUrl = "https://stats.kavitareader.com";
_logger = logger;
_unitOfWork = unitOfWork;
public StatsService(ILogger<StatsService> logger, IUnitOfWork unitOfWork)
FlurlHttp.ConfigureClient(ApiUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
}
/// <summary>
/// Due to all instances firing this at the same time, we can DDoS our own server. When fired, this task schedules the send to run
/// at a random point over a 6-hour spread
/// </summary>
public async Task Send()
{
var allowStatCollection = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).AllowStatCollection;
if (!allowStatCollection)
{
_logger = logger;
_unitOfWork = unitOfWork;
FlurlHttp.ConfigureClient(ApiUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
return;
}
/// <summary>
/// Due to all instances firing this at the same time, we can DDoS our own server. When fired, this task schedules the send to run
/// at a random point over a 6-hour spread
/// </summary>
public async Task Send()
await SendData();
}
/// <summary>
/// This must be public for Hangfire. Do not call this directly.
/// </summary>
// ReSharper disable once MemberCanBePrivate.Global
public async Task SendData()
{
var data = await GetServerInfo();
await SendDataToStatsServer(data);
}
private async Task SendDataToStatsServer(ServerInfoDto data)
{
var responseContent = string.Empty;
try
{
var allowStatCollection = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).AllowStatCollection;
if (!allowStatCollection)
var response = await (ApiUrl + "/api/v2/stats")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-api-key", "MsnvA2DfQqxSK5jh")
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(30))
.PostJsonAsync(data);
if (response.StatusCode != StatusCodes.Status200OK)
{
return;
}
await SendData();
}
/// <summary>
/// This must be public for Hangfire. Do not call this directly.
/// </summary>
// ReSharper disable once MemberCanBePrivate.Global
public async Task SendData()
{
var data = await GetServerInfo();
await SendDataToStatsServer(data);
}
private async Task SendDataToStatsServer(ServerInfoDto data)
{
var responseContent = string.Empty;
try
{
var response = await (ApiUrl + "/api/v2/stats")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-api-key", "MsnvA2DfQqxSK5jh")
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(30))
.PostJsonAsync(data);
if (response.StatusCode != StatusCodes.Status200OK)
{
_logger.LogError("KavitaStats did not respond successfully. {Content}", response);
}
}
catch (HttpRequestException e)
{
var info = new
{
dataSent = data,
response = responseContent
};
_logger.LogError(e, "KavitaStats did not respond successfully. {Content}", info);
}
catch (Exception e)
{
_logger.LogError(e, "An error happened during the request to KavitaStats");
_logger.LogError("KavitaStats did not respond successfully. {Content}", response);
}
}
public async Task<ServerInfoDto> GetServerInfo()
catch (HttpRequestException e)
{
var installId = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallId);
var serverInfo = new ServerInfoDto
var info = new
{
InstallId = installId.Value,
Os = RuntimeInformation.OSDescription,
KavitaVersion = BuildInfo.Version.ToString(),
DotnetVersion = Environment.Version.ToString(),
IsDocker = new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker,
NumOfCores = Math.Max(Environment.ProcessorCount, 1)
dataSent = data,
response = responseContent
};
return serverInfo;
_logger.LogError(e, "KavitaStats did not respond successfully. {Content}", info);
}
catch (Exception e)
{
_logger.LogError(e, "An error happened during the request to KavitaStats");
}
}
public async Task<ServerInfoDto> GetServerInfo()
{
var installId = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallId);
var serverInfo = new ServerInfoDto
{
InstallId = installId.Value,
Os = RuntimeInformation.OSDescription,
KavitaVersion = BuildInfo.Version.ToString(),
DotnetVersion = Environment.Version.ToString(),
IsDocker = new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker,
NumOfCores = Math.Max(Environment.ProcessorCount, 1)
};
return serverInfo;
}
}

VersionUpdaterService.cs

@@ -4,7 +4,6 @@ using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using API.DTOs.Update;
using API.Interfaces.Services;
using API.SignalR;
using API.SignalR.Presence;
using Flurl.Http;
@@ -15,149 +14,155 @@ using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks
namespace API.Services.Tasks;
internal class GithubReleaseMetadata
{
internal class GithubReleaseMetadata
{
/// <summary>
/// Name of the Tag
/// <example>v0.4.3</example>
/// </summary>
// ReSharper disable once InconsistentNaming
public string Tag_Name { get; init; }
/// <summary>
/// Name of the Release
/// </summary>
public string Name { get; init; }
/// <summary>
/// Body of the Release
/// </summary>
public string Body { get; init; }
/// <summary>
/// Url of the release on Github
/// </summary>
// ReSharper disable once InconsistentNaming
public string Html_Url { get; init; }
/// <summary>
/// Date Release was Published
/// </summary>
// ReSharper disable once InconsistentNaming
public string Published_At { get; init; }
}
/// <summary>
/// Name of the Tag
/// <example>v0.4.3</example>
/// </summary>
// ReSharper disable once InconsistentNaming
public string Tag_Name { get; init; }
/// <summary>
/// Name of the Release
/// </summary>
public string Name { get; init; }
/// <summary>
/// Body of the Release
/// </summary>
public string Body { get; init; }
/// <summary>
/// Url of the release on Github
/// </summary>
// ReSharper disable once InconsistentNaming
public string Html_Url { get; init; }
/// <summary>
/// Date Release was Published
/// </summary>
// ReSharper disable once InconsistentNaming
public string Published_At { get; init; }
}
public class UntrustedCertClientFactory : DefaultHttpClientFactory
{
public override HttpMessageHandler CreateMessageHandler() {
return new HttpClientHandler {
ServerCertificateCustomValidationCallback = (_, _, _, _) => true
};
}
}
public class VersionUpdaterService : IVersionUpdaterService
{
private readonly ILogger<VersionUpdaterService> _logger;
private readonly IHubContext<MessageHub> _messageHub;
private readonly IPresenceTracker _tracker;
private readonly Markdown _markdown = new MarkdownDeep.Markdown();
#pragma warning disable S1075
private static readonly string GithubLatestReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases/latest";
private static readonly string GithubAllReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases";
#pragma warning restore S1075
public VersionUpdaterService(ILogger<VersionUpdaterService> logger, IHubContext<MessageHub> messageHub, IPresenceTracker tracker)
{
_logger = logger;
_messageHub = messageHub;
_tracker = tracker;
FlurlHttp.ConfigureClient(GithubLatestReleasesUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
FlurlHttp.ConfigureClient(GithubAllReleasesUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
}
/// <summary>
/// Fetches the latest release from Github
/// </summary>
public async Task<UpdateNotificationDto> CheckForUpdate()
{
var update = await GetGithubRelease();
return CreateDto(update);
}
public async Task<IEnumerable<UpdateNotificationDto>> GetAllReleases()
{
var updates = await GetGithubReleases();
return updates.Select(CreateDto);
}
private UpdateNotificationDto CreateDto(GithubReleaseMetadata update)
{
if (update == null || string.IsNullOrEmpty(update.Tag_Name)) return null;
var updateVersion = new Version(update.Tag_Name.Replace("v", string.Empty));
var currentVersion = BuildInfo.Version.ToString();
if (updateVersion.Revision == -1)
{
currentVersion = currentVersion.Substring(0, currentVersion.LastIndexOf(".", StringComparison.Ordinal));
}
return new UpdateNotificationDto()
{
CurrentVersion = currentVersion,
UpdateVersion = updateVersion.ToString(),
UpdateBody = _markdown.Transform(update.Body.Trim()),
UpdateTitle = update.Name,
UpdateUrl = update.Html_Url,
IsDocker = new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker,
PublishDate = update.Published_At
};
}
public async Task PushUpdate(UpdateNotificationDto update)
{
if (update == null) return;
var admins = await _tracker.GetOnlineAdmins();
var updateVersion = new Version(update.CurrentVersion);
if (BuildInfo.Version < updateVersion)
{
_logger.LogInformation("Server is out of date. Current: {CurrentVersion}. Available: {AvailableUpdate}", BuildInfo.Version, updateVersion);
await SendEvent(update, admins);
}
else if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") == Environments.Development)
{
_logger.LogInformation("Server is up to date. Current: {CurrentVersion}", BuildInfo.Version);
await SendEvent(update, admins);
}
}
private async Task SendEvent(UpdateNotificationDto update, IReadOnlyList<string> admins)
{
await _messageHub.Clients.Users(admins).SendAsync(SignalREvents.UpdateAvailable, MessageFactory.UpdateVersionEvent(update));
}
private static async Task<GithubReleaseMetadata> GetGithubRelease()
{
var update = await GithubLatestReleasesUrl
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.GetJsonAsync<GithubReleaseMetadata>();
return update;
}
private static async Task<IEnumerable<GithubReleaseMetadata>> GetGithubReleases()
{
var update = await GithubAllReleasesUrl
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.GetJsonAsync<IEnumerable<GithubReleaseMetadata>>();
return update;
}
public class UntrustedCertClientFactory : DefaultHttpClientFactory
{
public override HttpMessageHandler CreateMessageHandler() {
return new HttpClientHandler {
ServerCertificateCustomValidationCallback = (_, _, _, _) => true
};
}
}
public interface IVersionUpdaterService
{
Task<UpdateNotificationDto> CheckForUpdate();
Task PushUpdate(UpdateNotificationDto update);
Task<IEnumerable<UpdateNotificationDto>> GetAllReleases();
}
public class VersionUpdaterService : IVersionUpdaterService
{
private readonly ILogger<VersionUpdaterService> _logger;
private readonly IHubContext<MessageHub> _messageHub;
private readonly IPresenceTracker _tracker;
private readonly Markdown _markdown = new MarkdownDeep.Markdown();
#pragma warning disable S1075
private static readonly string GithubLatestReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases/latest";
private static readonly string GithubAllReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases";
#pragma warning restore S1075
public VersionUpdaterService(ILogger<VersionUpdaterService> logger, IHubContext<MessageHub> messageHub, IPresenceTracker tracker)
{
_logger = logger;
_messageHub = messageHub;
_tracker = tracker;
FlurlHttp.ConfigureClient(GithubLatestReleasesUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
FlurlHttp.ConfigureClient(GithubAllReleasesUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
}
/// <summary>
/// Fetches the latest release from Github
/// </summary>
public async Task<UpdateNotificationDto> CheckForUpdate()
{
var update = await GetGithubRelease();
return CreateDto(update);
}
public async Task<IEnumerable<UpdateNotificationDto>> GetAllReleases()
{
var updates = await GetGithubReleases();
return updates.Select(CreateDto);
}
private UpdateNotificationDto CreateDto(GithubReleaseMetadata update)
{
if (update == null || string.IsNullOrEmpty(update.Tag_Name)) return null;
var updateVersion = new Version(update.Tag_Name.Replace("v", string.Empty));
var currentVersion = BuildInfo.Version.ToString();
if (updateVersion.Revision == -1)
{
currentVersion = currentVersion.Substring(0, currentVersion.LastIndexOf(".", StringComparison.Ordinal));
}
return new UpdateNotificationDto()
{
CurrentVersion = currentVersion,
UpdateVersion = updateVersion.ToString(),
UpdateBody = _markdown.Transform(update.Body.Trim()),
UpdateTitle = update.Name,
UpdateUrl = update.Html_Url,
IsDocker = new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker,
PublishDate = update.Published_At
};
}
public async Task PushUpdate(UpdateNotificationDto update)
{
if (update == null) return;
var admins = await _tracker.GetOnlineAdmins();
var updateVersion = new Version(update.CurrentVersion);
if (BuildInfo.Version < updateVersion)
{
_logger.LogInformation("Server is out of date. Current: {CurrentVersion}. Available: {AvailableUpdate}", BuildInfo.Version, updateVersion);
await SendEvent(update, admins);
}
else if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") == Environments.Development)
{
_logger.LogInformation("Server is up to date. Current: {CurrentVersion}", BuildInfo.Version);
await SendEvent(update, admins);
}
}
private async Task SendEvent(UpdateNotificationDto update, IReadOnlyList<string> admins)
{
await _messageHub.Clients.Users(admins).SendAsync(SignalREvents.UpdateAvailable, MessageFactory.UpdateVersionEvent(update));
}
private static async Task<GithubReleaseMetadata> GetGithubRelease()
{
var update = await GithubLatestReleasesUrl
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.GetJsonAsync<GithubReleaseMetadata>();
return update;
}
private static async Task<IEnumerable<GithubReleaseMetadata>> GetGithubReleases()
{
var update = await GithubAllReleasesUrl
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.GetJsonAsync<IEnumerable<GithubReleaseMetadata>>();
return update;
}
}
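
A note on the version trimming in CreateDto(): a tag such as v0.4.3 parses into a three-part Version whose Revision is -1, so the four-part build version is cut down before the two are displayed side by side. A quick illustration of that behavior:

using System;

public static class VersionTrimDemo
{
    public static void Main()
    {
        var updateVersion = new Version("0.4.3");  // from Tag_Name "v0.4.3"
        Console.WriteLine(updateVersion.Revision); // -1: no fourth component

        var currentVersion = "0.4.3.1";            // e.g. a four-part BuildInfo.Version
        if (updateVersion.Revision == -1)
        {
            // Drop the trailing ".1" so both sides show three components
            currentVersion = currentVersion.Substring(0, currentVersion.LastIndexOf(".", StringComparison.Ordinal));
        }
        Console.WriteLine(currentVersion);         // prints 0.4.3
    }
}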