Cache cleanup implemented

This commit is contained in:
Joseph Milazzo 2021-01-11 15:39:25 -06:00
parent f737f662df
commit 731e3a9c5e
11 changed files with 132 additions and 29 deletions

View file

@ -1,8 +1,11 @@
using System.IO;
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using Microsoft.Extensions.Logging;
namespace API.Services
{
@ -10,11 +13,14 @@ namespace API.Services
{
private readonly IDirectoryService _directoryService;
private readonly ISeriesRepository _seriesRepository;
private readonly ILogger<CacheService> _logger;
private readonly string _cacheDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../cache/"));
public CacheService(IDirectoryService directoryService, ISeriesRepository seriesRepository)
public CacheService(IDirectoryService directoryService, ISeriesRepository seriesRepository, ILogger<CacheService> logger)
{
_directoryService = directoryService;
_seriesRepository = seriesRepository;
_logger = logger;
}
public async Task<Volume> Ensure(int volumeId)
@ -31,10 +37,40 @@ namespace API.Services
}
/// <summary>
/// Deletes all files and folders inside the cache directory. Intended to be
/// run on a recurring schedule (see TaskScheduler) to reclaim disk space.
/// </summary>
public void Cleanup()
{
    _logger.LogInformation("Performing cleanup of Cache directory");

    try
    {
        var directory = new DirectoryInfo(_cacheDirectory);
        directory.Empty();
        // Only report success if Empty() did not throw.
        _logger.LogInformation("Cache directory purged.");
    }
    catch (Exception ex)
    {
        // Pass the exception as the first argument so the logging provider
        // captures the stack trace; passing it as a trailing object treats
        // it as an (unused) message-template parameter and drops the trace.
        _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup.");
    }
}
/// <summary>
/// Removes the cached (extracted) folders belonging to the given volumes of a library.
/// </summary>
/// <param name="libraryId">Id of the library being cleaned; used for logging only.</param>
/// <param name="volumeIds">Ids of the volumes whose cache folders should be deleted.</param>
public void CleanupLibrary(int libraryId, int[] volumeIds)
{
    // Structured message template (not string interpolation) so the library id
    // is captured as a named property by the logging provider.
    _logger.LogInformation("Running Cache cleanup on Library: {LibraryId}", libraryId);

    foreach (var volumeId in volumeIds)
    {
        // Each volume's extracted files live in a folder named after its id.
        var volumeDirectory = new DirectoryInfo(Path.Join(_cacheDirectory, volumeId.ToString()));
        if (volumeDirectory.Exists)
        {
            // Recursive delete removes all extracted archive contents.
            volumeDirectory.Delete(true);
        }
    }

    _logger.LogInformation("Cache directory purged");
}
private string GetVolumeCachePath(int volumeId, MangaFile file)
{

View file

@ -123,23 +123,23 @@ namespace API.Services
Name = seriesName,
OriginalName = seriesName,
SortName = seriesName,
Summary = "" // TODO: Check if comicInfo.xml in file
Summary = "" // TODO: Check if comicInfo.xml in file and parse metadata out.
};
}
var volumes = UpdateVolumes(series, infos, forceUpdate);
series.Volumes = volumes;
series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault()?.CoverImage;
//GetFiles()
return series;
}
private MangaFile CreateMangaFile(ParserInfo info)
{
_logger.LogDebug($"Creating File Entry for {info.FullFilePath}");
int chapter;
int.TryParse(info.Chapters, out chapter);
_logger.LogDebug($"Chapter? {chapter}");
int.TryParse(info.Chapters, out var chapter);
_logger.LogDebug($"Found Chapter: {chapter}");
return new MangaFile()
{
FilePath = info.FullFilePath,
@ -176,11 +176,7 @@ namespace API.Services
{
existingVolume.Files.Add(CreateMangaFile(info));
}
if (forceUpdate || existingVolume.CoverImage == null || existingVolumes.Count == 0)
{
existingVolume.CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true);
}
volumes.Add(existingVolume);
}
else
@ -189,7 +185,6 @@ namespace API.Services
if (existingVolume != null)
{
existingVolume.Files.Add(CreateMangaFile(info));
existingVolume.CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true);
}
else
{
@ -197,7 +192,6 @@ namespace API.Services
{
Name = info.Volumes,
Number = Int32.Parse(info.Volumes),
CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true),
Files = new List<MangaFile>()
{
CreateMangaFile(info)
@ -210,20 +204,41 @@ namespace API.Services
Console.WriteLine($"Adding volume {volumes.Last().Number} with File: {info.Filename}");
}
foreach (var volume in volumes)
{
if (forceUpdate || volume.CoverImage == null || !volume.Files.Any())
{
var firstFile = volume.Files.OrderBy(x => x.Chapter).FirstOrDefault()?.FilePath;
volume.CoverImage = ImageProvider.GetCoverImage(firstFile, true);
}
}
return volumes;
}
public void ScanLibrary(int libraryId, bool forceUpdate)
{
var sw = Stopwatch.StartNew();
var library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result;
Library library;
try
{
library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result;
}
catch (Exception ex)
{
// This usually only fails if user is not authenticated.
_logger.LogError($"There was an issue fetching Library {libraryId}.", ex);
return;
}
_scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>();
_logger.LogInformation($"Beginning scan on {library.Name}");
var totalFiles = 0;
foreach (var folderPath in library.Folders)
{
try {
TraverseTreeParallelForEach(folderPath.Path, (f) =>
totalFiles = TraverseTreeParallelForEach(folderPath.Path, (f) =>
{
try
{
@ -266,7 +281,7 @@ namespace API.Services
}
_scannedSeries = null;
Console.WriteLine("Processed {0} files in {1} milliseconds", library.Name, sw.ElapsedMilliseconds);
_logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
}
public string GetExtractPath(int volumeId)
@ -359,7 +374,7 @@ namespace API.Services
/// <param name="root">Directory to scan</param>
/// <param name="action">Action to apply on file path</param>
/// <exception cref="ArgumentException"></exception>
private static void TraverseTreeParallelForEach(string root, Action<string> action)
private static int TraverseTreeParallelForEach(string root, Action<string> action)
{
//Count of files traversed and timer for diagnostic output
int fileCount = 0;
@ -449,6 +464,8 @@ namespace API.Services
foreach (string str in subDirs)
dirs.Push(str);
}
return fileCount;
}
}

View file

@ -1,15 +1,21 @@
using API.Interfaces;
using Hangfire;
using Microsoft.Extensions.Logging;
namespace API.Services
{
public class TaskScheduler : ITaskScheduler
{
private readonly ILogger<TaskScheduler> _logger;
private readonly BackgroundJobServer _client;
/// <summary>
/// Starts the Hangfire background job server and registers the recurring
/// daily cache-cleanup job.
/// </summary>
/// <param name="cacheService">Service whose Cleanup() is scheduled daily.</param>
/// <param name="logger">Logger for scheduling diagnostics.</param>
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger)
{
    _logger = logger;
    _client = new BackgroundJobServer();

    _logger.LogInformation("Scheduling/Updating cache cleanup on a daily basis.");
    // AddOrUpdate is idempotent: re-registering on startup updates the
    // existing recurring job rather than creating duplicates.
    RecurringJob.AddOrUpdate(() => cacheService.Cleanup(), Cron.Daily);
}