Moved some files around; fixed log file fetching to use zip and to work more reliably. Refactored some code in DirectoryService.

This commit is contained in:
Joseph Milazzo 2021-02-24 15:58:17 -06:00
parent bbb4240e20
commit ecfb40cb2d
15 changed files with 128 additions and 47 deletions

View file

@ -6,6 +6,7 @@ using System.Linq;
using System.Xml.Serialization;
using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Logging;
using NetVips;

View file

@ -92,7 +92,8 @@ namespace API.Services
public void ClearDirectory(string directoryPath)
{
DirectoryInfo di = new DirectoryInfo(directoryPath);
var di = new DirectoryInfo(directoryPath);
if (!di.Exists) return;
foreach (var file in di.EnumerateFiles())
{
@ -156,7 +157,7 @@ namespace API.Services
return new ImageDto
{
Content = await File.ReadAllBytesAsync(imagePath),
Content = await ReadFileAsync(imagePath),
Filename = Path.GetFileNameWithoutExtension(imagePath),
FullPath = Path.GetFullPath(imagePath),
Width = image.Width,
@ -165,6 +166,12 @@ namespace API.Services
};
}
/// <summary>
/// Asynchronously reads the full contents of a file.
/// </summary>
/// <param name="path">Path of the file to read.</param>
/// <returns>
/// The file's bytes, or an empty array when no file exists at <paramref name="path"/>
/// (missing and empty files are intentionally indistinguishable to callers).
/// </returns>
public async Task<byte[]> ReadFileAsync(string path)
{
    return File.Exists(path)
        ? await File.ReadAllBytesAsync(path)
        : Array.Empty<byte>();
}
/// <summary>
/// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed

View file

@ -1,4 +1,5 @@
using System.Threading.Tasks;
using System.IO;
using System.Threading.Tasks;
using API.Entities.Enums;
using API.Helpers.Converters;
using API.Interfaces;
@ -16,6 +17,8 @@ namespace API.Services
private readonly IUnitOfWork _unitOfWork;
private readonly IMetadataService _metadataService;
private readonly IBackupService _backupService;
private readonly ICleanupService _cleanupService;
private readonly IDirectoryService _directoryService;
public BackgroundJobServer Client => new BackgroundJobServer();
// new BackgroundJobServerOptions()
@ -24,7 +27,8 @@ namespace API.Services
// }
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService)
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
IDirectoryService directoryService)
{
_cacheService = cacheService;
_logger = logger;
@ -32,6 +36,8 @@ namespace API.Services
_unitOfWork = unitOfWork;
_metadataService = metadataService;
_backupService = backupService;
_cleanupService = cleanupService;
_directoryService = directoryService;
ScheduleTasks();
@ -65,7 +71,7 @@ namespace API.Services
RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), Cron.Weekly);
}
RecurringJob.AddOrUpdate(() => _cacheService.Cleanup(), Cron.Daily);
RecurringJob.AddOrUpdate(() => _cleanupService.Cleanup(), Cron.Daily);
}
public void ScanLibrary(int libraryId, bool forceUpdate = false)
@ -85,6 +91,12 @@ namespace API.Services
BackgroundJob.Enqueue((() => _metadataService.RefreshMetadata(libraryId, forceUpdate)));
}
/// <summary>
/// Queues a background job that empties the application's temp folder
/// (the "temp" directory under the current working directory).
/// </summary>
public void CleanupTemp()
{
    var temp = Path.Join(Directory.GetCurrentDirectory(), "temp");
    BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(temp));
}
public void BackupDatabase()
{
BackgroundJob.Enqueue(() => _backupService.BackupDatabase());

View file

@ -11,7 +11,7 @@ using API.Interfaces.Services;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
namespace API.Services
namespace API.Services.Tasks
{
public class BackupService : IBackupService
{
@ -27,16 +27,10 @@ namespace API.Services
_unitOfWork = unitOfWork;
_logger = logger;
_directoryService = directoryService;
var maxRollingFiles = config.GetMaxRollingFiles();
var loggingSection = config.GetLoggingFileName();
var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
var fi = new FileInfo(loggingSection);
var files = maxRollingFiles > 0
? _directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log")
: new string[] {"kavita.log"};
var files = LogFiles(maxRollingFiles, loggingSection);
_backupFiles = new List<string>()
{
"appsettings.json",
@ -52,6 +46,17 @@ namespace API.Services
}
}
/// <summary>
/// Enumerates the log file names to include in a backup.
/// </summary>
/// <param name="maxRollingFiles">Configured maximum number of rolling log files; rolling is enabled when greater than 0.</param>
/// <param name="logFileName">Configured log file name (e.g. "kavita.log").</param>
/// <returns>Matching log file paths when rolling is enabled, otherwise the configured file name itself.</returns>
public IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName)
{
    if (maxRollingFiles > 0)
    {
        // Rolled files share the base name with a numeric suffix inserted
        // before the ".log" extension (e.g. kavita20210226.log), so the
        // pattern must be built from the name WITHOUT its extension. The
        // previous pattern used the full name ("kavita.log\d*\.log"), which
        // required a doubled extension and could never match a rolled file.
        var baseName = Path.GetFileNameWithoutExtension(logFileName);
        return _directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{baseName}\d*\.log");
    }

    // No rolling: the only candidate is the configured log file itself.
    // (Previously hard-coded to "kavita.log", silently ignoring logFileName.)
    return new[] { logFileName };
}
public void BackupDatabase()
{
_logger.LogInformation("Beginning backup of Database at {BackupTime}", DateTime.Now);

View file

@ -0,0 +1,33 @@
using System.IO;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks
{
/// <summary>
/// Performs routine housekeeping on a recurring basis: empties the temp
/// folder and delegates cache cleanup to the cache service.
/// </summary>
public class CleanupService : ICleanupService
{
    private readonly ILogger<CleanupService> _logger;
    private readonly IDirectoryService _directoryService;
    private readonly ICacheService _cacheService;

    public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger)
    {
        _logger = logger;
        _directoryService = directoryService;
        _cacheService = cacheService;
    }

    /// <summary>
    /// Clears the "temp" directory under the current working directory,
    /// then asks the cache service to clean up its own state.
    /// </summary>
    public void Cleanup()
    {
        _logger.LogInformation("Cleaning temp directory");
        _directoryService.ClearDirectory(Path.Join(Directory.GetCurrentDirectory(), "temp"));

        _logger.LogInformation("Cleaning cache directory");
        _cacheService.Cleanup();
    }
}
}

View file

@ -4,7 +4,6 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.Enums;
@ -14,8 +13,7 @@ using API.Parser;
using Hangfire;
using Microsoft.Extensions.Logging;
[assembly: InternalsVisibleTo("API.Tests")]
namespace API.Services
namespace API.Services.Tasks
{
public class ScannerService : IScannerService
{