Breaking Changes: Docker Parity (#698)

* Refactored Kavita so that all of its config files load from config/. This lets Docker mount just one folder and makes the Update functionality trivial (a minimal sketch of the idea follows this list).

* Cleaned up documentation around the new update method.

* Updated the Docker files to support the config directory

* Removed the entrypoint, as it is no longer needed

* Updated appsettings to point logs at the config directory

* Updated the message shown to Docker users who are upgrading

* Ensured that Docker users who have not updated their mount points after upgrading cannot start the server

* Addressed code smells

* More cleanup

* Added entrypoint to fix bind mount issues

* Updated the README with the new folder structure

* Fixed the build system for the new setup

* Updated the path string when running under Docker

* Updated the migration flow for Docker to work properly and fixed LogFile configuration updating.

* Migrating Docker images now works 100%

* Fixed config issues introduced by bad code

* Code cleanup
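For context on the Docker-related bullets above, here is a minimal sketch of the idea, assuming a single config/ folder next to the executable plus a startup guard for stale mounts; ConfigLayout and AssertConfigMounted are illustrative names, not Kavita's actual code:

```csharp
// Illustrative sketch only: ConfigLayout and AssertConfigMounted are hypothetical names,
// not Kavita's actual types. It shows the general idea of rooting all writable state
// under a single config/ folder so Docker needs only one bind mount.
using System;
using System.IO;

public static class ConfigLayout
{
    // Everything mutable (appsettings.json, database, logs, covers, backups, temp)
    // is assumed to live under config/ next to the executable.
    public static readonly string ConfigDirectory =
        Path.Join(Directory.GetCurrentDirectory(), "config");

    // Refuse to start when a Docker user is still mounting the old, pre-config/ layout.
    public static void AssertConfigMounted(bool isDocker)
    {
        if (!isDocker) return;

        var appSettings = Path.Join(ConfigDirectory, "appsettings.json");
        if (!Directory.Exists(ConfigDirectory) || !File.Exists(appSettings))
        {
            throw new InvalidOperationException(
                "Please update your Docker mount points to the new config/ directory before starting the server.");
        }
    }
}
```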

Co-authored-by: Chris Plaatjes <kizaing@gmail.com>
Author: Joseph Milazzo, 2021-11-03 08:36:04 -05:00 (committed by GitHub)
parent 66b79e8cbe
commit a29b11c366
29 changed files with 670 additions and 438 deletions


@@ -20,8 +20,8 @@ namespace API.Services.Tasks
 private readonly IUnitOfWork _unitOfWork;
 private readonly ILogger<BackupService> _logger;
 private readonly IDirectoryService _directoryService;
-private readonly string _tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
-private readonly string _logDirectory = Path.Join(Directory.GetCurrentDirectory(), "logs");
+private readonly string _tempDirectory = DirectoryService.TempDirectory;
+private readonly string _logDirectory = DirectoryService.LogDirectory;
 private readonly IList<string> _backupFiles;
@@ -72,7 +72,7 @@ namespace API.Services.Tasks
 var fi = new FileInfo(logFileName);
 var files = maxRollingFiles > 0
-? _directoryService.GetFiles(_logDirectory, $@"{Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log")
+? DirectoryService.GetFiles(_logDirectory, $@"{Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log")
 : new[] {"kavita.log"};
 return files;
 }
@@ -148,7 +148,7 @@ namespace API.Services.Tasks
 // Swallow exception. This can be a duplicate cover being copied as chapter and volumes can share same file.
 }
-if (!_directoryService.GetFiles(outputTempDir).Any())
+if (!DirectoryService.GetFiles(outputTempDir).Any())
 {
 DirectoryService.ClearAndDeleteDirectory(outputTempDir);
 }
@@ -164,7 +164,7 @@ namespace API.Services.Tasks
 var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
 if (!_directoryService.Exists(backupDirectory)) return;
 var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold));
-var allBackups = _directoryService.GetFiles(backupDirectory).ToList();
+var allBackups = DirectoryService.GetFiles(backupDirectory).ToList();
 var expiredBackups = allBackups.Select(filename => new FileInfo(filename))
 .Where(f => f.CreationTime > deltaTime)
 .ToList();
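The hunks above swap calls on the injected _directoryService for static members of DirectoryService (TempDirectory, LogDirectory, GetFiles). A hedged sketch of what that static surface might look like, assuming paths rooted under config/ and regex-filtered listing; the class name and signatures here are assumptions, not the project's exact API:

```csharp
// Hedged sketch of the static surface the diff leans on; the real DirectoryService in
// Kavita may differ in names, signatures, and defaults.
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;

public static class DirectoryServiceSketch
{
    // Well-known paths, now rooted under config/ instead of the working directory.
    public static readonly string ConfigDirectory =
        Path.Join(Directory.GetCurrentDirectory(), "config");
    public static readonly string TempDirectory = Path.Join(ConfigDirectory, "temp");
    public static readonly string LogDirectory = Path.Join(ConfigDirectory, "logs");

    // Lists files in a directory, optionally filtered by a regex applied to the file name.
    public static IEnumerable<string> GetFiles(string path, string searchPatternExpression = "")
    {
        if (!Directory.Exists(path)) return Enumerable.Empty<string>();

        var files = Directory.GetFiles(path);
        if (string.IsNullOrEmpty(searchPatternExpression)) return files;

        var regex = new Regex(searchPatternExpression, RegexOptions.IgnoreCase);
        return files.Where(file => regex.IsMatch(Path.GetFileName(file)));
    }
}
```

Because these members are static, BackupService can drop its Path.Join(Directory.GetCurrentDirectory(), ...) fields without taking on any extra injected dependencies.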


@@ -16,16 +16,14 @@ namespace API.Services.Tasks
 private readonly ILogger<CleanupService> _logger;
 private readonly IBackupService _backupService;
 private readonly IUnitOfWork _unitOfWork;
-private readonly IDirectoryService _directoryService;
 public CleanupService(ICacheService cacheService, ILogger<CleanupService> logger,
-IBackupService backupService, IUnitOfWork unitOfWork, IDirectoryService directoryService)
+IBackupService backupService, IUnitOfWork unitOfWork)
 {
 _cacheService = cacheService;
 _logger = logger;
 _backupService = backupService;
 _unitOfWork = unitOfWork;
-_directoryService = directoryService;
 }
 public void CleanupCacheDirectory()
@@ -42,7 +40,7 @@ namespace API.Services.Tasks
 {
 _logger.LogInformation("Starting Cleanup");
 _logger.LogInformation("Cleaning temp directory");
-var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
+var tempDirectory = DirectoryService.TempDirectory;
 DirectoryService.ClearDirectory(tempDirectory);
 CleanupCacheDirectory();
 _logger.LogInformation("Cleaning old database backups");
@@ -57,7 +55,7 @@ namespace API.Services.Tasks
 private async Task DeleteSeriesCoverImages()
 {
 var images = await _unitOfWork.SeriesRepository.GetAllCoverImagesAsync();
-var files = _directoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.SeriesCoverImageRegex);
+var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.SeriesCoverImageRegex);
 foreach (var file in files)
 {
 if (images.Contains(Path.GetFileName(file))) continue;
@@ -69,7 +67,7 @@ namespace API.Services.Tasks
 private async Task DeleteChapterCoverImages()
 {
 var images = await _unitOfWork.ChapterRepository.GetAllCoverImagesAsync();
-var files = _directoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.ChapterCoverImageRegex);
+var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.ChapterCoverImageRegex);
 foreach (var file in files)
 {
 if (images.Contains(Path.GetFileName(file))) continue;
@@ -81,7 +79,7 @@ namespace API.Services.Tasks
 private async Task DeleteTagCoverImages()
 {
 var images = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync();
-var files = _directoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.CollectionTagCoverImageRegex);
+var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.CollectionTagCoverImageRegex);
 foreach (var file in files)
 {
 if (images.Contains(Path.GetFileName(file))) continue;
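The three cover-image cleanup methods above share one pattern: list the cover files that match a regex, then delete any file whose name the database no longer references. A minimal sketch of that pattern with stand-in inputs rather than Kavita's repositories:

```csharp
// Minimal sketch of the orphaned-cover cleanup pattern used above; the parameters are
// stand-ins for Kavita's repository results and DirectoryService.GetFiles output.
using System.Collections.Generic;
using System.IO;

public static class CoverCleanupSketch
{
    // Deletes any file on disk whose name is no longer referenced in the database.
    public static void DeleteOrphanedCovers(IEnumerable<string> knownImageNames,
        IEnumerable<string> filesOnDisk)
    {
        var known = new HashSet<string>(knownImageNames);
        foreach (var file in filesOnDisk)
        {
            if (known.Contains(Path.GetFileName(file))) continue;
            File.Delete(file);
        }
    }
}
```

In the real service, knownImageNames would come from the unit-of-work repositories and filesOnDisk from DirectoryService.GetFiles with the matching regex.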