Backend Bugfixes and Enhanced Selections (#754)

* Updated some signatures to avoid calling ToArray() within a loop (sketched below).
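
  A condensed sketch of the pattern (illustrative; the real changes are in the ScannerService hunks below). Methods such as UpdateVolumes and UpdateChapters now accept IList<ParserInfo> instead of ParserInfo[], so callers stop re-materializing the same infos with ToArray() on every loop iteration:

      // Before: a fresh array copy at every call site inside the loop.
      // private void UpdateVolumes(Series series, ParserInfo[] parsedInfos) { ... }
      // UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray());

      // After: the signature accepts the list directly, so no per-iteration copy is needed.
      private void UpdateVolumes(Series series, IList<ParserInfo> parsedInfos)
      {
          // ... add new volumes and update chapters per volume (body unchanged) ...
      }

      // Call site:
      UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series));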

* Use UpdateSeries directly when adding new series, rather than maintaining a slightly modified copy of that logic just for new series (condensed below).
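
  Condensed from the ScannerService hunk below: the new-series loop now delegates to the shared UpdateSeries path instead of inlining its own copy of the volume/page update logic.

      foreach (var series in newSeries)
      {
          _logger.LogDebug("[ScannerService] Processing series {SeriesName}", series.OriginalName);
          UpdateSeries(series, parsedSeries);           // same code path as existing series
          _unitOfWork.SeriesRepository.Attach(series);
          try
          {
              await _unitOfWork.CommitAsync();
          }
          catch (Exception ex)
          {
              // Duplicate index keys are logged as critical and skipped (see the diff).
              _logger.LogCritical(ex, "[ScannerService] Critical exception adding new series entry for {SeriesName}", series.Name);
          }
      }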

* Refactored some of the scanner loop's messages to reduce duplicate code and make the messages clearer. Hooked in a RefreshMetadataProgress event, with no UI changes (event flow condensed below).
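
  The resulting event flow, condensed from the MetadataService hunks below: 0F before the first chunk, a fraction of chunks completed after each chunk, and a final event when the refresh finishes. Note that i * 1F / chunkInfo.TotalChunks is a 0..1 fraction while the final event sends 100F; presumably the (unchanged) client treats both as "progress", since this commit ships no UI changes.

      await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
          MessageFactory.RefreshMetadataProgressEvent(library.Id, 0F));

      var i = 0;
      for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++, i++)
      {
          // ... refresh metadata for this chunk ...
          var progress = Math.Max(0F, Math.Min(100F, i * 1F / chunkInfo.TotalChunks));
          await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
              MessageFactory.RefreshMetadataProgressEvent(library.Id, progress));
      }

      await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
          MessageFactory.RefreshMetadataProgressEvent(library.Id, 100F));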

* Fixed a bug where the backup service used different logic on Docker than on non-Docker installs; the split has not been needed since last release's config change (condensed below).
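
  The effect in BackupService (condensed from the hunk below): the Docker branch that prefixed every path with "data/" is gone, and all installs back up the same relative file list.

      _backupFiles = new List<string>()
      {
          "appsettings.json",
          "Hangfire.db",      // not used atm
          "Hangfire-log.db",  // not used atm
          "kavita.db",
          "kavita.db-shm",    // won't always be there
          "kavita.db-wal"     // won't always be there
      };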

* Allow users to make more than one backup per day (naming sketch below).
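
  This works by folding the current time into the zip name (from the BackupService hunk below), so a second backup on the same day no longer collides with an existing file and gets skipped:

      var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}"
          .Replace("/", "_").Replace(":", "_");
      var zipPath = Path.Join(backupDirectory, $"kavita_backup_{dateString}.zip");
      // e.g. kavita_backup_11_14_2021_10_20_12 AM.zip under an en-US locale;
      // ToShortDateString/ToLongTimeString output is culture-dependent.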

* Implemented a select-all checkbox for the library access modal.
Joseph Milazzo committed f6bfabde4c (parent 0aff08c9cd) via GitHub on 2021-11-14 10:20:12 -06:00.
14 changed files with 171 additions and 101 deletions

API/Services/DirectoryService.cs

@@ -266,7 +266,7 @@ namespace API.Services
 /// <param name="directoryPath"></param>
 /// <param name="prepend">An optional string to prepend to the target file's name</param>
 /// <returns></returns>
-public bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath, string prepend = "")
+public static bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath, string prepend = "", ILogger logger = null)
 {
 ExistOrCreate(directoryPath);
 string currentFile = null;
@@ -282,19 +282,24 @@ namespace API.Services
 }
 else
 {
-_logger.LogWarning("Tried to copy {File} but it doesn't exist", file);
+logger?.LogWarning("Tried to copy {File} but it doesn't exist", file);
 }
 }
 }
 catch (Exception ex)
 {
-_logger.LogError(ex, "Unable to copy {File} to {DirectoryPath}", currentFile, directoryPath);
+logger?.LogError(ex, "Unable to copy {File} to {DirectoryPath}", currentFile, directoryPath);
 return false;
 }
 return true;
 }
+public bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath, string prepend = "")
+{
+return CopyFilesToDirectory(filePaths, directoryPath, prepend, _logger);
+}
 public IEnumerable<string> ListDirectory(string rootPath)
 {
 if (!Directory.Exists(rootPath)) return ImmutableList<string>.Empty;

API/Services/MetadataService.cs

@@ -218,14 +218,18 @@ namespace API.Services
 var stopwatch = Stopwatch.StartNew();
 var totalTime = 0L;
 _logger.LogInformation("[MetadataService] Refreshing Library {LibraryName}. Total Items: {TotalSize}. Total Chunks: {TotalChunks} with {ChunkSize} size", library.Name, chunkInfo.TotalSize, chunkInfo.TotalChunks, chunkInfo.ChunkSize);
+await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
+MessageFactory.RefreshMetadataProgressEvent(library.Id, 0F));
-for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
+var i = 0;
+for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++, i++)
 {
 if (chunkInfo.TotalChunks == 0) continue;
 totalTime += stopwatch.ElapsedMilliseconds;
 stopwatch.Restart();
 _logger.LogInformation("[MetadataService] Processing chunk {ChunkNumber} / {TotalChunks} with size {ChunkSize}. Series ({SeriesStart} - {SeriesEnd}",
 chunk, chunkInfo.TotalChunks, chunkInfo.ChunkSize, chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize);
 var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
 new UserParams()
 {
@@ -233,6 +237,7 @@ namespace API.Services
 PageSize = chunkInfo.ChunkSize
 });
 _logger.LogDebug("[MetadataService] Fetched {SeriesCount} series for refresh", nonLibrarySeries.Count);
 Parallel.ForEach(nonLibrarySeries, series =>
 {
 try
@@ -275,8 +280,14 @@ namespace API.Services
 "[MetadataService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
 chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
 }
+var progress = Math.Max(0F, Math.Min(100F, i * 1F / chunkInfo.TotalChunks));
+await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
+MessageFactory.RefreshMetadataProgressEvent(library.Id, progress));
 }
+await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
+MessageFactory.RefreshMetadataProgressEvent(library.Id, 100F));
 _logger.LogInformation("[MetadataService] Updated metadata for {SeriesNumber} series in library {LibraryName} in {ElapsedMilliseconds} milliseconds total", chunkInfo.TotalSize, library.Name, totalTime);
 }

API/Services/Tasks/BackupService.cs

@@ -9,7 +9,6 @@ using API.Extensions;
 using API.Interfaces;
 using API.Interfaces.Services;
 using Hangfire;
-using Kavita.Common.EnvironmentInfo;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.Logging;
@@ -20,8 +19,6 @@ namespace API.Services.Tasks
 private readonly IUnitOfWork _unitOfWork;
 private readonly ILogger<BackupService> _logger;
 private readonly IDirectoryService _directoryService;
-private readonly string _tempDirectory = DirectoryService.TempDirectory;
-private readonly string _logDirectory = DirectoryService.LogDirectory;
 private readonly IList<string> _backupFiles;
@@ -35,30 +32,16 @@ namespace API.Services.Tasks
 var loggingSection = config.GetLoggingFileName();
 var files = LogFiles(maxRollingFiles, loggingSection);
-if (new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker)
+_backupFiles = new List<string>()
 {
-_backupFiles = new List<string>()
-{
-"data/appsettings.json",
-"data/Hangfire.db",
-"data/Hangfire-log.db",
-"data/kavita.db",
-"data/kavita.db-shm", // This wont always be there
-"data/kavita.db-wal" // This wont always be there
-};
-}
-else
-{
-_backupFiles = new List<string>()
-{
-"appsettings.json",
-"Hangfire.db",
-"Hangfire-log.db",
-"kavita.db",
-"kavita.db-shm", // This wont always be there
-"kavita.db-wal" // This wont always be there
-};
-}
+"appsettings.json",
+"Hangfire.db", // This is not used atm
+"Hangfire-log.db", // This is not used atm
+"kavita.db",
+"kavita.db-shm", // This wont always be there
+"kavita.db-wal" // This wont always be there
+};
 foreach (var file in files.Select(f => (new FileInfo(f)).Name).ToList())
 {
@@ -72,7 +55,7 @@ namespace API.Services.Tasks
 var fi = new FileInfo(logFileName);
 var files = maxRollingFiles > 0
-? DirectoryService.GetFiles(_logDirectory, $@"{Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log")
+? DirectoryService.GetFiles(DirectoryService.LogDirectory, $@"{Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log")
 : new[] {"kavita.log"};
 return files;
 }
@@ -93,7 +76,7 @@ namespace API.Services.Tasks
 return;
 }
-var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
+var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}".Replace("/", "_").Replace(":", "_");
 var zipPath = Path.Join(backupDirectory, $"kavita_backup_{dateString}.zip");
 if (File.Exists(zipPath))
@@ -102,7 +85,7 @@ namespace API.Services.Tasks
 return;
 }
-var tempDirectory = Path.Join(_tempDirectory, dateString);
+var tempDirectory = Path.Join(DirectoryService.TempDirectory, dateString);
 DirectoryService.ExistOrCreate(tempDirectory);
 DirectoryService.ClearDirectory(tempDirectory);

API/Services/Tasks/ScannerService.cs

@@ -360,14 +360,13 @@ namespace API.Services.Tasks
 Series existingSeries;
 try
 {
-existingSeries = allSeries.SingleOrDefault(s =>
-(s.NormalizedName.Equals(key.NormalizedName) || Parser.Parser.Normalize(s.OriginalName).Equals(key.NormalizedName))
-&& (s.Format == key.Format || s.Format == MangaFormat.Unknown));
+existingSeries = allSeries.SingleOrDefault(s => FindSeries(s, key));
 }
 catch (Exception e)
 {
 // NOTE: If I ever want to put Duplicates table, this is where it can go
 _logger.LogCritical(e, "[ScannerService] There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it. This will be skipped", key.NormalizedName);
-var duplicateSeries = allSeries.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList();
+var duplicateSeries = allSeries.Where(s => FindSeries(s, key));
 foreach (var series in duplicateSeries)
 {
 _logger.LogCritical("[ScannerService] Duplicate Series Found: {Key} maps with {Series}", key.Name, series.OriginalName);
@@ -378,21 +377,20 @@ namespace API.Services.Tasks
 if (existingSeries != null) continue;
-existingSeries = DbFactory.Series(infos[0].Series);
-existingSeries.Format = key.Format;
-newSeries.Add(existingSeries);
+var s = DbFactory.Series(infos[0].Series);
+s.Format = key.Format;
+s.LibraryId = library.Id; // We have to manually set this since we aren't adding the series to the Library's series.
+newSeries.Add(s);
 }
 var i = 0;
 foreach(var series in newSeries)
 {
+_logger.LogDebug("[ScannerService] Processing series {SeriesName}", series.OriginalName);
+UpdateSeries(series, parsedSeries);
+_unitOfWork.SeriesRepository.Attach(series);
 try
 {
-_logger.LogDebug("[ScannerService] Processing series {SeriesName}", series.OriginalName);
-UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray());
-series.Pages = series.Volumes.Sum(v => v.Pages);
-series.LibraryId = library.Id; // We have to manually set this since we aren't adding the series to the Library's series.
-_unitOfWork.SeriesRepository.Attach(series);
 await _unitOfWork.CommitAsync();
 _logger.LogInformation(
 "[ScannerService] Added {NewSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
@@ -403,7 +401,7 @@ namespace API.Services.Tasks
 }
 catch (Exception ex)
 {
-_logger.LogCritical(ex, "[ScannerService] There was a critical exception adding new series entry for {SeriesName} with a duplicate index key: {IndexKey}",
+_logger.LogCritical(ex, "[ScannerService] There was a critical exception adding new series entry for {SeriesName} with a duplicate index key: {IndexKey} ",
 series.Name, $"{series.Name}_{series.NormalizedName}_{series.LocalizedName}_{series.LibraryId}_{series.Format}");
 }
@@ -418,13 +416,19 @@ namespace API.Services.Tasks
 newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name);
 }
+private static bool FindSeries(Series series, ParsedSeries parsedInfoKey)
+{
+return (series.NormalizedName.Equals(parsedInfoKey.NormalizedName) || Parser.Parser.Normalize(series.OriginalName).Equals(parsedInfoKey.NormalizedName))
+&& (series.Format == parsedInfoKey.Format || series.Format == MangaFormat.Unknown);
+}
 private void UpdateSeries(Series series, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
 {
 try
 {
 _logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);
-var parsedInfos = ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray();
+var parsedInfos = ParseScannedFiles.GetInfosByName(parsedSeries, series);
 UpdateVolumes(series, parsedInfos);
 series.Pages = series.Volumes.Sum(v => v.Pages);
@@ -491,7 +495,7 @@ namespace API.Services.Tasks
 /// <param name="missingSeries">Series not found on disk or can't be parsed</param>
 /// <param name="removeCount"></param>
 /// <returns>the updated existingSeries</returns>
-public static IList<Series> RemoveMissingSeries(IList<Series> existingSeries, IEnumerable<Series> missingSeries, out int removeCount)
+public static IEnumerable<Series> RemoveMissingSeries(IList<Series> existingSeries, IEnumerable<Series> missingSeries, out int removeCount)
 {
 var existingCount = existingSeries.Count;
 var missingList = missingSeries.ToList();
@@ -505,7 +509,7 @@ namespace API.Services.Tasks
 return existingSeries;
 }
-private void UpdateVolumes(Series series, ParserInfo[] parsedInfos)
+private void UpdateVolumes(Series series, IList<ParserInfo> parsedInfos)
 {
 var startingVolumeCount = series.Volumes.Count;
 // Add new volumes and update chapters per volume
@@ -559,7 +563,7 @@ namespace API.Services.Tasks
 /// </summary>
 /// <param name="volume"></param>
 /// <param name="parsedInfos"></param>
-private void UpdateChapters(Volume volume, ParserInfo[] parsedInfos)
+private void UpdateChapters(Volume volume, IList<ParserInfo> parsedInfos)
 {
 // Add new chapters
 foreach (var info in parsedInfos)