Feature/unit tests (#171)

* Removed a duplicate loop that repeated work already done earlier in the method.

* Normalize now replaces underscores

* Added more Parser cases, a test case for SeriesExtension (Name in List), a MergeNameTest, and TODOs marking where further tests should go

* Added a test for removal

* Fixed bad merge

Co-authored-by: Andrew Song <asong641@gmail.com>
This commit is contained in:
Joseph Milazzo 2021-04-13 10:24:44 -05:00 committed by GitHub
parent 6ba00477e7
commit d59d60d9ec
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 205 additions and 54 deletions

View file

@ -155,27 +155,16 @@ namespace API.Services.Tasks
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
}
private void UpdateLibrary(Library library, Dictionary<string, List<ParserInfo>> parsedSeries)
{
if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries));
// First, remove any series that are not in parsedSeries list
var foundSeries = parsedSeries.Select(s => Parser.Parser.Normalize(s.Key)).ToList();
// var missingSeries = library.Series.Where(existingSeries =>
// !foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name)
// || (existingSeries.LocalizedName != null && !parsedSeries.ContainsKey(existingSeries.LocalizedName))
// || !parsedSeries.ContainsKey(existingSeries.OriginalName));
var missingSeries = library.Series.Where(existingSeries => !existingSeries.NameInList(foundSeries)
|| !existingSeries.NameInList(parsedSeries.Keys));
var removeCount = 0;
foreach (var existingSeries in missingSeries)
{
library.Series?.Remove(existingSeries);
removeCount += 1;
}
_logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount);
var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries);
var removeCount = RemoveMissingSeries(library.Series, missingSeries);
_logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk", removeCount);
// Add new series that have parsedInfos
foreach (var (key, _) in parsedSeries)
@ -207,9 +196,29 @@ namespace API.Services.Tasks
UpdateVolumes(series, parsedSeries[series.OriginalName].ToArray());
series.Pages = series.Volumes.Sum(v => v.Pages);
});
}
foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
/// <summary>
/// Returns the series from <paramref name="existingSeries"/> whose names no longer
/// appear among the series parsed from disk.
/// </summary>
/// <param name="existingSeries">Series currently tracked in the library.</param>
/// <param name="parsedSeries">Scan results, keyed by series name as parsed from disk.</param>
/// <returns>A deferred sequence of series not found on disk. Materialize it before
/// mutating <paramref name="existingSeries"/>.</returns>
public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<string, List<ParserInfo>> parsedSeries)
{
    // Names the scan actually found on disk (the dictionary keys).
    var foundSeries = parsedSeries.Select(s => s.Key).ToList();

    // Fix: the lambda parameter was named "existingSeries", shadowing the method
    // parameter of the same name; renamed to "series" for correctness/clarity.
    // NOTE(review): foundSeries holds the same values as parsedSeries.Keys, so the
    // second NameInList check looks redundant — likely a leftover from when
    // foundSeries held normalized names. Confirm against NameInList's overloads
    // before removing it.
    return existingSeries.Where(series => !series.NameInList(foundSeries)
                                          || !series.NameInList(parsedSeries.Keys));
}
/// <summary>
/// Removes each series in <paramref name="missingSeries"/> from
/// <paramref name="existingSeries"/>.
/// </summary>
/// <param name="existingSeries">Collection to remove from; may be null or empty.</param>
/// <param name="missingSeries">Series that should no longer be present; may be a
/// deferred query over <paramref name="existingSeries"/> (see FindSeriesNotOnDisk).</param>
/// <returns>The number of series actually removed.</returns>
public int RemoveMissingSeries(ICollection<Series> existingSeries, IEnumerable<Series> missingSeries)
{
    if (existingSeries == null || existingSeries.Count == 0) return 0;
    if (missingSeries == null) return 0;

    var removeCount = 0;
    // Materialize first: callers pass a deferred LINQ query over existingSeries, and
    // removing from the underlying collection while enumerating that query throws
    // InvalidOperationException.
    foreach (var existing in missingSeries.ToList())
    {
        // Count only successful removals so the caller's "Removed {n} series" log
        // line is accurate (the old code counted every iteration).
        if (existingSeries.Remove(existing)) removeCount += 1;
    }

    return removeCount;
}
private void UpdateVolumes(Series series, ParserInfo[] parsedInfos)
@ -266,6 +275,7 @@ namespace API.Services.Tasks
Chapter chapter = null;
try
{
// TODO: Extract to FindExistingChapter()
chapter = specialTreatment
? volume.Chapters.SingleOrDefault(c => c.Range == info.Filename
|| (c.Files.Select(f => f.FilePath)
@ -317,7 +327,7 @@ namespace API.Services.Tasks
// TODO: Extract to
// Remove chapters that aren't in parsedInfos or have no files linked
var existingChapters = volume.Chapters.ToList();
foreach (var existingChapter in existingChapters)
@ -354,15 +364,7 @@ namespace API.Services.Tasks
if (info.Series == string.Empty) return;
// Check if normalized info.Series already exists and if so, update info to use that name instead
var normalizedSeries = Parser.Parser.Normalize(info.Series);
_logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);
var existingName = _scannedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries)
.Key;
if (!string.IsNullOrEmpty(existingName) && info.Series != existingName)
{
_logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName);
info.Series = existingName;
}
info.Series = MergeName(_scannedSeries, info);
_scannedSeries.AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (_, oldValue) =>
{
@ -376,6 +378,21 @@ namespace API.Services.Tasks
});
}
/// <summary>
/// Determines the series name <paramref name="info"/> should be recorded under:
/// if a previously collected series normalizes to the same value, the existing
/// name wins; otherwise the parsed name is kept as-is.
/// </summary>
/// <param name="collectedSeries">Series gathered so far in this scan, keyed by raw name.</param>
/// <param name="info">Parsed info whose series name may need merging.</param>
/// <returns>The name to use for <paramref name="info"/>.</returns>
public string MergeName(ConcurrentDictionary<string,List<ParserInfo>> collectedSeries, ParserInfo info)
{
    var normalizedSeries = Parser.Parser.Normalize(info.Series);
    _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);

    // NOTE(review): SingleOrDefault throws if two collected keys normalize to the
    // same value — presumed impossible because merging happens before insertion,
    // but worth confirming under concurrent scanning.
    var existingName = collectedSeries
        .SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries)
        .Key;

    // No prior entry with this normalized name, or the names already agree:
    // keep the parsed name.
    if (string.IsNullOrEmpty(existingName) || info.Series == existingName)
    {
        return info.Series;
    }

    _logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName);
    return existingName;
}
/// <summary>
/// Processes files found during a library scan.
/// Populates a collection of <see cref="ParserInfo"/> for DB updates later.