Last Read Filter + A lot of bug fixes (#3312)
parent 953d80de1a
commit 6b13db129e

42 changed files with 620 additions and 198 deletions
@@ -23,11 +23,15 @@ public class AppUserCollectionDto : IHasCoverImage
     public string SecondaryColor { get; set; } = string.Empty;
     public bool CoverImageLocked { get; set; }

+    /// <summary>
+    /// Number of Series in the Collection
+    /// </summary>
+    public int ItemCount { get; set; }

     /// <summary>
     /// Owner of the Collection
     /// </summary>
     public string? Owner { get; set; }

     /// <summary>
     /// Last time Kavita Synced the Collection with an upstream source (for non Kavita sourced collections)
     /// </summary>
@@ -51,6 +51,10 @@ public enum FilterField
     AverageRating = 28,
     Imprint = 29,
     Team = 30,
-    Location = 31
+    Location = 31,
+    /// <summary>
+    /// Last time User Read
+    /// </summary>
+    ReadLast = 32,

 }
@@ -22,6 +22,11 @@ public class ReadingListDto : IHasCoverImage
     public string PrimaryColor { get; set; } = string.Empty;
     public string SecondaryColor { get; set; } = string.Empty;

+    /// <summary>
+    /// Number of Items in the Reading List
+    /// </summary>
+    public int ItemCount { get; set; }
+
     /// <summary>
     /// Minimum Year the Reading List starts
     /// </summary>
@@ -1253,6 +1253,7 @@ public class SeriesRepository : ISeriesRepository
             FilterField.ReleaseYear => query.HasReleaseYear(true, statement.Comparison, (int) value),
             FilterField.ReadTime => query.HasAverageReadTime(true, statement.Comparison, (int) value),
             FilterField.ReadingDate => query.HasReadingDate(true, statement.Comparison, (DateTime) value, userId),
+            FilterField.ReadLast => query.HasReadLast(true, statement.Comparison, (int) value, userId),
             FilterField.AverageRating => query.HasAverageRating(true, statement.Comparison, (float) value),
             _ => throw new ArgumentOutOfRangeException()
         };
@@ -257,9 +257,9 @@ public static class SeriesFilter
             .Select(s => new
             {
                 Series = s,
-                Percentage = ((float) s.Progress
+                Percentage = s.Progress
                     .Where(p => p != null && p.AppUserId == userId)
-                    .Sum(p => p != null ? (p.PagesRead * 1.0f / s.Pages) : 0) * 100)
+                    .Sum(p => p != null ? (p.PagesRead * 1.0f / s.Pages) : 0) * 100
             })
             .AsSplitQuery()
             .AsEnumerable();
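The Percentage fix above drops a stray (float) cast and an unbalanced closing parenthesis; the value itself is unchanged: the sum of each progress row's PagesRead over the series' total Pages, scaled to 0-100. A minimal sketch of that arithmetic outside of EF Core, with made-up numbers:

    // requires using System.Linq;
    // Hypothetical progress rows for one user on one series (numbers are illustrative).
    float[] pagesReadPerFile = { 10f, 5f };   // PagesRead per progress row
    const int seriesPages = 30;               // Series.Pages
    // Same formula as the projection above: per-row fractions summed, then scaled to percent.
    var percentage = pagesReadPerFile.Sum(p => p / seriesPages) * 100;   // 50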
@@ -361,6 +361,72 @@ public static class SeriesFilter
         return queryable.Where(s => ids.Contains(s.Id));
     }
+
+    /// <summary>
+    /// HasReadingDate but used to filter where last reading point was TODAY() - timeDeltaDays. This allows the user
+    /// to build smart filters "Haven't read in a month"
+    /// </summary>
+    public static IQueryable<Series> HasReadLast(this IQueryable<Series> queryable, bool condition,
+        FilterComparison comparison, int timeDeltaDays, int userId)
+    {
+        if (!condition || timeDeltaDays == 0) return queryable;
+
+        var subQuery = queryable
+            .Include(s => s.Progress)
+            .Where(s => s.Progress != null)
+            .Select(s => new
+            {
+                Series = s,
+                MaxDate = s.Progress.Where(p => p != null && p.AppUserId == userId)
+                    .Select(p => (DateTime?) p.LastModified)
+                    .DefaultIfEmpty()
+                    .Max()
+            })
+            .Where(s => s.MaxDate != null)
+            .AsSplitQuery()
+            .AsEnumerable();
+
+        var date = DateTime.Now.AddDays(-timeDeltaDays);
+
+        switch (comparison)
+        {
+            case FilterComparison.Equal:
+                subQuery = subQuery.Where(s => s.MaxDate != null && s.MaxDate.Equals(date));
+                break;
+            case FilterComparison.IsAfter:
+            case FilterComparison.GreaterThan:
+                subQuery = subQuery.Where(s => s.MaxDate != null && s.MaxDate > date);
+                break;
+            case FilterComparison.GreaterThanEqual:
+                subQuery = subQuery.Where(s => s.MaxDate != null && s.MaxDate >= date);
+                break;
+            case FilterComparison.IsBefore:
+            case FilterComparison.LessThan:
+                subQuery = subQuery.Where(s => s.MaxDate != null && s.MaxDate < date);
+                break;
+            case FilterComparison.LessThanEqual:
+                subQuery = subQuery.Where(s => s.MaxDate != null && s.MaxDate <= date);
+                break;
+            case FilterComparison.NotEqual:
+                subQuery = subQuery.Where(s => s.MaxDate != null && !s.MaxDate.Equals(date));
+                break;
+            case FilterComparison.Matches:
+            case FilterComparison.Contains:
+            case FilterComparison.NotContains:
+            case FilterComparison.BeginsWith:
+            case FilterComparison.EndsWith:
+            case FilterComparison.IsInLast:
+            case FilterComparison.IsNotInLast:
+            case FilterComparison.MustContains:
+            case FilterComparison.IsEmpty:
+                throw new KavitaException($"{comparison} not applicable for Series.ReadProgress");
+            default:
+                throw new ArgumentOutOfRangeException(nameof(comparison), comparison, null);
+        }
+
+        var ids = subQuery.Select(s => s.Series.Id).ToList();
+        return queryable.Where(s => ids.Contains(s.Id));
+    }

     public static IQueryable<Series> HasReadingDate(this IQueryable<Series> queryable, bool condition,
         FilterComparison comparison, DateTime? date, int userId)
     {
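HasReadLast materializes each series' per-user last-read date (MaxDate) and keeps only the series whose ids survive the comparison against DateTime.Now.AddDays(-timeDeltaDays); a timeDeltaDays of 0 is a no-op. As a hedged usage sketch, "series I haven't read in the last 30 days" maps to LessThan with a 30-day delta. The context and userId names below are assumptions for illustration, not part of the diff:

    // Assumed: context.Series is the EF Core DbSet<Series>, userId is the current user's id.
    var notReadInAMonth = context.Series
        .HasReadLast(true, FilterComparison.LessThan, 30, userId) // last progress older than 30 days
        .Select(s => s.Name)
        .ToList();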
@@ -59,7 +59,8 @@ public class AutoMapperProfiles : Profile
         CreateMap<Series, SeriesDto>();
         CreateMap<CollectionTag, CollectionTagDto>();
         CreateMap<AppUserCollection, AppUserCollectionDto>()
-            .ForMember(dest => dest.Owner, opt => opt.MapFrom(src => src.AppUser.UserName));
+            .ForMember(dest => dest.Owner, opt => opt.MapFrom(src => src.AppUser.UserName))
+            .ForMember(dest => dest.ItemCount, opt => opt.MapFrom(src => src.Items.Count));
         CreateMap<Person, PersonDto>();
         CreateMap<Genre, GenreTagDto>();
         CreateMap<Tag, TagDto>();
@@ -266,7 +267,8 @@ public class AutoMapperProfiles : Profile

         CreateMap<AppUserBookmark, BookmarkDto>();

-        CreateMap<ReadingList, ReadingListDto>();
+        CreateMap<ReadingList, ReadingListDto>()
+            .ForMember(dest => dest.ItemCount, opt => opt.MapFrom(src => src.Items.Count));
         CreateMap<ReadingListItem, ReadingListItemDto>();
         CreateMap<ScrobbleError, ScrobbleErrorDto>();
         CreateMap<ChapterDto, TachiyomiChapterDto>();
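Both new ForMember calls (here and in the AppUserCollection mapping above) project Items.Count into the DTO's new ItemCount property, so list sizes travel with the mapped object. A self-contained sketch of the same AutoMapper pattern, using stand-in Source/Dest types rather than Kavita's entities:

    using System.Collections.Generic;
    using AutoMapper;

    public class Source { public List<int> Items { get; set; } = new(); }
    public class Dest { public int ItemCount { get; set; } }

    public static class ItemCountMappingSketch
    {
        public static int Run()
        {
            var config = new MapperConfiguration(cfg =>
                cfg.CreateMap<Source, Dest>()
                    .ForMember(d => d.ItemCount, opt => opt.MapFrom(s => s.Items.Count)));
            var mapper = config.CreateMapper();

            var dto = mapper.Map<Dest>(new Source { Items = { 1, 2, 3 } });
            return dto.ItemCount; // 3
        }
    }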
@@ -101,6 +101,7 @@ public static class FilterFieldValueConverter
             FilterField.WantToRead => bool.Parse(value),
             FilterField.ReadProgress => string.IsNullOrEmpty(value) ? 0f : value.AsFloat(),
             FilterField.ReadingDate => DateTime.Parse(value),
+            FilterField.ReadLast => int.Parse(value),
             FilterField.Formats => value.Split(',')
                 .Select(x => (MangaFormat) Enum.Parse(typeof(MangaFormat), x))
                 .ToList(),
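The new converter arm treats a ReadLast statement value as a whole number of days, which the repository then forwards to HasReadLast as (int) value. A small illustrative fragment (the literal "30" is made up):

    // requires using System;
    var raw = "30";                        // value stored on the smart-filter statement
    var timeDeltaDays = int.Parse(raw);    // 30; a value of 0 makes HasReadLast a no-op
    var cutoff = DateTime.Now.AddDays(-timeDeltaDays);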
@@ -613,7 +613,7 @@ public class ParseScannedFiles
         }

         // Remove or clear any scan results that now have no ParserInfos after merging
-        return scanResults.Where(sr => sr.ParserInfos.Any()).ToList();
+        return scanResults.Where(sr => sr.ParserInfos.Count > 0).ToList();
     }

     private static List<ParserInfo> GetRelevantInfos(List<ParserInfo> allInfos)
@@ -665,10 +665,11 @@ public class ParseScannedFiles
         }
     }

-    private void RemapSeries(IList<ScanResult> scanResults, List<ParserInfo> allInfos, string localizedSeries, string nonLocalizedSeries)
+    private static void RemapSeries(IList<ScanResult> scanResults, List<ParserInfo> allInfos, string localizedSeries, string nonLocalizedSeries)
     {
         // Find all infos that need to be remapped from the localized series to the non-localized series
-        var seriesToBeRemapped = allInfos.Where(i => i.Series.Equals(localizedSeries)).ToList();
+        var normalizedLocalizedSeries = localizedSeries.ToNormalized();
+        var seriesToBeRemapped = allInfos.Where(i => i.Series.ToNormalized().Equals(normalizedLocalizedSeries)).ToList();

         foreach (var infoNeedingMapping in seriesToBeRemapped)
         {
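Making RemapSeries compare normalized names means localized titles that differ only in casing, spacing, or punctuation still get remapped. ToNormalized() is Kavita's own extension; the helper below is a rough stand-in to show the idea, not its actual implementation:

    // requires using System.Linq;
    // Rough approximation of normalization: lower-case and keep only letters and digits.
    static string Normalize(string s) =>
        new string(s.ToLowerInvariant().Where(char.IsLetterOrDigit).ToArray());

    // "Kimi no Na wa." and "kimi no na wa" now compare equal, so the info is remapped.
    var match = Normalize("Kimi no Na wa.").Equals(Normalize("kimi no na wa")); // true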
@@ -9,7 +9,7 @@ public class ImageParser(IDirectoryService directoryService) : DefaultParser(dir
 {
     public override ParserInfo? Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo? comicInfo = null)
     {
-        if (type != LibraryType.Image || !Parser.IsImage(filePath)) return null;
+        if (!IsApplicable(filePath, type)) return null;

         var directoryName = directoryService.FileSystem.DirectoryInfo.New(rootPath).Name;
         var fileName = directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath);
@@ -29,7 +29,7 @@ public class ImageParser(IDirectoryService directoryService) : DefaultParser(dir
         if (IsEmptyOrDefault(ret.Volumes, ret.Chapters))
         {
             ret.IsSpecial = true;
-            ret.Volumes = $"{Parser.SpecialVolumeNumber}";
+            ret.Volumes = Parser.SpecialVolume;
         }

         // Override the series name, as fallback folders needs it to try and parse folder name
@@ -38,6 +38,7 @@ public class ImageParser(IDirectoryService directoryService) : DefaultParser(dir
             ret.Series = Parser.CleanTitle(directoryName, replaceSpecials: false);
         }

+
         return string.IsNullOrEmpty(ret.Series) ? null : ret;
     }

@@ -722,78 +722,64 @@ public class ProcessSeries : IProcessSeries
         }

+        RemoveChapters(volume, parsedInfos);

-        // // Update all the metadata on the Chapters
-        // foreach (var chapter in volume.Chapters)
-        // {
-        // var firstFile = chapter.Files.MinBy(x => x.Chapter);
-        // if (firstFile == null || _cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, forceUpdate, firstFile)) continue;
-        // try
-        // {
-        // var firstChapterInfo = infos.SingleOrDefault(i => i.FullFilePath.Equals(firstFile.FilePath));
-        // await UpdateChapterFromComicInfo(chapter, firstChapterInfo?.ComicInfo, forceUpdate);
-        // }
-        // catch (Exception ex)
-        // {
-        // _logger.LogError(ex, "There was some issue when updating chapter's metadata");
-        // }
-        // }
     }

     private void RemoveChapters(Volume volume, IList<ParserInfo> parsedInfos)
     {
         // Remove chapters that aren't in parsedInfos or have no files linked
+        // Chapters to remove after enumeration
+        var chaptersToRemove = new List<Chapter>();

         var existingChapters = volume.Chapters;

         // Extract the directories (without filenames) from parserInfos
         var parsedDirectories = parsedInfos
-            .Select(p => Path.GetDirectoryName(p.FullFilePath)) // Get directory path
+            .Select(p => Path.GetDirectoryName(p.FullFilePath))
             .Distinct()
             .ToList();

         foreach (var existingChapter in existingChapters)
         {
             // Get the directories for the files in the current chapter
             var chapterFileDirectories = existingChapter.Files
-                .Select(f => Path.GetDirectoryName(f.FilePath)) // Get directory path minus the filename
+                .Select(f => Path.GetDirectoryName(f.FilePath))
                 .Distinct()
                 .ToList();

             // Check if any of the chapter's file directories match the parsedDirectories
             var hasMatchingDirectory = chapterFileDirectories.Exists(dir => parsedDirectories.Contains(dir));

             if (hasMatchingDirectory)
             {
                 // Ensure we remove any files that no longer exist AND order the remaining files
                 existingChapter.Files = existingChapter.Files
                     .Where(f => parsedInfos.Any(p => Parser.Parser.NormalizePath(p.FullFilePath) == Parser.Parser.NormalizePath(f.FilePath)))
                     .OrderByNatural(f => f.FilePath)
                     .ToList();

                 // Update the chapter's page count after filtering the files
                 existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages);

                 // If no files remain after filtering, remove the chapter
                 if (existingChapter.Files.Count != 0) continue;

                 _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}",
                     existingChapter.Range, volume.Name, parsedInfos[0].Series);
-                volume.Chapters.Remove(existingChapter);
+                chaptersToRemove.Add(existingChapter); // Mark chapter for removal
             }
             else
             {
                 // If there are no matching directories in the current scan, check if the files still exist on disk
                 var filesExist = existingChapter.Files.Any(f => File.Exists(f.FilePath));

                 // If no files exist, remove the chapter
                 if (filesExist) continue;

                 _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName} as no files exist",
                     existingChapter.Range, volume.Name, parsedInfos[0].Series);
-                volume.Chapters.Remove(existingChapter);
+                chaptersToRemove.Add(existingChapter); // Mark chapter for removal
             }
         }

+        // Remove chapters after the loop to avoid modifying the collection during enumeration
+        foreach (var chapter in chaptersToRemove)
+        {
+            volume.Chapters.Remove(chapter);
+        }
     }


     private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info, bool forceUpdate = false)
     {
         chapter.Files ??= new List<MangaFile>();
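The substance of the RemoveChapters change is deferring volume.Chapters.Remove until after the foreach completes: removing from a List<T> while enumerating it throws an InvalidOperationException. A minimal, self-contained sketch of the same collect-then-remove pattern with stand-in data (not Kavita's types):

    using System;
    using System.Collections.Generic;

    var chapters = new List<string> { "1", "2", "3" };

    // Collect items to delete while enumerating...
    var toRemove = new List<string>();
    foreach (var chapter in chapters)
    {
        if (chapter == "2") toRemove.Add(chapter);
    }

    // ...then mutate the collection only after enumeration has finished.
    foreach (var chapter in toRemove)
    {
        chapters.Remove(chapter);
    }

    Console.WriteLine(string.Join(",", chapters)); // 1,3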