.kavitaignore no more (#2442)

This commit is contained in:
Joe Milazzo 2023-11-19 12:15:32 -06:00 committed by GitHub
parent cd27efecdd
commit 7221501c4d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
91 changed files with 5968 additions and 1026 deletions

View file

@ -84,6 +84,15 @@ public class LibraryController : BaseApiController
.WIthAllowScrobbling(dto.AllowScrobbling)
.Build();
library.LibraryFileTypes = dto.FileGroupTypes
.Select(t => new LibraryFileTypeGroup() {FileTypeGroup = t, LibraryId = library.Id})
.Distinct()
.ToList();
library.LibraryExcludePatterns = dto.ExcludePatterns
.Select(t => new LibraryExcludePattern() {Pattern = t, LibraryId = library.Id})
.Distinct()
.ToList();
// Override Scrobbling for Comic libraries since there are no providers to scrobble to
if (library.Type == LibraryType.Comic)
{
@ -415,7 +424,7 @@ public class LibraryController : BaseApiController
public async Task<ActionResult> UpdateLibrary(UpdateLibraryDto dto)
{
var userId = User.GetUserId();
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(dto.Id, LibraryIncludes.Folders);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(dto.Id, LibraryIncludes.Folders | LibraryIncludes.FileTypes);
if (library == null) return BadRequest(await _localizationService.Translate(userId, "library-doesnt-exist"));
var newName = dto.Name.Trim();
@ -437,6 +446,15 @@ public class LibraryController : BaseApiController
library.ManageCollections = dto.ManageCollections;
library.ManageReadingLists = dto.ManageReadingLists;
library.AllowScrobbling = dto.AllowScrobbling;
library.LibraryFileTypes = dto.FileGroupTypes
.Select(t => new LibraryFileTypeGroup() {FileTypeGroup = t, LibraryId = library.Id})
.Distinct()
.ToList();
library.LibraryExcludePatterns = dto.ExcludePatterns
.Select(t => new LibraryExcludePattern() {Pattern = t, LibraryId = library.Id})
.Distinct()
.ToList();
// Override Scrobbling for Comic libraries since there are no providers to scrobble to
if (library.Type == LibraryType.Comic)

View file

@ -1,4 +1,5 @@
using System;
using System.Collections;
using System.Collections.Generic;
using API.Entities.Enums;
@ -51,4 +52,12 @@ public class LibraryDto
/// When showing series, only parent series or series with no relationships will be returned
/// </summary>
public bool CollapseSeriesRelationships { get; set; } = false;
/// <summary>
/// The file type groups the library will scan for
/// </summary>
public ICollection<FileTypeGroup> LibraryFileTypes { get; set; }
/// <summary>
/// A set of globs that will exclude matching content from being scanned
/// </summary>
public ICollection<string> ExcludePatterns { get; set; }
}

View file

@ -28,4 +28,13 @@ public class UpdateLibraryDto
public bool ManageReadingLists { get; init; }
[Required]
public bool AllowScrobbling { get; init; }
/// <summary>
/// What types of files to allow the scanner to pick up
/// </summary>
[Required]
public ICollection<FileTypeGroup> FileGroupTypes { get; init; }
/// <summary>
/// A set of glob patterns that the scanner will exclude from processing
/// </summary>
public ICollection<string> ExcludePatterns { get; init; }
}
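
For illustration, a minimal sketch (assumed usage, not part of this commit) of how a caller might populate the two new fields; Id, Name and the other properties come from the existing DTO, and the glob value is hypothetical:

var dto = new UpdateLibraryDto
{
    Id = 1,
    Name = "Manga",
    AllowScrobbling = true,
    ManageCollections = true,
    ManageReadingLists = true,
    // New in this commit: restrict the scanner to archives and epubs
    FileGroupTypes = new List<FileTypeGroup> { FileTypeGroup.Archive, FileTypeGroup.Epub },
    // New in this commit: glob patterns to skip (value is hypothetical)
    ExcludePatterns = new List<string> { "**/Extras/**" }
};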

View file

@ -1,136 +0,0 @@
using System;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.7 introduced UTC dates and GMT+1 users would sometimes have dates stored as '0000-12-31 23:00:00'.
/// This Migration will update those dates.
/// </summary>
// ReSharper disable once InconsistentNaming
public static class MigrateBrokenGMT1Dates
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
// if current version is > 0.7, then we can exit and not perform
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (Version.Parse(settings.InstallVersion) > new Version(0, 7, 0, 2))
{
return;
}
logger.LogCritical("Running MigrateBrokenGMT1Dates migration. Please be patient, this may take some time depending on the size of your library. Do not abort, this can break your Database");
#region Series
logger.LogInformation("Updating Dates on Series...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Series SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE Series SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
UPDATE Series SET LastChapterAddedUtc = '0001-01-01 00:00:00' WHERE LastChapterAddedUtc = '0000-12-31 23:00:00';
UPDATE Series SET LastFolderScannedUtc = '0001-01-01 00:00:00' WHERE LastFolderScannedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on Series...Done");
#endregion
#region Library
logger.LogInformation("Updating Dates on Libraries...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Library SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE Library SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on Libraries...Done");
#endregion
#region Volume
try
{
logger.LogInformation("Updating Dates on Volumes...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Volume SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE Volume SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on Volumes...Done");
}
catch (Exception ex)
{
logger.LogCritical(ex, "Updating Dates on Volumes...Failed");
}
#endregion
#region Chapter
try
{
logger.LogInformation("Updating Dates on Chapters...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Chapter SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE Chapter SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on Chapters...Done");
}
catch (Exception ex)
{
logger.LogCritical(ex, "Updating Dates on Chapters...Failed");
}
#endregion
#region AppUserBookmark
logger.LogInformation("Updating Dates on Bookmarks...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE AppUserBookmark SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE AppUserBookmark SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on Bookmarks...Done");
#endregion
#region AppUserProgress
logger.LogInformation("Updating Dates on Progress...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE AppUserProgresses SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE AppUserProgresses SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on Progress...Done");
#endregion
#region Device
logger.LogInformation("Updating Dates on Device...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Device SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE Device SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
UPDATE Device SET LastUsedUtc = '0001-01-01 00:00:00' WHERE LastUsedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on Device...Done");
#endregion
#region MangaFile
logger.LogInformation("Updating Dates on MangaFile...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE MangaFile SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE MangaFile SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
UPDATE MangaFile SET LastFileAnalysisUtc = '0001-01-01 00:00:00' WHERE LastFileAnalysisUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on MangaFile...Done");
#endregion
#region ReadingList
logger.LogInformation("Updating Dates on ReadingList...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE ReadingList SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE ReadingList SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on ReadingList...Done");
#endregion
#region SiteTheme
logger.LogInformation("Updating Dates on SiteTheme...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE SiteTheme SET CreatedUtc = '0001-01-01 00:00:00' WHERE CreatedUtc = '0000-12-31 23:00:00';
UPDATE SiteTheme SET LastModifiedUtc = '0001-01-01 00:00:00' WHERE LastModifiedUtc = '0000-12-31 23:00:00';
");
logger.LogInformation("Updating Dates on SiteTheme...Done");
#endregion
logger.LogInformation("MigrateBrokenGMT1Dates migration finished");
}
}

View file

@ -1,30 +0,0 @@
using System.Threading.Tasks;
using API.Constants;
using API.Entities;
using Microsoft.AspNetCore.Identity;
namespace API.Data.ManualMigrations;
/// <summary>
/// New role introduced in v0.5.1. Adds the role to all users.
/// </summary>
public static class MigrateChangePasswordRoles
{
/// <summary>
/// Will not run if any users have the ChangePassword role already
/// </summary>
/// <param name="unitOfWork"></param>
/// <param name="userManager"></param>
public static async Task Migrate(IUnitOfWork unitOfWork, UserManager<AppUser> userManager)
{
var usersWithRole = await userManager.GetUsersInRoleAsync(PolicyConstants.ChangePasswordRole);
if (usersWithRole.Count != 0) return;
var allUsers = await unitOfWork.UserRepository.GetAllUsersAsync();
foreach (var user in allUsers)
{
await userManager.RemoveFromRoleAsync(user, "ChangePassword");
await userManager.AddToRoleAsync(user, PolicyConstants.ChangePasswordRole);
}
}
}

View file

@ -1,36 +0,0 @@
using System.Threading.Tasks;
using API.Constants;
using API.Entities;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// New role introduced in v0.6. Adds the role to all users.
/// </summary>
public static class MigrateChangeRestrictionRoles
{
/// <summary>
/// Will not run if any users have the <see cref="PolicyConstants.ChangeRestrictionRole"/> role already
/// </summary>
/// <param name="unitOfWork"></param>
/// <param name="userManager"></param>
/// <param name="logger"></param>
public static async Task Migrate(IUnitOfWork unitOfWork, UserManager<AppUser> userManager, ILogger<Program> logger)
{
var usersWithRole = await userManager.GetUsersInRoleAsync(PolicyConstants.ChangeRestrictionRole);
if (usersWithRole.Count != 0) return;
logger.LogCritical("Running MigrateChangeRestrictionRoles migration");
var allUsers = await unitOfWork.UserRepository.GetAllUsersAsync();
foreach (var user in allUsers)
{
await userManager.RemoveFromRoleAsync(user, PolicyConstants.ChangeRestrictionRole);
await userManager.AddToRoleAsync(user, PolicyConstants.ChangeRestrictionRole);
}
logger.LogInformation("MigrateChangeRestrictionRoles migration complete");
}
}

View file

@ -1,35 +0,0 @@
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.7.8.6 explicitly introduced DashboardStream and v0.7.8.9 changed the default seed titles to use locale strings.
/// This migration will target nightly releases and should be removed before v0.7.9 release.
/// </summary>
public static class MigrateDashboardStreamNamesToLocaleKeys
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
var allStreams = await unitOfWork.UserRepository.GetAllDashboardStreams();
if (!allStreams.Any(s => s.Name.Equals("On Deck"))) return;
logger.LogCritical("Running MigrateDashboardStreamNamesToLocaleKeys migration. Please be patient, this may take some time depending on the size of your library. Do not abort, this can break your Database");
foreach (var stream in allStreams.Where(s => s.IsProvided))
{
stream.Name = stream.Name switch
{
"On Deck" => "on-deck",
"Recently Updated" => "recently-updated",
"Newly Added" => "newly-added",
"More In" => "more-in-genre",
_ => stream.Name
};
unitOfWork.UserRepository.Update(stream);
}
await unitOfWork.CommitAsync();
logger.LogInformation("MigrateDashboardStreamNamesToLocaleKeys migration finished");
}
}

View file

@ -1,38 +0,0 @@
using System.Linq;
using System.Threading.Tasks;
using API.Entities.Enums;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.7.4 introduced Scrobbling with Kavita+. By default, it is on, but Comic libraries have no scrobble providers, so disable
/// </summary>
public static class MigrateDisableScrobblingOnComicLibraries
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
if (!await dataContext.Library.Where(s => s.Type == LibraryType.Comic).Where(l => l.AllowScrobbling).AnyAsync())
{
return;
}
logger.LogInformation("Running MigrateDisableScrobblingOnComicLibraries migration. Please be patient, this may take some time");
foreach (var lib in await dataContext.Library.Where(s => s.Type == LibraryType.Comic).Where(l => l.AllowScrobbling).ToListAsync())
{
lib.AllowScrobbling = false;
unitOfWork.LibraryRepository.Update(lib);
}
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
logger.LogInformation("MigrateDisableScrobblingOnComicLibraries migration finished");
}
}

View file

@ -1,32 +0,0 @@
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// Introduced in v0.7.5.6 and v0.7.6, Ratings > 0 need to have "HasRatingSet"
/// </summary>
/// <remarks>Added in v0.7.5.6</remarks>
// ReSharper disable once InconsistentNaming
public static class MigrateExistingRatings
{
public static async Task Migrate(DataContext context, ILogger<Program> logger)
{
logger.LogCritical("Running MigrateExistingRatings migration - Please be patient, this may take some time. This is not an error");
foreach (var r in context.AppUserRating.Where(r => r.Rating > 0f))
{
r.HasBeenRated = true;
context.Entry(r).State = EntityState.Modified;
}
if (context.ChangeTracker.HasChanges())
{
await context.SaveChangesAsync();
}
logger.LogCritical("Running MigrateExistingRatings migration - Completed. This is not an error");
}
}

View file

@ -0,0 +1,67 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.Enums;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// Introduced in v0.7.11 with the removal of .Kavitaignore files
/// </summary>
public static class MigrateLibrariesToHaveAllFileTypes
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
logger.LogCritical("Running MigrateLibrariesToHaveAllFileTypes migration - Please be patient, this may take some time. This is not an error");
var allLibs = await dataContext.Library.Include(l => l.LibraryFileTypes).ToListAsync();
foreach (var library in allLibs.Where(library => library.LibraryFileTypes.Count == 0))
{
switch (library.Type)
{
case LibraryType.Manga:
case LibraryType.Comic:
library.LibraryFileTypes.Add(new LibraryFileTypeGroup()
{
FileTypeGroup = FileTypeGroup.Archive
});
library.LibraryFileTypes.Add(new LibraryFileTypeGroup()
{
FileTypeGroup = FileTypeGroup.Epub
});
library.LibraryFileTypes.Add(new LibraryFileTypeGroup()
{
FileTypeGroup = FileTypeGroup.Images
});
library.LibraryFileTypes.Add(new LibraryFileTypeGroup()
{
FileTypeGroup = FileTypeGroup.Pdf
});
break;
case LibraryType.Book:
library.LibraryFileTypes.Add(new LibraryFileTypeGroup()
{
FileTypeGroup = FileTypeGroup.Pdf
});
library.LibraryFileTypes.Add(new LibraryFileTypeGroup()
{
FileTypeGroup = FileTypeGroup.Epub
});
break;
case LibraryType.Image:
library.LibraryFileTypes.Add(new LibraryFileTypeGroup()
{
FileTypeGroup = FileTypeGroup.Images
});
break;
default:
throw new ArgumentOutOfRangeException();
}
}
await dataContext.SaveChangesAsync();
logger.LogCritical("Running MigrateLibrariesToHaveAllFileTypes migration - Completed. This is not an error");
}
}
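
As a follow-up sketch (assumed, not part of the commit), one way to confirm the seeding took effect using only the EF calls already shown above:

// Assumed verification: count libraries that still have no file type groups
// after MigrateLibrariesToHaveAllFileTypes has run.
var unseeded = await dataContext.Library
    .Where(l => l.LibraryFileTypes.Count == 0)
    .CountAsync();
logger.LogInformation("Libraries without file type groups: {Count}", unseeded);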

View file

@ -1,36 +0,0 @@
using System.Threading.Tasks;
using API.Constants;
using API.Entities;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// Added in v0.7.1.18
/// </summary>
public static class MigrateLoginRoles
{
/// <summary>
/// Will not run if any users have the <see cref="PolicyConstants.LoginRole"/> role already
/// </summary>
/// <param name="unitOfWork"></param>
/// <param name="userManager"></param>
/// <param name="logger"></param>
public static async Task Migrate(IUnitOfWork unitOfWork, UserManager<AppUser> userManager, ILogger<Program> logger)
{
var usersWithRole = await userManager.GetUsersInRoleAsync(PolicyConstants.LoginRole);
if (usersWithRole.Count != 0) return;
logger.LogCritical("Running MigrateLoginRoles migration");
var allUsers = await unitOfWork.UserRepository.GetAllUsersAsync();
foreach (var user in allUsers)
{
await userManager.RemoveFromRoleAsync(user, PolicyConstants.LoginRole);
await userManager.AddToRoleAsync(user, PolicyConstants.LoginRole);
}
logger.LogInformation("MigrateLoginRoles migration complete");
}
}

View file

@ -1,118 +0,0 @@
using System;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.6.0 introduced a change in how Normalization works and hence every normalized field needs to be re-calculated
/// </summary>
public static class MigrateNormalizedEverything
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
// if current version is > 0.5.6.5, then we can exit and not perform
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (Version.Parse(settings.InstallVersion) > new Version(0, 5, 6, 5))
{
return;
}
logger.LogCritical("Running MigrateNormalizedEverything migration. Please be patient, this may take some time depending on the size of your library. Do not abort, this can break your Database");
logger.LogInformation("Updating Normalization on Series...");
foreach (var series in await dataContext.Series.ToListAsync())
{
series.NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.LocalizedName ?? string.Empty);
series.NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name ?? string.Empty);
logger.LogInformation("Updated Series: {SeriesName}", series.Name);
unitOfWork.SeriesRepository.Update(series);
}
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
logger.LogInformation("Updating Normalization on Series...Done");
// Genres
logger.LogInformation("Updating Normalization on Genres...");
foreach (var genre in await dataContext.Genre.ToListAsync())
{
genre.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(genre.Title ?? string.Empty);
logger.LogInformation("Updated Genre: {Genre}", genre.Title);
unitOfWork.GenreRepository.Attach(genre);
}
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
logger.LogInformation("Updating Normalization on Genres...Done");
// Tags
logger.LogInformation("Updating Normalization on Tags...");
foreach (var tag in await dataContext.Tag.ToListAsync())
{
tag.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(tag.Title ?? string.Empty);
logger.LogInformation("Updated Tag: {Tag}", tag.Title);
unitOfWork.TagRepository.Attach(tag);
}
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
logger.LogInformation("Updating Normalization on Tags...Done");
// People
logger.LogInformation("Updating Normalization on People...");
foreach (var person in await dataContext.Person.ToListAsync())
{
person.NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(person.Name ?? string.Empty);
logger.LogInformation("Updated Person: {Person}", person.Name);
unitOfWork.PersonRepository.Attach(person);
}
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
logger.LogInformation("Updating Normalization on People...Done");
// Collections
logger.LogInformation("Updating Normalization on Collections...");
foreach (var collection in await dataContext.CollectionTag.ToListAsync())
{
collection.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(collection.Title ?? string.Empty);
logger.LogInformation("Updated Collection: {Collection}", collection.Title);
unitOfWork.CollectionTagRepository.Update(collection);
}
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
logger.LogInformation("Updating Normalization on Collections...Done");
// Reading Lists
logger.LogInformation("Updating Normalization on Reading Lists...");
foreach (var readingList in await dataContext.ReadingList.ToListAsync())
{
readingList.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(readingList.Title ?? string.Empty);
logger.LogInformation("Updated Reading List: {ReadingList}", readingList.Title);
unitOfWork.ReadingListRepository.Update(readingList);
}
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
logger.LogInformation("Updating Normalization on Reading Lists...Done");
logger.LogInformation("MigrateNormalizedEverything migration finished");
}
}

View file

@ -1,38 +0,0 @@
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.5.6 introduced Normalized Localized Name, which allows for faster lookups and less memory usage. This migration will calculate them once
/// </summary>
public static class MigrateNormalizedLocalizedName
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
if (!await dataContext.Series.Where(s => s.NormalizedLocalizedName == null).AnyAsync())
{
return;
}
logger.LogInformation("Running MigrateNormalizedLocalizedName migration. Please be patient, this may take some time");
foreach (var series in await dataContext.Series.ToListAsync())
{
series.NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.LocalizedName ?? string.Empty);
logger.LogInformation("Updated {SeriesName} normalized localized name: {LocalizedName}", series.Name, series.NormalizedLocalizedName);
unitOfWork.SeriesRepository.Update(series);
}
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
logger.LogInformation("MigrateNormalizedLocalizedName migration finished");
}
}

View file

@ -1,40 +0,0 @@
using System;
using System.Threading.Tasks;
using API.Services;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// New role introduced in v0.6. Calculates the Age Rating on all Reading Lists
/// </summary>
public static class MigrateReadingListAgeRating
{
/// <summary>
/// Will not run if any above v0.5.6.24 or v0.6.0
/// </summary>
/// <param name="unitOfWork"></param>
/// <param name="context"></param>
/// <param name="readingListService"></param>
/// <param name="logger"></param>
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext context, IReadingListService readingListService, ILogger<Program> logger)
{
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (Version.Parse(settings.InstallVersion) > new Version(0, 5, 6, 26))
{
return;
}
logger.LogInformation("MigrateReadingListAgeRating migration starting");
var readingLists = await context.ReadingList.Include(r => r.Items).ToListAsync();
foreach (var readingList in readingLists)
{
await readingListService.CalculateReadingListAgeRating(readingList);
context.ReadingList.Update(readingList);
}
await context.SaveChangesAsync();
logger.LogInformation("MigrateReadingListAgeRating migration complete");
}
}

View file

@ -1,55 +0,0 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Services.Tasks;
namespace API.Data.ManualMigrations;
/// <summary>
/// In v0.5.3, we removed Light and E-Ink themes. This migration will remove the themes from the DB and default anyone on
/// null, E-Ink, or Light to Dark.
/// </summary>
public static class MigrateRemoveExtraThemes
{
public static async Task Migrate(IUnitOfWork unitOfWork, IThemeService themeService)
{
var themes = (await unitOfWork.SiteThemeRepository.GetThemes()).ToList();
if (themes.Find(t => t.Name.Equals("Light")) == null)
{
return;
}
Console.WriteLine("Removing Dark and E-Ink themes");
var darkTheme = themes.Single(t => t.Name.Equals("Dark"));
var lightTheme = themes.Single(t => t.Name.Equals("Light"));
var eInkTheme = themes.Single(t => t.Name.Equals("E-Ink"));
// Update default theme if it's not Dark or a custom theme
await themeService.UpdateDefault(darkTheme.Id);
// Update all users to Dark theme if they are on Light/E-Ink
foreach (var pref in await unitOfWork.UserRepository.GetAllPreferencesByThemeAsync(lightTheme.Id))
{
pref.Theme = darkTheme;
}
foreach (var pref in await unitOfWork.UserRepository.GetAllPreferencesByThemeAsync(eInkTheme.Id))
{
pref.Theme = darkTheme;
}
// Remove Light/E-Ink themes
foreach (var siteTheme in themes.Where(t => t.Name.Equals("Light") || t.Name.Equals("E-Ink")))
{
unitOfWork.SiteThemeRepository.Remove(siteTheme);
}
// Commit and call it a day
await unitOfWork.CommitAsync();
Console.WriteLine("Completed removing Dark and E-Ink themes");
}
}

View file

@ -1,31 +0,0 @@
using System.Threading.Tasks;
using API.Entities.Enums;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// Added in v0.7.2.7/v0.7.3 in which the ConvertXToWebP Setting keys were removed. This migration will remove them.
/// </summary>
public static class MigrateRemoveWebPSettingRows
{
public static async Task Migrate(IUnitOfWork unitOfWork, ILogger<Program> logger)
{
logger.LogCritical("Running MigrateRemoveWebPSettingRows migration - Please be patient, this may take some time. This is not an error");
var key = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.ConvertBookmarkToWebP);
var key2 = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.ConvertCoverToWebP);
if (key == null && key2 == null)
{
logger.LogCritical("Running MigrateRemoveWebPSettingRows migration - complete. Nothing to do");
return;
}
unitOfWork.SettingsRepository.Remove(key);
unitOfWork.SettingsRepository.Remove(key2);
await unitOfWork.CommitAsync();
logger.LogCritical("Running MigrateRemoveWebPSettingRows migration - Completed. This is not an error");
}
}

View file

@ -1,153 +0,0 @@
using System;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// Introduced in v0.6.1.38 or v0.7.0,
/// </summary>
public static class MigrateToUtcDates
{
public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger<Program> logger)
{
// if current version is > 0.6.1.38, then we can exit and not perform
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (Version.Parse(settings.InstallVersion) > new Version(0, 6, 1, 38))
{
return;
}
logger.LogCritical("Running MigrateToUtcDates migration. Please be patient, this may take some time depending on the size of your library. Do not abort, this can break your Database");
#region Series
logger.LogInformation("Updating Dates on Series...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Series SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc'),
[LastChapterAddedUtc] = datetime([LastChapterAdded], 'utc'),
[LastFolderScannedUtc] = datetime([LastFolderScanned], 'utc')
;
");
logger.LogInformation("Updating Dates on Series...Done");
#endregion
#region Library
logger.LogInformation("Updating Dates on Libraries...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Library SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc')
;
");
logger.LogInformation("Updating Dates on Libraries...Done");
#endregion
#region Volume
try
{
logger.LogInformation("Updating Dates on Volumes...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Volume SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc');
");
logger.LogInformation("Updating Dates on Volumes...Done");
}
catch (Exception ex)
{
logger.LogCritical(ex, "Updating Dates on Volumes...Failed");
}
#endregion
#region Chapter
try
{
logger.LogInformation("Updating Dates on Chapters...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Chapter SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc')
;
");
logger.LogInformation("Updating Dates on Chapters...Done");
}
catch (Exception ex)
{
logger.LogCritical(ex, "Updating Dates on Chapters...Failed");
}
#endregion
#region AppUserBookmark
logger.LogInformation("Updating Dates on Bookmarks...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE AppUserBookmark SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc')
;
");
logger.LogInformation("Updating Dates on Bookmarks...Done");
#endregion
#region AppUserProgress
logger.LogInformation("Updating Dates on Progress...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE AppUserProgresses SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc')
;
");
logger.LogInformation("Updating Dates on Progress...Done");
#endregion
#region Device
logger.LogInformation("Updating Dates on Device...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE Device SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc'),
[LastUsedUtc] = datetime([LastUsed], 'utc')
;
");
logger.LogInformation("Updating Dates on Device...Done");
#endregion
#region MangaFile
logger.LogInformation("Updating Dates on MangaFile...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE MangaFile SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc'),
[LastFileAnalysisUtc] = datetime([LastFileAnalysis], 'utc')
;
");
logger.LogInformation("Updating Dates on MangaFile...Done");
#endregion
#region ReadingList
logger.LogInformation("Updating Dates on ReadingList...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE ReadingList SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc')
;
");
logger.LogInformation("Updating Dates on ReadingList...Done");
#endregion
#region SiteTheme
logger.LogInformation("Updating Dates on SiteTheme...");
await dataContext.Database.ExecuteSqlRawAsync(@"
UPDATE SiteTheme SET
[LastModifiedUtc] = datetime([LastModified], 'utc'),
[CreatedUtc] = datetime([Created], 'utc')
;
");
logger.LogInformation("Updating Dates on SiteTheme...Done");
#endregion
logger.LogInformation("MigrateToUtcDates migration finished");
}
}

View file

@ -1,34 +0,0 @@
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// Introduced in v0.6.1.8 and v0.7, this adds library ids to all User Progress to allow for easier queries against progress
/// </summary>
public static class MigrateUserProgressLibraryId
{
public static async Task Migrate(IUnitOfWork unitOfWork, ILogger<Program> logger)
{
logger.LogCritical("Running MigrateUserProgressLibraryId migration - Please be patient, this may take some time. This is not an error");
var progress = await unitOfWork.AppUserProgressRepository.GetAnyProgress();
if (progress == null || progress.LibraryId != 0)
{
logger.LogCritical("Running MigrateUserProgressLibraryId migration - complete. Nothing to do");
return;
}
var seriesIdsWithLibraryIds = await unitOfWork.SeriesRepository.GetLibraryIdsForSeriesAsync();
foreach (var prog in await unitOfWork.AppUserProgressRepository.GetAllProgress())
{
prog.LibraryId = seriesIdsWithLibraryIds[prog.SeriesId];
unitOfWork.AppUserProgressRepository.Update(prog);
}
await unitOfWork.CommitAsync();
logger.LogCritical("Running MigrateSeriesRelationsImport migration - Completed. This is not an error");
}
}

File diff suppressed because it is too large

View file

@ -0,0 +1,46 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class LibraryFileTypes : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "LibraryFileTypeGroup",
columns: table => new
{
Id = table.Column<int>(type: "INTEGER", nullable: false)
.Annotation("Sqlite:Autoincrement", true),
LibraryId = table.Column<int>(type: "INTEGER", nullable: false),
FileTypeGroup = table.Column<int>(type: "INTEGER", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_LibraryFileTypeGroup", x => x.Id);
table.ForeignKey(
name: "FK_LibraryFileTypeGroup_Library_LibraryId",
column: x => x.LibraryId,
principalTable: "Library",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_LibraryFileTypeGroup_LibraryId",
table: "LibraryFileTypeGroup",
column: "LibraryId");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "LibraryFileTypeGroup");
}
}
}

File diff suppressed because it is too large

View file

@ -0,0 +1,46 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class LibraryExcludePatterns : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "LibraryExcludePattern",
columns: table => new
{
Id = table.Column<int>(type: "INTEGER", nullable: false)
.Annotation("Sqlite:Autoincrement", true),
Pattern = table.Column<string>(type: "TEXT", nullable: true),
LibraryId = table.Column<int>(type: "INTEGER", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_LibraryExcludePattern", x => x.Id);
table.ForeignKey(
name: "FK_LibraryExcludePattern_Library_LibraryId",
column: x => x.LibraryId,
principalTable: "Library",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_LibraryExcludePattern_LibraryId",
table: "LibraryExcludePattern",
column: "LibraryId");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "LibraryExcludePattern");
}
}
}

View file

@ -15,7 +15,7 @@ namespace API.Data.Migrations
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder.HasAnnotation("ProductVersion", "7.0.11");
modelBuilder.HasAnnotation("ProductVersion", "7.0.13");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
@ -893,6 +893,44 @@ namespace API.Data.Migrations
b.ToTable("Library");
});
modelBuilder.Entity("API.Entities.LibraryExcludePattern", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Pattern")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("LibraryExcludePattern");
});
modelBuilder.Entity("API.Entities.LibraryFileTypeGroup", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("FileTypeGroup")
.HasColumnType("INTEGER");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("LibraryFileTypeGroup");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.Property<int>("Id")
@ -2057,6 +2095,28 @@ namespace API.Data.Migrations
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.LibraryExcludePattern", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("LibraryExcludePatterns")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.LibraryFileTypeGroup", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("LibraryFileTypes")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.HasOne("API.Entities.Chapter", "Chapter")
@ -2436,6 +2496,10 @@ namespace API.Data.Migrations
{
b.Navigation("Folders");
b.Navigation("LibraryExcludePatterns");
b.Navigation("LibraryFileTypes");
b.Navigation("Series");
});

View file

@ -26,7 +26,8 @@ public enum LibraryIncludes
Series = 2,
AppUser = 4,
Folders = 8,
// Ratings = 16
FileTypes = 16,
ExcludePatterns = 32
}
public interface ILibraryRepository
@ -86,7 +87,9 @@ public class LibraryRepository : ILibraryRepository
{
return _context.Library
.Include(l => l.AppUsers)
.Where(library => library.AppUsers.Any(x => x.UserName.Equals(userName)))
.Include(l => l.LibraryFileTypes)
.Include(l => l.LibraryExcludePatterns)
.Where(library => library.AppUsers.Any(x => x.UserName!.Equals(userName)))
.OrderBy(l => l.Name)
.ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
.AsSplitQuery()
@ -100,12 +103,10 @@ public class LibraryRepository : ILibraryRepository
/// <returns></returns>
public async Task<IEnumerable<Library>> GetLibrariesAsync(LibraryIncludes includes = LibraryIncludes.None)
{
var query = _context.Library
return await _context.Library
.Include(l => l.AppUsers)
.Select(l => l);
query = AddIncludesToQuery(query, includes);
return await query.ToListAsync();
.Includes(includes)
.ToListAsync();
}
/// <summary>
@ -142,11 +143,10 @@ public class LibraryRepository : ILibraryRepository
public async Task<IEnumerable<Library>> GetLibraryForIdsAsync(IEnumerable<int> libraryIds, LibraryIncludes includes = LibraryIncludes.None)
{
var query = _context.Library
.Where(x => libraryIds.Contains(x.Id));
AddIncludesToQuery(query, includes);
return await query.ToListAsync();
return await _context.Library
.Where(x => libraryIds.Contains(x.Id))
.Includes(includes)
.ToListAsync();
}
public async Task<int> GetTotalFiles()
@ -190,6 +190,7 @@ public class LibraryRepository : ILibraryRepository
{
return await _context.Library
.Include(f => f.Folders)
.Include(l => l.LibraryFileTypes)
.OrderBy(l => l.Name)
.ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
.AsSplitQuery()
@ -201,31 +202,12 @@ public class LibraryRepository : ILibraryRepository
{
var query = _context.Library
.Where(x => x.Id == libraryId);
.Where(x => x.Id == libraryId)
.Includes(includes);
query = AddIncludesToQuery(query, includes);
return await query.SingleOrDefaultAsync();
}
private static IQueryable<Library> AddIncludesToQuery(IQueryable<Library> query, LibraryIncludes includeFlags)
{
if (includeFlags.HasFlag(LibraryIncludes.Folders))
{
query = query.Include(l => l.Folders);
}
if (includeFlags.HasFlag(LibraryIncludes.Series))
{
query = query.Include(l => l.Series);
}
if (includeFlags.HasFlag(LibraryIncludes.AppUser))
{
query = query.Include(l => l.AppUsers);
}
return query.AsSplitQuery();
}
public async Task<bool> LibraryExists(string libraryName)
{

View file

@ -0,0 +1,19 @@
using System.ComponentModel;
namespace API.Entities.Enums;
/// <summary>
/// Represents a set of file types that can be scanned
/// </summary>
public enum FileTypeGroup
{
[Description("Archive")]
Archive = 1,
[Description("EPub")]
Epub = 2,
[Description("Pdf")]
Pdf = 3,
[Description("Images")]
Images = 4
}

View file

@ -2,7 +2,6 @@
using System.Collections.Generic;
using API.Entities.Enums;
using API.Entities.Interfaces;
using Microsoft.EntityFrameworkCore;
namespace API.Entities;
@ -44,6 +43,8 @@ public class Library : IEntityDate
public DateTime Created { get; set; }
public DateTime LastModified { get; set; }
public DateTime CreatedUtc { get; set; }
@ -57,6 +58,8 @@ public class Library : IEntityDate
public ICollection<FolderPath> Folders { get; set; } = null!;
public ICollection<AppUser> AppUsers { get; set; } = null!;
public ICollection<Series> Series { get; set; } = null!;
public ICollection<LibraryFileTypeGroup> LibraryFileTypes { get; set; } = new List<LibraryFileTypeGroup>();
public ICollection<LibraryExcludePattern> LibraryExcludePatterns { get; set; } = new List<LibraryExcludePattern>();
public void UpdateLastModified()
{

View file

@ -0,0 +1,10 @@
namespace API.Entities;
public class LibraryExcludePattern
{
public int Id { get; set; }
public string Pattern { get; set; }
public int LibraryId { get; set; }
public Library Library { get; set; } = null!;
}

View file

@ -0,0 +1,12 @@
using API.Entities.Enums;
namespace API.Entities;
public class LibraryFileTypeGroup
{
public int Id { get; set; }
public FileTypeGroup FileTypeGroup { get; set; }
public int LibraryId { get; set; }
public Library Library { get; set; } = null!;
}

View file

@ -0,0 +1,25 @@
using System;
using API.Entities.Enums;
using API.Services.Tasks.Scanner.Parser;
namespace API.Extensions;
public static class FileTypeGroupExtensions
{
public static string GetRegex(this FileTypeGroup fileTypeGroup)
{
switch (fileTypeGroup)
{
case FileTypeGroup.Archive:
return Parser.ArchiveFileExtensions;
case FileTypeGroup.Epub:
return Parser.EpubFileExtension;
case FileTypeGroup.Pdf:
return Parser.PdfFileExtension;
case FileTypeGroup.Images:
return Parser.ImageFileExtensions;
default:
throw new ArgumentOutOfRangeException(nameof(fileTypeGroup), fileTypeGroup, null);
}
}
}
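
As a usage sketch, this is how the scanner (see ParseScannedFiles further down) combines a library's per-group regexes into a single extension filter; the variable names are illustrative:

// Build one alternation of supported extensions from the library's file type groups,
// e.g. Archive + Epub => @"\.cbz|\.zip|...|\.epub"
var fileExtensions = string.Join("|",
    library.LibraryFileTypes.Select(t => t.FileTypeGroup.GetRegex()));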

View file

@ -173,4 +173,34 @@ public static class IncludesExtensions
return queryable.AsSplitQuery();
}
public static IQueryable<Library> Includes(this IQueryable<Library> query, LibraryIncludes includeFlags)
{
if (includeFlags.HasFlag(LibraryIncludes.Folders))
{
query = query.Include(l => l.Folders);
}
if (includeFlags.HasFlag(LibraryIncludes.FileTypes))
{
query = query.Include(l => l.LibraryFileTypes);
}
if (includeFlags.HasFlag(LibraryIncludes.Series))
{
query = query.Include(l => l.Series);
}
if (includeFlags.HasFlag(LibraryIncludes.AppUser))
{
query = query.Include(l => l.AppUsers);
}
if (includeFlags.HasFlag(LibraryIncludes.ExcludePatterns))
{
query = query.Include(l => l.LibraryExcludePatterns);
}
return query.AsSplitQuery();
}
}
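
For reference, a minimal sketch of how callers use the new flags with this extension (mirroring LibraryRepository.GetLibraryForIdAsync and the ScannerService changes below):

var library = await _context.Library
    .Where(l => l.Id == libraryId)
    .Includes(LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns)
    .SingleOrDefaultAsync();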

View file

@ -208,7 +208,13 @@ public class AutoMapperProfiles : Profile
CreateMap<Library, LibraryDto>()
.ForMember(dest => dest.Folders,
opt =>
opt.MapFrom(src => src.Folders.Select(x => x.Path).ToList()));
opt.MapFrom(src => src.Folders.Select(x => x.Path).ToList()))
.ForMember(dest => dest.LibraryFileTypes,
opt =>
opt.MapFrom(src => src.LibraryFileTypes.Select(l => l.FileTypeGroup)))
.ForMember(dest => dest.ExcludePatterns,
opt =>
opt.MapFrom(src => src.LibraryExcludePatterns.Select(l => l.Pattern)));
CreateMap<AppUser, MemberDto>()
.ForMember(dest => dest.AgeRestriction,

View file

@ -64,7 +64,7 @@ public interface IDirectoryService
IEnumerable<string> GetDirectories(string folderPath);
IEnumerable<string> GetDirectories(string folderPath, GlobMatcher? matcher);
string GetParentDirectoryName(string fileOrFolder);
IList<string> ScanFiles(string folderPath, GlobMatcher? matcher = null);
IList<string> ScanFiles(string folderPath, string fileTypes, GlobMatcher? matcher = null);
DateTime GetLastWriteTime(string folderPath);
GlobMatcher? CreateMatcherFromFile(string filePath);
}
@ -646,7 +646,7 @@ public class DirectoryService : IDirectoryService
/// <param name="folderPath"></param>
/// <param name="matcher"></param>
/// <returns></returns>
public IList<string> ScanFiles(string folderPath, GlobMatcher? matcher = null)
public IList<string> ScanFiles(string folderPath, string supportedExtensions, GlobMatcher? matcher = null)
{
_logger.LogDebug("[ScanFiles] called on {Path}", folderPath);
var files = new List<string>();
@ -667,19 +667,19 @@ public class DirectoryService : IDirectoryService
foreach (var directory in directories)
{
files.AddRange(ScanFiles(directory, matcher));
files.AddRange(ScanFiles(directory, supportedExtensions, matcher));
}
// Get the matcher from either ignore or global (default setup)
if (matcher == null)
{
files.AddRange(GetFilesWithCertainExtensions(folderPath, Tasks.Scanner.Parser.Parser.SupportedExtensions));
files.AddRange(GetFilesWithCertainExtensions(folderPath, supportedExtensions));
}
else
{
var foundFiles = GetFilesWithCertainExtensions(folderPath,
Tasks.Scanner.Parser.Parser.SupportedExtensions)
supportedExtensions)
.Where(file => !matcher.ExcludeMatches(FileSystem.FileInfo.New(file).Name));
files.AddRange(foundFiles);
}
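
A brief usage sketch of the new signature, assuming the Parser constants introduced in this commit; callers now pass the extension filter explicitly instead of relying on Parser.SupportedExtensions:

// Scan only archives and PDFs under folderPath, honoring an optional matcher.
var supported = Parser.ArchiveFileExtensions + "|" + Parser.PdfFileExtension;
var files = _directoryService.ScanFiles(folderPath, supported, matcher);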

View file

@ -4,6 +4,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Services.Tasks.Scanner.Parser;
@ -77,14 +78,30 @@ public class ParseScannedFiles
/// <param name="folderAction">A callback async Task to be called once all files for each folder path are found</param>
/// <param name="forceCheck">If we should bypass any folder last write time checks on the scan and force I/O</param>
public async Task ProcessFiles(string folderPath, bool scanDirectoryByDirectory,
IDictionary<string, IList<SeriesModified>> seriesPaths, Func<IList<string>, string,Task> folderAction, bool forceCheck = false)
IDictionary<string, IList<SeriesModified>> seriesPaths, Func<IList<string>, string,Task> folderAction, Library library, bool forceCheck = false)
{
string normalizedPath;
var fileExtensions = string.Join("|", library.LibraryFileTypes.Select(l => l.FileTypeGroup.GetRegex()));
if (scanDirectoryByDirectory)
{
// This is used in library scan, so we should check first for an ignore file and use that here as well
var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(folderPath, DirectoryService.KavitaIgnoreFile);
var matcher = _directoryService.CreateMatcherFromFile(potentialIgnoreFile);
if (matcher != null)
{
_logger.LogWarning(".kavitaignore found! Ignore files is deprecated in favor of Library Settings. Please update and remove file at {Path}", potentialIgnoreFile);
}
if (library.LibraryExcludePatterns.Count != 0)
{
matcher ??= new GlobMatcher();
foreach (var pattern in library.LibraryExcludePatterns)
{
matcher.AddExclude(pattern.Pattern);
}
}
var directories = _directoryService.GetDirectories(folderPath, matcher).ToList();
foreach (var directory in directories)
@ -97,7 +114,7 @@ public class ParseScannedFiles
else
{
// For a scan, this is doing everything in the directory loop before the folder Action is called...which leads to no progress indication
await folderAction(_directoryService.ScanFiles(directory, matcher), directory);
await folderAction(_directoryService.ScanFiles(directory, fileExtensions, matcher), directory);
}
}
@ -113,7 +130,7 @@ public class ParseScannedFiles
// We need to calculate all folders till library root and see if any kavitaignores
var seriesMatcher = BuildIgnoreFromLibraryRoot(folderPath, seriesPaths);
await folderAction(_directoryService.ScanFiles(folderPath, seriesMatcher), folderPath);
await folderAction(_directoryService.ScanFiles(folderPath, fileExtensions, seriesMatcher), folderPath);
}
/// <summary>
@ -268,25 +285,24 @@ public class ParseScannedFiles
/// <summary>
/// This will process series by folder groups. This is used solely by ScanSeries
/// </summary>
/// <param name="libraryType"></param>
/// <param name="library">This should have the FileTypes included</param>
/// <param name="folders"></param>
/// <param name="libraryName"></param>
/// <param name="isLibraryScan">If true, does a directory scan first (resulting in folders being tackled in parallel), else does an immediate scan files</param>
/// <param name="seriesPaths">A map of Series names -> existing folder paths to handle skipping folders</param>
/// <param name="processSeriesInfos">Action which returns if the folder was skipped and the infos from said folder</param>
/// <param name="forceCheck">Defaults to false</param>
/// <returns></returns>
public async Task ScanLibrariesForSeries(LibraryType libraryType,
IEnumerable<string> folders, string libraryName, bool isLibraryScan,
public async Task ScanLibrariesForSeries(Library library,
IEnumerable<string> folders, bool isLibraryScan,
IDictionary<string, IList<SeriesModified>> seriesPaths, Func<Tuple<bool, IList<ParserInfo>>, Task>? processSeriesInfos, bool forceCheck = false)
{
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", libraryName, ProgressEventType.Started));
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", library.Name, ProgressEventType.Started));
foreach (var folderPath in folders)
{
try
{
await ProcessFiles(folderPath, isLibraryScan, seriesPaths, ProcessFolder, forceCheck);
await ProcessFiles(folderPath, isLibraryScan, seriesPaths, ProcessFolder, library, forceCheck);
}
catch (ArgumentException ex)
{
@ -294,7 +310,7 @@ public class ParseScannedFiles
}
}
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", libraryName, ProgressEventType.Ended));
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", library.Name, ProgressEventType.Ended));
return;
async Task ProcessFolder(IList<string> files, string folder)
@ -311,13 +327,13 @@ public class ParseScannedFiles
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
_logger.LogDebug("[ScannerService] Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent("Skipped " + normalizedFolder, libraryName, ProgressEventType.Updated));
MessageFactory.FileScanProgressEvent("Skipped " + normalizedFolder, library.Name, ProgressEventType.Updated));
return;
}
_logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, folder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent($"{files.Count} files in {folder}", libraryName, ProgressEventType.Updated));
MessageFactory.FileScanProgressEvent($"{files.Count} files in {folder}", library.Name, ProgressEventType.Updated));
if (files.Count == 0)
{
_logger.LogInformation("[ScannerService] {Folder} is empty or is no longer in this location", folder);
@ -326,7 +342,7 @@ public class ParseScannedFiles
var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
var infos = files
.Select(file => _readingItemService.ParseFile(file, folder, libraryType))
.Select(file => _readingItemService.ParseFile(file, folder, library.Type))
.Where(info => info != null)
.ToList();
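
To illustrate the replacement for .kavitaignore, a sketch of how library-level exclude patterns feed the GlobMatcher used in ProcessFiles above; the glob values are hypothetical:

var matcher = new GlobMatcher();
foreach (var pattern in library.LibraryExcludePatterns)
{
    // e.g. "**/cover-backups/**" or "*.txt" (hypothetical patterns)
    matcher.AddExclude(pattern.Pattern);
}
var files = _directoryService.ScanFiles(folderPath, fileExtensions, matcher);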

View file

@ -17,7 +17,9 @@ public static class Parser
public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg|\.webp|\.gif|\.avif)"; // Don't forget to update CoverChooser
public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|\.cb7|\.cbt";
private const string BookFileExtensions = @"\.epub|\.pdf";
public const string EpubFileExtension = @"\.epub";
public const string PdfFileExtension = @"\.pdf";
private const string BookFileExtensions = EpubFileExtension + "|" + PdfFileExtension;
private const string XmlRegexExtensions = @"\.xml";
public const string MacOsMetadataFileStartsWith = @"._";

View file

@ -198,7 +198,7 @@ public class ScannerService : IScannerService
var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
if (series == null) return; // This can occur when UI deletes a series but doesn't update and user re-requests update
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId, LibraryIncludes.Folders);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId, LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
if (library == null) return;
var libraryPaths = library.Folders.Select(f => f.Path).ToList();
if (await ShouldScanSeries(seriesId, library, libraryPaths, series, true) != ScanCancelReason.NoCancel)
@ -229,7 +229,6 @@ public class ScannerService : IScannerService
await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"{series.Name} scan aborted", "Files for series are not in a nested folder under library path. Correct this and rescan."));
return;
}
}
if (string.IsNullOrEmpty(folderPath))
@ -472,7 +471,7 @@ public class ScannerService : IScannerService
public async Task ScanLibrary(int libraryId, bool forceUpdate = false)
{
var sw = Stopwatch.StartNew();
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
var libraryFolderPaths = library!.Folders.Select(fp => fp.Path).ToList();
if (!await CheckMounts(library.Name, libraryFolderPaths)) return;
@ -493,7 +492,7 @@ public class ScannerService : IScannerService
await _processSeries.Prime();
var processTasks = new List<Func<Task>>();
//var processTasks = new List<Func<Task>>();
var scanElapsedTime = await ScanFiles(library, libraryFolderPaths, shouldUseLibraryScan, TrackFiles, forceUpdate);
@ -579,7 +578,7 @@ public class ScannerService : IScannerService
var foundParsedSeries = new ParsedSeries()
{
Name = parsedFiles[0].Series,
NormalizedName = Scanner.Parser.Parser.Normalize(parsedFiles[0].Series),
NormalizedName = Parser.Normalize(parsedFiles[0].Series),
Format = parsedFiles[0].Format,
};
@ -588,7 +587,7 @@ public class ScannerService : IScannerService
seenSeries.AddRange(parsedFiles.Select(pf => new ParsedSeries()
{
Name = pf.Series,
NormalizedName = Scanner.Parser.Parser.Normalize(pf.Series),
NormalizedName = Parser.Normalize(pf.Series),
Format = pf.Format
}));
return;
@ -616,7 +615,7 @@ public class ScannerService : IScannerService
var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService, _eventHub);
var scanWatch = Stopwatch.StartNew();
await scanner.ScanLibrariesForSeries(library.Type, dirs, library.Name,
await scanner.ScanLibrariesForSeries(library, dirs,
isLibraryScan, await _unitOfWork.SeriesRepository.GetFolderPathMap(library.Id), processSeriesInfos, forceChecks);
var scanElapsedTime = scanWatch.ElapsedMilliseconds;

View file

@ -243,6 +243,7 @@ public class Startup
// v0.7.11
await MigrateSmartFilterEncoding.Migrate(unitOfWork, dataContext, logger);
await MigrateLibrariesToHaveAllFileTypes.Migrate(unitOfWork, dataContext, logger);
// Update the version in the DB after all migrations are run
var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion);