More Bugfixes (#2989)

parent 1ae723b405
commit a3e020fe17

49 changed files with 579 additions and 272 deletions
@@ -53,9 +53,9 @@
 </PropertyGroup>

 <ItemGroup>
-<PackageReference Include="CsvHelper" Version="32.0.1" />
-<PackageReference Include="MailKit" Version="4.5.0" />
-<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="8.0.4">
+<PackageReference Include="CsvHelper" Version="32.0.3" />
+<PackageReference Include="MailKit" Version="4.6.0" />
+<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="8.0.6">
 <PrivateAssets>all</PrivateAssets>
 <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
 </PackageReference>
@@ -66,17 +66,17 @@
 <PackageReference Include="Flurl" Version="3.0.7" />
 <PackageReference Include="Flurl.Http" Version="3.2.4" />
 <PackageReference Include="Hangfire" Version="1.8.12" />
-<PackageReference Include="Hangfire.InMemory" Version="0.9.0" />
+<PackageReference Include="Hangfire.InMemory" Version="0.10.0" />
 <PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" />
 <PackageReference Include="Hangfire.Storage.SQLite" Version="0.4.2" />
 <PackageReference Include="HtmlAgilityPack" Version="1.11.61" />
 <PackageReference Include="MarkdownDeep.NET.Core" Version="1.5.0.4" />
 <PackageReference Include="Hangfire.AspNetCore" Version="1.8.12" />
 <PackageReference Include="Microsoft.AspNetCore.SignalR" Version="1.1.0" />
-<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="8.0.4" />
-<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="8.0.4" />
-<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="8.0.4" />
-<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="8.0.4" />
+<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="8.0.6" />
+<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="8.0.6" />
+<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="8.0.6" />
+<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="8.0.6" />
 <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="8.0.0" />
 <PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="3.0.0" />
 <PackageReference Include="MimeTypeMapOfficial" Version="1.0.17" />
@@ -84,7 +84,7 @@
 <PackageReference Include="NetVips" Version="2.4.1" />
 <PackageReference Include="NetVips.Native" Version="8.15.2" />
 <PackageReference Include="NReco.Logging.File" Version="1.2.0" />
-<PackageReference Include="Serilog" Version="3.1.1" />
+<PackageReference Include="Serilog" Version="4.0.0" />
 <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
 <PackageReference Include="Serilog.Enrichers.Thread" Version="3.2.0-dev-00752" />
 <PackageReference Include="Serilog.Extensions.Hosting" Version="8.0.0" />
@@ -95,16 +95,16 @@
 <PackageReference Include="Serilog.Sinks.SignalR.Core" Version="0.1.2" />
 <PackageReference Include="SharpCompress" Version="0.37.2" />
 <PackageReference Include="SixLabors.ImageSharp" Version="3.1.4" />
-<PackageReference Include="SonarAnalyzer.CSharp" Version="9.25.0.90414">
+<PackageReference Include="SonarAnalyzer.CSharp" Version="9.26.0.92422">
 <PrivateAssets>all</PrivateAssets>
 <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
 </PackageReference>
-<PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.1" />
+<PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.2" />
 <PackageReference Include="Swashbuckle.AspNetCore.Filters" Version="8.0.2" />
-<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="7.5.2" />
+<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="7.6.0" />
 <PackageReference Include="System.IO.Abstractions" Version="21.0.2" />
-<PackageReference Include="System.Drawing.Common" Version="8.0.4" />
-<PackageReference Include="VersOne.Epub" Version="3.3.1" />
+<PackageReference Include="System.Drawing.Common" Version="8.0.6" />
+<PackageReference Include="VersOne.Epub" Version="3.3.2" />
 </ItemGroup>

 <ItemGroup>
@@ -853,27 +853,35 @@ public class OpdsController : BaseApiController
 var seriesDetail = await _seriesService.GetSeriesDetail(seriesId, userId);
 foreach (var volume in seriesDetail.Volumes)
 {
-var chapters = await _unitOfWork.ChapterRepository.GetChaptersAsync(volume.Id, ChapterIncludes.Files);
+var chaptersForVolume = await _unitOfWork.ChapterRepository.GetChaptersAsync(volume.Id, ChapterIncludes.Files | ChapterIncludes.People);

-foreach (var chapter in chapters)
+foreach (var chapter in chaptersForVolume)
 {
 var chapterId = chapter.Id;
 var chapterDto = _mapper.Map<ChapterDto>(chapter);
 foreach (var mangaFile in chapter.Files)
 {
-feed.Entries.Add(await CreateChapterWithFile(userId, seriesId, volume.Id, chapterId, mangaFile, series, chapterDto, apiKey, prefix, baseUrl));
+feed.Entries.Add(await CreateChapterWithFile(userId, seriesId, volume.Id, chapterId, _mapper.Map<MangaFileDto>(mangaFile), series,
+chapterDto, apiKey, prefix, baseUrl));
 }
 }

 }

-foreach (var storylineChapter in seriesDetail.StorylineChapters.Where(c => !c.IsSpecial))
+var chapters = seriesDetail.StorylineChapters;
+if (!seriesDetail.StorylineChapters.Any() && seriesDetail.Chapters.Any())
 {
-var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(storylineChapter.Id);
-var chapterDto = _mapper.Map<ChapterDto>(storylineChapter);
+chapters = seriesDetail.Chapters;
 }
+
+foreach (var chapter in chapters.Where(c => !c.IsSpecial))
+{
+var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapter.Id);
+var chapterDto = _mapper.Map<ChapterDto>(chapter);
 foreach (var mangaFile in files)
 {
-feed.Entries.Add(await CreateChapterWithFile(userId, seriesId, storylineChapter.VolumeId, storylineChapter.Id, mangaFile, series, chapterDto, apiKey, prefix, baseUrl));
+feed.Entries.Add(await CreateChapterWithFile(userId, seriesId, chapter.VolumeId, chapter.Id, _mapper.Map<MangaFileDto>(mangaFile), series,
+chapterDto, apiKey, prefix, baseUrl));
 }
 }
@@ -883,7 +891,8 @@ public class OpdsController : BaseApiController
 var chapterDto = _mapper.Map<ChapterDto>(special);
 foreach (var mangaFile in files)
 {
-feed.Entries.Add(await CreateChapterWithFile(userId, seriesId, special.VolumeId, special.Id, mangaFile, series, chapterDto, apiKey, prefix, baseUrl));
+feed.Entries.Add(await CreateChapterWithFile(userId, seriesId, special.VolumeId, special.Id, _mapper.Map<MangaFileDto>(mangaFile), series,
+chapterDto, apiKey, prefix, baseUrl));
 }
 }
@@ -909,9 +918,9 @@ public class OpdsController : BaseApiController
 SetFeedId(feed, $"series-{series.Id}-volume-{volume.Id}-{_seriesService.FormatChapterName(userId, libraryType)}s");
 foreach (var chapter in chapters)
 {
-var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapter.Id);
-var chapterDto = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapter.Id);
-foreach (var mangaFile in files)
+//var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapter.Id);
+var chapterDto = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapter.Id, ChapterIncludes.Files | ChapterIncludes.People);
+foreach (var mangaFile in chapterDto.Files)
 {
 feed.Entries.Add(await CreateChapterWithFile(userId, seriesId, volumeId, chapter.Id, mangaFile, series, chapterDto!, apiKey, prefix, baseUrl));
 }
@@ -928,17 +937,20 @@ public class OpdsController : BaseApiController
 if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
 return BadRequest(await _localizationService.Translate(userId, "opds-disabled"));
 var (baseUrl, prefix) = await GetPrefix();

 var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
 var libraryType = await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(series.LibraryId);
-var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId);
+var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId, ChapterIncludes.Files | ChapterIncludes.People);
+
+if (chapter == null) return BadRequest(await _localizationService.Translate(userId, "chapter-doesnt-exist"));
+
 var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
-var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);

 var feed = CreateFeed(series.Name + " - Volume " + volume!.Name + $" - {_seriesService.FormatChapterName(userId, libraryType)}s",
 $"{prefix}{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}", apiKey, prefix);
 SetFeedId(feed, $"series-{series.Id}-volume-{volumeId}-{_seriesService.FormatChapterName(userId, libraryType)}-{chapterId}-files");
-foreach (var mangaFile in files)
+
+foreach (var mangaFile in chapter.Files)
 {
 feed.Entries.Add(await CreateChapterWithFile(userId, seriesId, volumeId, chapterId, mangaFile, series, chapter, apiKey, prefix, baseUrl));
 }
@@ -1028,22 +1040,30 @@ public class OpdsController : BaseApiController
 Summary = $"Format: {seriesDto.Format}" + (string.IsNullOrWhiteSpace(metadata.Summary)
 ? string.Empty
 : $" Summary: {metadata.Summary}"),
-Authors = metadata.Writers.Select(p => new FeedAuthor()
-{
-Name = p.Name,
-Uri = "http://opds-spec.org/author/" + p.Id
-}).ToList(),
+Authors = metadata.Writers.Select(CreateAuthor).ToList(),
 Categories = metadata.Genres.Select(g => new FeedCategory()
 {
 Label = g.Title,
 Term = string.Empty
 }).ToList(),
-Links = new List<FeedLink>()
-{
-CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/series/{seriesDto.Id}"),
-CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"{baseUrl}api/image/series-cover?seriesId={seriesDto.Id}&apiKey={apiKey}"),
-CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"{baseUrl}api/image/series-cover?seriesId={seriesDto.Id}&apiKey={apiKey}")
-}
+Links =
+[
+CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation,
+$"{prefix}{apiKey}/series/{seriesDto.Id}"),
+CreateLink(FeedLinkRelation.Image, FeedLinkType.Image,
+$"{baseUrl}api/image/series-cover?seriesId={seriesDto.Id}&apiKey={apiKey}"),
+CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image,
+$"{baseUrl}api/image/series-cover?seriesId={seriesDto.Id}&apiKey={apiKey}")
+]
 };
 }

+private static FeedAuthor CreateAuthor(PersonDto person)
+{
+return new FeedAuthor()
+{
+Name = person.Name,
+Uri = "http://opds-spec.org/author/" + person.Id
+};
+}
@@ -1070,6 +1090,7 @@ public class OpdsController : BaseApiController
 Id = chapterId.ToString(),
 Title = title,
 Summary = summary ?? string.Empty,
+
 Links = new List<FeedLink>()
 {
 CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation,
@@ -1082,7 +1103,7 @@ public class OpdsController : BaseApiController
 };
 }

-private async Task<FeedEntry> CreateChapterWithFile(int userId, int seriesId, int volumeId, int chapterId, MangaFile mangaFile, SeriesDto series, ChapterDto chapter, string apiKey, string prefix, string baseUrl)
+private async Task<FeedEntry> CreateChapterWithFile(int userId, int seriesId, int volumeId, int chapterId, MangaFileDto mangaFile, SeriesDto series, ChapterDto chapter, string apiKey, string prefix, string baseUrl)
 {
 var fileSize =
 mangaFile.Bytes > 0 ? DirectoryService.GetHumanReadableBytes(mangaFile.Bytes) :
@@ -1143,7 +1164,8 @@ public class OpdsController : BaseApiController
 {
 Text = fileType,
 Type = "text"
 }
 },
+Authors = chapter.Writers.Select(CreateAuthor).ToList()
 };

 var canPageStream = mangaFile.Extension != ".epub";
@@ -1241,7 +1263,7 @@ public class OpdsController : BaseApiController
 throw new KavitaException(await _localizationService.Get("en", "user-doesnt-exist"));
 }

-private async Task<FeedLink> CreatePageStreamLink(int libraryId, int seriesId, int volumeId, int chapterId, MangaFile mangaFile, string apiKey, string prefix)
+private async Task<FeedLink> CreatePageStreamLink(int libraryId, int seriesId, int volumeId, int chapterId, MangaFileDto mangaFile, string apiKey, string prefix)
 {
 var userId = await GetUser(apiKey);
 var progress = await _unitOfWork.AppUserProgressRepository.GetUserProgressDtoAsync(chapterId, userId);
@@ -263,9 +263,9 @@ public class ReaderController : BaseApiController
 info.Title += " - " + info.ChapterTitle;
 }

-if (info.IsSpecial && dto.VolumeNumber.Equals(Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume))
+if (info.IsSpecial)
 {
-info.Subtitle = info.FileName;
+info.Subtitle = Path.GetFileNameWithoutExtension(info.FileName);
 } else if (!info.IsSpecial && info.VolumeNumber.Equals(Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume))
 {
 info.Subtitle = ReaderService.FormatChapterName(info.LibraryType, true, true) + info.ChapterNumber;
@@ -6,10 +6,23 @@ namespace API.DTOs;
 public class MangaFileDto
 {
 public int Id { get; init; }
+/// <summary>
+/// Absolute path to the archive file (normalized)
+/// </summary>
 public string FilePath { get; init; } = default!;
+/// <summary>
+/// Number of pages for the given file
+/// </summary>
 public int Pages { get; init; }
+/// <summary>
+/// How many bytes make up this file
+/// </summary>
 public long Bytes { get; init; }
 public MangaFormat Format { get; init; }
 public DateTime Created { get; init; }
+/// <summary>
+/// File extension
+/// </summary>
+public string? Extension { get; set; }
+
 }
@@ -1,4 +1,5 @@
-using API.Entities.Enums;
+using System;
+using API.Entities.Enums;
 using API.Services;

 namespace API.DTOs.Settings;
@@ -88,6 +89,14 @@ public class ServerSettingDto
 /// SMTP Configuration
 /// </summary>
 public SmtpConfigDto SmtpConfig { get; set; }
+/// <summary>
+/// The Date Kavita was first installed
+/// </summary>
+public DateTime? FirstInstallDate { get; set; }
+/// <summary>
+/// The Version of Kavita on the first run
+/// </summary>
+public string? FirstInstallVersion { get; set; }

 /// <summary>
 /// Are at least some basics filled in
@@ -1,4 +1,6 @@
-namespace API.DTOs.Stats;
+using System;
+
+namespace API.DTOs.Stats;

 /// <summary>
 /// This is just for the Server tab on UI
@@ -17,5 +19,13 @@ public class ServerInfoSlimDto
 /// Version of Kavita
 /// </summary>
 public required string KavitaVersion { get; set; }
+/// <summary>
+/// The Date Kavita was first installed
+/// </summary>
+public DateTime? FirstInstallDate { get; set; }
+/// <summary>
+/// The Version of Kavita on the first run
+/// </summary>
+public string? FirstInstallVersion { get; set; }

 }
API/Data/ManualMigrations/MigrateInitialInstallData.cs (new file, 53 lines)
@@ -0,0 +1,53 @@
+using System;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using API.Entities;
+using API.Entities.Enums;
+using API.Services;
+using Kavita.Common.EnvironmentInfo;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+
+namespace API.Data.ManualMigrations;
+
+/// <summary>
+/// v0.8.2 I started collecting information on when the user first installed Kavita as a nice to have info for the user
+/// </summary>
+public static class MigrateInitialInstallData
+{
+public static async Task Migrate(DataContext dataContext, ILogger<Program> logger, IDirectoryService directoryService)
+{
+if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateInitialInstallData"))
+{
+return;
+}
+
+logger.LogCritical(
+"Running MigrateInitialInstallData migration - Please be patient, this may take some time. This is not an error");
+
+var settings = await dataContext.ServerSetting.ToListAsync();
+
+// Get the Install Date as Date the DB was written
+var dbFile = Path.Join(directoryService.ConfigDirectory, "kavita.db");
+if (!string.IsNullOrEmpty(dbFile) && directoryService.FileSystem.File.Exists(dbFile))
+{
+var fi = directoryService.FileSystem.FileInfo.New(dbFile);
+var setting = settings.First(s => s.Key == ServerSettingKey.FirstInstallDate);
+setting.Value = fi.CreationTimeUtc.ToString();
+await dataContext.SaveChangesAsync();
+}
+
+
+dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
+{
+Name = "MigrateInitialInstallData",
+ProductVersion = BuildInfo.Version.ToString(),
+RanAt = DateTime.UtcNow
+});
+await dataContext.SaveChangesAsync();
+
+logger.LogCritical(
+"Running MigrateInitialInstallData migration - Completed. This is not an error");
+}
+}
@@ -22,6 +22,7 @@ public enum ChapterIncludes
 None = 1,
 Volumes = 2,
 Files = 4,
+People = 8
 }

 public interface IChapterRepository
@@ -1,5 +1,6 @@
+using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Text.RegularExpressions;
 using System.Threading.Tasks;
@@ -137,6 +138,7 @@ public interface ISeriesRepository
 Task<IList<Series>> GetWantToReadForUserAsync(int userId);
 Task<bool> IsSeriesInWantToRead(int userId, int seriesId);
 Task<Series?> GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None);
+Task<Series?> GetSeriesThatContainsLowestFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None);
 Task<IEnumerable<Series>> GetAllSeriesByNameAsync(IList<string> normalizedNames,
 int userId, SeriesIncludes includes = SeriesIncludes.None);
 Task<Series?> GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId, MangaFormat format, bool withFullIncludes = true);
@@ -1589,14 +1591,29 @@ public class SeriesRepository : ISeriesRepository
 /// <summary>
 /// Return a Series by Folder path. Null if not found.
 /// </summary>
-/// <param name="folder">This will be normalized in the query</param>
+/// <param name="folder">This will be normalized in the query and checked against FolderPath and LowestFolderPath</param>
 /// <param name="includes">Additional relationships to include with the base query</param>
 /// <returns></returns>
 public async Task<Series?> GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None)
 {
 var normalized = Services.Tasks.Scanner.Parser.Parser.NormalizePath(folder);
 if (string.IsNullOrEmpty(normalized)) return null;

 return await _context.Series
-.Where(s => s.FolderPath != null && s.FolderPath.Equals(normalized))
+.Where(s => (!string.IsNullOrEmpty(s.FolderPath) && s.FolderPath.Equals(normalized) || (!string.IsNullOrEmpty(s.LowestFolderPath) && s.LowestFolderPath.Equals(normalized))))
 .Includes(includes)
 .SingleOrDefaultAsync();
 }
+
+public async Task<Series?> GetSeriesThatContainsLowestFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None)
+{
+var normalized = Services.Tasks.Scanner.Parser.Parser.NormalizePath(folder);
+if (string.IsNullOrEmpty(normalized)) return null;
+
+normalized = normalized.TrimEnd('/');
+
+return await _context.Series
+.Where(s => !string.IsNullOrEmpty(s.LowestFolderPath) && EF.Functions.Like(normalized, s.LowestFolderPath + "%"))
+.Includes(includes)
+.SingleOrDefaultAsync();
+}
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
 using System.Collections.Immutable;
 using System.IO;
 using System.Linq;
@@ -251,6 +252,9 @@ public static class Seed
 new() {Key = ServerSettingKey.EmailEnableSsl, Value = "true"},
 new() {Key = ServerSettingKey.EmailSizeLimit, Value = 26_214_400 + string.Empty},
 new() {Key = ServerSettingKey.EmailCustomizedTemplates, Value = "false"},
+new() {Key = ServerSettingKey.FirstInstallVersion, Value = BuildInfo.Version.ToString()},
+new() {Key = ServerSettingKey.FirstInstallDate, Value = DateTime.UtcNow.ToString()},
+
 }.ToArray());

 foreach (var defaultSetting in DefaultSettings)
@@ -186,5 +186,15 @@ public enum ServerSettingKey
 /// When the cleanup task should run - Critical to keeping Kavita working
 /// </summary>
 [Description("TaskCleanup")]
-TaskCleanup = 37
+TaskCleanup = 37,
+/// <summary>
+/// The Date Kavita was first installed
+/// </summary>
+[Description("FirstInstallDate")]
+FirstInstallDate = 38,
+/// <summary>
+/// The Version of Kavita on the first run
+/// </summary>
+[Description("FirstInstallVersion")]
+FirstInstallVersion = 39,
 }
@@ -53,6 +53,12 @@ public static class IncludesExtensions
 .Include(c => c.Files);
 }

+if (includes.HasFlag(ChapterIncludes.People))
+{
+queryable = queryable
+.Include(c => c.People);
+}
+
 return queryable.AsSplitQuery();
 }

@@ -122,6 +122,12 @@ public class ServerSettingConverter : ITypeConverter<IEnumerable<ServerSetting>,
 destination.SmtpConfig ??= new SmtpConfigDto();
 destination.SmtpConfig.CustomizedTemplates = bool.Parse(row.Value);
 break;
+case ServerSettingKey.FirstInstallDate:
+destination.FirstInstallDate = DateTime.Parse(row.Value);
+break;
+case ServerSettingKey.FirstInstallVersion:
+destination.FirstInstallVersion = row.Value;
+break;
 }
 }

@@ -35,6 +35,12 @@ public class DownloadService : IDownloadService
 // Figures out what the content type should be based on the file name.
 if (!_fileTypeProvider.TryGetContentType(filepath, out var contentType))
 {
+if (contentType == null)
+{
+// Get extension
+contentType = Path.GetExtension(filepath);
+}
+
 contentType = Path.GetExtension(filepath).ToLowerInvariant() switch
 {
 ".cbz" => "application/x-cbz",
@@ -169,6 +169,9 @@ public class SmartCollectionSyncService : ISmartCollectionSyncService
 s.NormalizedLocalizedName == normalizedSeriesName)
 && formats.Contains(s.Format));

+_logger.LogDebug("Trying to find {SeriesName} with formats ({Formats}) within Kavita for linking. Found: {ExistingSeriesName} ({ExistingSeriesId})",
+seriesInfo.SeriesName, formats, existingSeries?.Name, existingSeries?.Id);
+
 if (existingSeries != null)
 {
 await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
@@ -57,7 +57,10 @@ public class StatisticService : IStatisticService
 public async Task<UserReadStatistics> GetUserReadStatistics(int userId, IList<int> libraryIds)
 {
+if (libraryIds.Count == 0)
+{
+libraryIds = await _context.Library.GetUserLibraries(userId).ToListAsync();
+}


 // Total Pages Read
 var totalPagesRead = await _context.AppUserProgresses
@@ -20,7 +20,7 @@ public interface ITaskScheduler
 Task ScheduleStatsTasks();
 void ScheduleUpdaterTasks();
 Task ScheduleKavitaPlusTasks();
-void ScanFolder(string folderPath, TimeSpan delay);
+void ScanFolder(string folderPath, string originalPath, TimeSpan delay);
 void ScanFolder(string folderPath);
 void ScanLibrary(int libraryId, bool force = false);
 void ScanLibraries(bool force = false);
@@ -267,24 +267,38 @@ public class TaskScheduler : ITaskScheduler
 BackgroundJob.Enqueue(() => CheckForUpdate());
 }

-public void ScanFolder(string folderPath, TimeSpan delay)
+/// <summary>
+/// Queue up a Scan folder for a folder from Library Watcher.
+/// </summary>
+/// <param name="folderPath"></param>
+/// <param name="originalPath"></param>
+/// <param name="delay"></param>
+public void ScanFolder(string folderPath, string originalPath, TimeSpan delay)
 {
 var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
-if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder]))
+var normalizedOriginal = Tasks.Scanner.Parser.Parser.NormalizePath(originalPath);
+if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, normalizedOriginal]) ||
+HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, string.Empty]))
 {
 _logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued",
 normalizedFolder);
 return;
 }

+// Not sure where we should put this code, but we can get a bunch of ScanFolders when original has slight variations, like
+// create a folder, add a new file, etc. All of these can be merged into just 1 request.
+
 _logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder);
-BackgroundJob.Schedule(() => _scannerService.ScanFolder(normalizedFolder), delay);
+BackgroundJob.Schedule(() => _scannerService.ScanFolder(normalizedFolder, normalizedOriginal), delay);
 }

 public void ScanFolder(string folderPath)
 {
 var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
-if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {normalizedFolder}))
+if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, string.Empty]))
 {
 _logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued",
 normalizedFolder);
@@ -292,7 +306,7 @@ public class TaskScheduler : ITaskScheduler
 }

 _logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder);
-_scannerService.ScanFolder(normalizedFolder);
+_scannerService.ScanFolder(normalizedFolder, string.Empty);
 }

 #endregion
@@ -350,9 +364,9 @@ public class TaskScheduler : ITaskScheduler
 public void RefreshMetadata(int libraryId, bool forceUpdate = true)
 {
 var alreadyEnqueued = HasAlreadyEnqueuedTask(MetadataService.Name, "GenerateCoversForLibrary",
-new object[] {libraryId, true}) ||
+[libraryId, true]) ||
 HasAlreadyEnqueuedTask("MetadataService", "GenerateCoversForLibrary",
-new object[] {libraryId, false});
+[libraryId, false]);
 if (alreadyEnqueued)
 {
 _logger.LogInformation("A duplicate request to refresh metadata for library occured. Skipping");
@@ -365,7 +379,7 @@ public class TaskScheduler : ITaskScheduler

 public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false)
 {
-if (HasAlreadyEnqueuedTask(MetadataService.Name,"GenerateCoversForSeries", new object[] {libraryId, seriesId, forceUpdate}))
+if (HasAlreadyEnqueuedTask(MetadataService.Name,"GenerateCoversForSeries", [libraryId, seriesId, forceUpdate]))
 {
 _logger.LogInformation("A duplicate request to refresh metadata for library occured. Skipping");
 return;
@@ -377,7 +391,7 @@ public class TaskScheduler : ITaskScheduler

 public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false)
 {
-if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, forceUpdate}, ScanQueue))
+if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", [seriesId, forceUpdate], ScanQueue))
 {
 _logger.LogInformation("A duplicate request to scan series occured. Skipping");
 return;
@@ -396,7 +410,7 @@ public class TaskScheduler : ITaskScheduler

 public void AnalyzeFilesForSeries(int libraryId, int seriesId, bool forceUpdate = false)
 {
-if (HasAlreadyEnqueuedTask("WordCountAnalyzerService", "ScanSeries", new object[] {libraryId, seriesId, forceUpdate}))
+if (HasAlreadyEnqueuedTask("WordCountAnalyzerService", "ScanSeries", [libraryId, seriesId, forceUpdate]))
 {
 _logger.LogInformation("A duplicate request to scan series occured. Skipping");
 return;
@@ -426,13 +440,13 @@ public class TaskScheduler : ITaskScheduler
 public static bool HasScanTaskRunningForLibrary(int libraryId, bool checkRunningJobs = true)
 {
 return
-HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, true, true}, ScanQueue,
+HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", [libraryId, true, true], ScanQueue,
 checkRunningJobs) ||
-HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, false, true}, ScanQueue,
+HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", [libraryId, false, true], ScanQueue,
 checkRunningJobs) ||
-HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, true, false}, ScanQueue,
+HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", [libraryId, true, false], ScanQueue,
 checkRunningJobs) ||
-HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, false, false}, ScanQueue,
+HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", [libraryId, false, false], ScanQueue,
 checkRunningJobs);
 }

@@ -445,8 +459,8 @@ public class TaskScheduler : ITaskScheduler
 public static bool HasScanTaskRunningForSeries(int seriesId, bool checkRunningJobs = true)
 {
 return
-HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, true}, ScanQueue, checkRunningJobs) ||
-HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, false}, ScanQueue, checkRunningJobs);
+HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", [seriesId, true], ScanQueue, checkRunningJobs) ||
+HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", [seriesId, false], ScanQueue, checkRunningJobs);
 }

 /// <summary>
@@ -488,6 +502,7 @@ public class TaskScheduler : ITaskScheduler
 return false;
 }

+
 /// <summary>
 /// Checks against any jobs that are running or about to run
 /// </summary>
@@ -56,9 +56,9 @@ public class LibraryWatcher : ILibraryWatcher
 /// <summary>
 /// Counts within a time frame how many times the buffer became full. Is used to reschedule LibraryWatcher to start monitoring much later rather than instantly
 /// </summary>
-private int _bufferFullCounter;
-private int _restartCounter;
-private DateTime _lastErrorTime = DateTime.MinValue;
+private static int _bufferFullCounter;
+private static int _restartCounter;
+private static DateTime _lastErrorTime = DateTime.MinValue;
 /// <summary>
 /// Used to lock buffer Full Counter
 /// </summary>
@@ -262,17 +262,19 @@ public class LibraryWatcher : ILibraryWatcher
 return;
 }

-_taskScheduler.ScanFolder(fullPath, _queueWaitTime);
+_taskScheduler.ScanFolder(fullPath, filePath, _queueWaitTime);
 }
 catch (Exception ex)
 {
 _logger.LogError(ex, "[LibraryWatcher] An error occured when processing a watch event");
 }
-_logger.LogDebug("[LibraryWatcher] ProcessChange completed in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds);
+_logger.LogTrace("[LibraryWatcher] ProcessChange completed in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds);
 }

 private string GetFolder(string filePath, IEnumerable<string> libraryFolders)
 {
+// TODO: I can optimize this to avoid a library scan and instead do a Series Scan by finding the series that has a lowestFolderPath higher or equal to the filePath
+
 var parentDirectory = _directoryService.GetParentDirectoryName(filePath);
 _logger.LogTrace("[LibraryWatcher] Parent Directory: {ParentDirectory}", parentDirectory);
 if (string.IsNullOrEmpty(parentDirectory)) return string.Empty;
@@ -114,7 +114,6 @@ public class ParseScannedFiles
 _eventHub = eventHub;
 }

-
 /// <summary>
 /// This will Scan all files in a folder path. For each folder within the folderPath, FolderAction will be invoked for all files contained
 /// </summary>
@@ -122,48 +121,53 @@ public class ParseScannedFiles
 /// <param name="seriesPaths">A dictionary mapping a normalized path to a list of <see cref="SeriesModified"/> to help scanner skip I/O</param>
 /// <param name="folderPath">A library folder or series folder</param>
 /// <param name="forceCheck">If we should bypass any folder last write time checks on the scan and force I/O</param>
-public IList<ScanResult> ProcessFiles(string folderPath, bool scanDirectoryByDirectory,
+public async Task<IList<ScanResult>> ProcessFiles(string folderPath, bool scanDirectoryByDirectory,
 IDictionary<string, IList<SeriesModified>> seriesPaths, Library library, bool forceCheck = false)
 {
-string normalizedPath;
-var result = new List<ScanResult>();
 var fileExtensions = string.Join("|", library.LibraryFileTypes.Select(l => l.FileTypeGroup.GetRegex()));
+var matcher = BuildMatcher(library);
+
+var result = new List<ScanResult>();
+
 if (scanDirectoryByDirectory)
 {
-// This is used in library scan, so we should check first for a ignore file and use that here as well
-var matcher = new GlobMatcher();
-foreach (var pattern in library.LibraryExcludePatterns.Where(p => !string.IsNullOrEmpty(p.Pattern)))
-{
-matcher.AddExclude(pattern.Pattern);
-}
-
-var directories = _directoryService.GetDirectories(folderPath, matcher).ToList();
+var directories = _directoryService.GetDirectories(folderPath, matcher).Select(Parser.Parser.NormalizePath);
 foreach (var directory in directories)
 {
-// Since this is a loop, we need a list return
-normalizedPath = Parser.Parser.NormalizePath(directory);
-if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
+await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+MessageFactory.FileScanProgressEvent(directory, library.Name, ProgressEventType.Updated));
+
+if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, directory, forceCheck))
 {
-result.Add(new ScanResult()
+if (result.Exists(r => r.Folder == directory))
 {
-Files = ArraySegment<string>.Empty,
-Folder = directory,
-LibraryRoot = folderPath,
-HasChanged = false
-});
+continue;
+}
+result.Add(CreateScanResult(directory, folderPath, false, ArraySegment<string>.Empty));
 }
-else if (seriesPaths.TryGetValue(normalizedPath, out var series) && series.All(s => !string.IsNullOrEmpty(s.LowestFolderPath)))
+else if (seriesPaths.TryGetValue(directory, out var series) && series.All(s => !string.IsNullOrEmpty(s.LowestFolderPath)))
 {
 // If there are multiple series inside this path, let's check each of them to see which was modified and only scan those
 // This is very helpful for ComicVine libraries by Publisher
+_logger.LogDebug("[ProcessFiles] {Directory} is dirty and has multiple series folders, checking if we can avoid a full scan", directory);
 foreach (var seriesModified in series)
 {
-if (HasSeriesFolderNotChangedSinceLastScan(seriesModified, seriesModified.LowestFolderPath!))
+var hasFolderChangedSinceLastScan = library.LastScanned.Truncate(TimeSpan.TicksPerSecond) <
+_directoryService
+.GetLastWriteTime(seriesModified.LowestFolderPath!)
+.Truncate(TimeSpan.TicksPerSecond);
+
+await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+MessageFactory.FileScanProgressEvent(seriesModified.LowestFolderPath!, library.Name, ProgressEventType.Updated));
+
+if (!hasFolderChangedSinceLastScan)
 {
-result.Add(CreateScanResult(directory, folderPath, false, ArraySegment<string>.Empty));
+_logger.LogDebug("[ProcessFiles] {Directory} subfolder {Folder} did not change since last scan, adding entry to skip", directory, seriesModified.LowestFolderPath);
+result.Add(CreateScanResult(seriesModified.LowestFolderPath!, folderPath, false, ArraySegment<string>.Empty));
 }
 else
 {
+_logger.LogDebug("[ProcessFiles] {Directory} subfolder {Folder} changed, adding folders", directory, seriesModified.LowestFolderPath);
 result.Add(CreateScanResult(directory, folderPath, true,
 _directoryService.ScanFiles(seriesModified.LowestFolderPath!, fileExtensions, matcher)));
 }
@@ -173,19 +177,22 @@ public class ParseScannedFiles
 {
 // For a scan, this is doing everything in the directory loop before the folder Action is called...which leads to no progress indication
 result.Add(CreateScanResult(directory, folderPath, true,
-_directoryService.ScanFiles(directory, fileExtensions)));
+_directoryService.ScanFiles(directory, fileExtensions, matcher)));
 }
 }

 return result;
 }

-normalizedPath = Parser.Parser.NormalizePath(folderPath);
+var normalizedPath = Parser.Parser.NormalizePath(folderPath);
 var libraryRoot =
 library.Folders.FirstOrDefault(f =>
-Parser.Parser.NormalizePath(folderPath).Contains(Parser.Parser.NormalizePath(f.Path)))?.Path ??
+normalizedPath.Contains(Parser.Parser.NormalizePath(f.Path)))?.Path ??
 folderPath;

+await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+MessageFactory.FileScanProgressEvent(normalizedPath, library.Name, ProgressEventType.Updated));
+
 if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
 {
 result.Add(CreateScanResult(folderPath, libraryRoot, false, ArraySegment<string>.Empty));
@@ -193,13 +200,24 @@ public class ParseScannedFiles
 else
 {
 result.Add(CreateScanResult(folderPath, libraryRoot, true,
-_directoryService.ScanFiles(folderPath, fileExtensions)));
+_directoryService.ScanFiles(folderPath, fileExtensions, matcher)));
 }


 return result;
 }

+private static GlobMatcher BuildMatcher(Library library)
+{
+var matcher = new GlobMatcher();
+foreach (var pattern in library.LibraryExcludePatterns.Where(p => !string.IsNullOrEmpty(p.Pattern)))
+{
+matcher.AddExclude(pattern.Pattern);
+}
+
+return matcher;
+}
+
 private static ScanResult CreateScanResult(string folderPath, string libraryRoot, bool hasChanged,
 IList<string> files)
 {
@@ -243,7 +261,7 @@ public class ParseScannedFiles
 NormalizedName = normalizedSeries
 };

-scannedSeries.AddOrUpdate(existingKey, new List<ParserInfo>() {info}, (_, oldValue) =>
+scannedSeries.AddOrUpdate(existingKey, [info], (_, oldValue) =>
 {
 oldValue ??= new List<ParserInfo>();
 if (!oldValue.Contains(info))
@@ -338,7 +356,7 @@ public class ParseScannedFiles
 {
 try
 {
-var scanResults = ProcessFiles(folderPath, isLibraryScan, seriesPaths, library, forceCheck);
+var scanResults = await ProcessFiles(folderPath, isLibraryScan, seriesPaths, library, forceCheck);

 foreach (var scanResult in scanResults)
 {
@@ -414,15 +432,19 @@ public class ParseScannedFiles
 /// <param name="library"></param>
 private async Task ProcessScanResult(ScanResult result, IDictionary<string, IList<SeriesModified>> seriesPaths, Library library)
 {
+// TODO: This should return the result as we are modifying it as a side effect
+
 // If the folder hasn't changed, generate fake ParserInfos for the Series that were in that folder.
+var normalizedFolder = Parser.Parser.NormalizePath(result.Folder);
 if (!result.HasChanged)
 {
-var normalizedFolder = Parser.Parser.NormalizePath(result.Folder);
-result.ParserInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo()
-{
-Series = fp.SeriesName,
-Format = fp.Format,
-}).ToList();
+result.ParserInfos = seriesPaths[normalizedFolder]
+.Select(fp => new ParserInfo()
+{
+Series = fp.SeriesName,
+Format = fp.Format,
+})
+.ToList();

 _logger.LogDebug("[ScannerService] Skipped File Scan for {Folder} as it hasn't changed since last scan", normalizedFolder);
 await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
@@ -431,25 +453,24 @@ public class ParseScannedFiles
 }

 var files = result.Files;
-var folder = result.Folder;
-var libraryRoot = result.LibraryRoot;

 // When processing files for a folder and we do enter, we need to parse the information and combine parser infos
 // NOTE: We might want to move the merge step later in the process, like return and combine.
-_logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, folder);
-await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
-MessageFactory.FileScanProgressEvent($"{files.Count} files in {folder}", library.Name, ProgressEventType.Updated));
-
 if (files.Count == 0)
 {
-_logger.LogInformation("[ScannerService] {Folder} is empty, no longer in this location, or has no file types that match Library File Types", folder);
+_logger.LogInformation("[ScannerService] {Folder} is empty, no longer in this location, or has no file types that match Library File Types", normalizedFolder);
 result.ParserInfos = ArraySegment<ParserInfo>.Empty;
 return;
 }

+_logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, normalizedFolder);
+await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+MessageFactory.FileScanProgressEvent($"{files.Count} files in {normalizedFolder}", library.Name, ProgressEventType.Updated));
+
 // Multiple Series can exist within a folder. We should instead put these infos on the result and perform merging above
 IList<ParserInfo> infos = files
-.Select(file => _readingItemService.ParseFile(file, folder, libraryRoot, library.Type))
+.Select(file => _readingItemService.ParseFile(file, normalizedFolder, result.LibraryRoot, library.Type))
 .Where(info => info != null)
 .ToList()!;
@@ -26,7 +26,7 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
 {
 Filename = Path.GetFileName(filePath),
 Format = Parser.ParseFormat(filePath),
-Title = Parser.RemoveExtensionIfSupported(fileName),
+Title = Parser.RemoveExtensionIfSupported(fileName)!,
 FullFilePath = Parser.NormalizePath(filePath),
 Series = string.Empty,
 ComicInfo = comicInfo
@@ -76,6 +76,9 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
 ret.Chapters = Parser.DefaultChapter;
 ret.Volumes = Parser.SpecialVolume;

+// NOTE: This uses rootPath. LibraryRoot works better for manga, but it's not always that way.
+// It might be worth writing some logic if the file is a special, to take the folder above the Specials/
+// if present
 ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
 }

@@ -101,7 +101,7 @@ public abstract class DefaultParser(IDirectoryService directoryService) : IDefau
 }
 }

-protected void UpdateFromComicInfo(ParserInfo info)
+protected static void UpdateFromComicInfo(ParserInfo info)
 {
 if (info.ComicInfo == null) return;

@@ -109,6 +109,10 @@ public abstract class DefaultParser(IDirectoryService directoryService) : IDefau
 {
 info.Volumes = info.ComicInfo.Volume;
 }
+if (!string.IsNullOrEmpty(info.ComicInfo.Number))
+{
+info.Chapters = info.ComicInfo.Number;
+}
 if (!string.IsNullOrEmpty(info.ComicInfo.Series))
 {
 info.Series = info.ComicInfo.Series.Trim();
@@ -125,16 +129,6 @@ public abstract class DefaultParser(IDirectoryService directoryService) : IDefau
 info.Volumes = Parser.SpecialVolume;
 }

-if (!string.IsNullOrEmpty(info.ComicInfo.Number))
-{
-info.Chapters = info.ComicInfo.Number;
-if (info.IsSpecial && Parser.DefaultChapter != info.Chapters)
-{
-info.IsSpecial = false;
-info.Volumes = Parser.SpecialVolume;
-}
-}
-
 // Patch is SeriesSort from ComicInfo
 if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
 {
@@ -103,7 +103,11 @@ public static class Parser
 private static readonly Regex CoverImageRegex = new Regex(@"(?<![[a-z]\d])(?:!?)(?<!back)(?<!back_)(?<!back-)(cover|folder)(?![\w\d])",
 MatchOptions, RegexTimeout);

-private static readonly Regex NormalizeRegex = new Regex(@"[^\p{L}0-9\+!\*]",
+/// <summary>
+/// Normalize everything within Kavita. Some characters don't fall under Unicode, like full-width characters and need to be
+/// added on a case-by-case basis.
+/// </summary>
+private static readonly Regex NormalizeRegex = new Regex(@"[^\p{L}0-9\+!*!+]",
 MatchOptions, RegexTimeout);

 /// <summary>
@@ -45,7 +45,7 @@ public interface IScannerService
 [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
 Task ScanSeries(int seriesId, bool bypassFolderOptimizationChecks = true);

-Task ScanFolder(string folder);
+Task ScanFolder(string folder, string originalPath);
 Task AnalyzeFiles();

 }
@@ -135,30 +135,35 @@ public class ScannerService : IScannerService
 /// Given a generic folder path, will invoke a Series scan or Library scan.
 /// </summary>
 /// <remarks>This will Schedule the job to run 1 minute in the future to allow for any close-by duplicate requests to be dropped</remarks>
-/// <param name="folder"></param>
-public async Task ScanFolder(string folder)
+/// <param name="folder">Normalized folder</param>
+/// <param name="originalPath">If invoked from LibraryWatcher, this maybe a nested folder and can allow for optimization</param>
+public async Task ScanFolder(string folder, string originalPath)
 {
 Series? series = null;
 try
 {
-series = await _unitOfWork.SeriesRepository.GetSeriesByFolderPath(folder, SeriesIncludes.Library);
+series = await _unitOfWork.SeriesRepository.GetSeriesThatContainsLowestFolderPath(originalPath,
+SeriesIncludes.Library) ??
+await _unitOfWork.SeriesRepository.GetSeriesByFolderPath(originalPath, SeriesIncludes.Library) ??
+await _unitOfWork.SeriesRepository.GetSeriesByFolderPath(folder, SeriesIncludes.Library);
 }
 catch (InvalidOperationException ex)
 {
 if (ex.Message.Equals("Sequence contains more than one element."))
 {
-_logger.LogCritical(ex, "[ScannerService] Multiple series map to this folder. Library scan will be used for ScanFolder");
+_logger.LogCritical(ex, "[ScannerService] Multiple series map to this folder or folder is at library root. Library scan will be used for ScanFolder");
 }
 }

+// TODO: Figure out why we have the library type restriction here
-if (series != null && (series.Library.Type != LibraryType.Book || series.Library.Type != LibraryType.LightNovel))
+if (series != null && series.Library.Type is not (LibraryType.Book or LibraryType.LightNovel))
 {
 if (TaskScheduler.HasScanTaskRunningForSeries(series.Id))
 {
 _logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this series. Dropping request", folder);
 return;
 }
 _logger.LogInformation("[ScannerService] Scan folder invoked for {Folder}, Series matched to folder and ScanSeries enqueued for 1 minute", folder);
 BackgroundJob.Schedule(() => ScanSeries(series.Id, true), TimeSpan.FromMinutes(1));
 return;
 }
@@ -181,7 +181,9 @@ public class StatsService : IStatsService
 {
 InstallId = serverSettings.InstallId,
 KavitaVersion = serverSettings.InstallVersion,
-IsDocker = OsInfo.IsDocker
+IsDocker = OsInfo.IsDocker,
+FirstInstallDate = serverSettings.FirstInstallDate,
+FirstInstallVersion = serverSettings.FirstInstallVersion
 };
 }

@@ -268,6 +268,7 @@ public class Startup

 // v0.8.2
 await ManualMigrateThemeDescription.Migrate(dataContext, logger);
+await MigrateInitialInstallData.Migrate(dataContext, logger, directoryService);

 // Update the version in the DB after all migrations are run
 var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion);