Merged develop into main

Joseph Milazzo 2021-10-12 08:21:43 -05:00
commit aa710529f0
151 changed files with 4393 additions and 1703 deletions


@ -16,6 +16,10 @@
<DocumentationFile>bin\Debug\API.xml</DocumentationFile>
</PropertyGroup>
<PropertyGroup>
<SatelliteResourceLanguages>en</SatelliteResourceLanguages>
</PropertyGroup>
<!-- Set the Product and Version info for our own projects -->
<PropertyGroup>
<Product>Kavita</Product>
@ -33,39 +37,38 @@
<ItemGroup>
<PackageReference Include="AutoMapper.Extensions.Microsoft.DependencyInjection" Version="8.1.1" />
<PackageReference Include="Docnet.Core" Version="2.3.1" />
<PackageReference Include="Docnet.Core" Version="2.4.0-alpha.1" />
<PackageReference Include="ExCSS" Version="4.1.0" />
<PackageReference Include="Flurl" Version="3.0.2" />
<PackageReference Include="Flurl.Http" Version="3.2.0" />
<PackageReference Include="Hangfire" Version="1.7.24" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.7.24" />
<PackageReference Include="Hangfire" Version="1.7.25" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.7.25" />
<PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" />
<PackageReference Include="Hangfire.MemoryStorage.Core" Version="1.4.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.35" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.37" />
<PackageReference Include="MarkdownDeep.NET.Core" Version="1.5.0.4" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="5.0.9" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="5.0.8" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="5.0.8" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="5.0.10" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="5.0.10" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="5.0.10" />
<PackageReference Include="Microsoft.AspNetCore.SignalR" Version="1.1.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="5.0.8">
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="5.0.10">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="5.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="5.0.10" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="5.0.2" />
<PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="2.1.3" />
<PackageReference Include="NetVips" Version="2.0.1" />
<PackageReference Include="NetVips.Native" Version="8.11.0" />
<PackageReference Include="NetVips.Native" Version="8.11.4" />
<PackageReference Include="NReco.Logging.File" Version="1.1.2" />
<PackageReference Include="Sentry.AspNetCore" Version="3.8.3" />
<PackageReference Include="SharpCompress" Version="0.29.0" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="8.27.0.35380">
<PackageReference Include="SharpCompress" Version="0.30.0" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="8.29.0.36737">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.1.5" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.2.2" />
<PackageReference Include="System.Drawing.Common" Version="5.0.2" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="6.12.0" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="6.12.2" />
<PackageReference Include="VersOne.Epub" Version="3.0.3.1" />
</ItemGroup>


@ -7,10 +7,10 @@ using API.Constants;
using API.DTOs;
using API.DTOs.Account;
using API.Entities;
using API.Errors;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using AutoMapper;
using Kavita.Common;
using Microsoft.AspNetCore.Identity;
@ -31,13 +31,14 @@ namespace API.Controllers
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<AccountController> _logger;
private readonly IMapper _mapper;
private readonly IAccountService _accountService;
/// <inheritdoc />
public AccountController(UserManager<AppUser> userManager,
SignInManager<AppUser> signInManager,
ITokenService tokenService, IUnitOfWork unitOfWork,
ILogger<AccountController> logger,
IMapper mapper)
IMapper mapper, IAccountService accountService)
{
_userManager = userManager;
_signInManager = signInManager;
@ -45,6 +46,7 @@ namespace API.Controllers
_unitOfWork = unitOfWork;
_logger = logger;
_mapper = mapper;
_accountService = accountService;
}
/// <summary>
@ -61,30 +63,10 @@ namespace API.Controllers
if (resetPasswordDto.UserName != User.GetUsername() && !User.IsInRole(PolicyConstants.AdminRole))
return Unauthorized("You are not permitted to perform this operation.");
// Validate Password
foreach (var validator in _userManager.PasswordValidators)
var errors = await _accountService.ChangeUserPassword(user, resetPasswordDto.Password);
if (errors.Any())
{
var validationResult = await validator.ValidateAsync(_userManager, user, resetPasswordDto.Password);
if (!validationResult.Succeeded)
{
return BadRequest(
validationResult.Errors.Select(e => new ApiException(400, e.Code, e.Description)));
}
}
var result = await _userManager.RemovePasswordAsync(user);
if (!result.Succeeded)
{
_logger.LogError("Could not update password");
return BadRequest(result.Errors.Select(e => new ApiException(400, e.Code, e.Description)));
}
result = await _userManager.AddPasswordAsync(user, resetPasswordDto.Password);
if (!result.Succeeded)
{
_logger.LogError("Could not update password");
return BadRequest(result.Errors.Select(e => new ApiException(400, e.Code, e.Description)));
return BadRequest(errors);
}
_logger.LogInformation("{User}'s Password has been reset", resetPasswordDto.UserName);
@ -110,6 +92,13 @@ namespace API.Controllers
user.UserPreferences ??= new AppUserPreferences();
user.ApiKey = HashUtil.ApiKey();
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (!settings.EnableAuthentication && !registerDto.IsAdmin)
{
_logger.LogInformation("User {UserName} is being registered as non-admin with no server authentication. Using default password.", registerDto.Username);
registerDto.Password = AccountService.DefaultPassword;
}
var result = await _userManager.CreateAsync(user, registerDto.Password);
if (!result.Succeeded) return BadRequest(result.Errors);
@ -166,6 +155,14 @@ namespace API.Controllers
if (user == null) return Unauthorized("Invalid username");
var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (!settings.EnableAuthentication && !isAdmin)
{
_logger.LogDebug("User {UserName} is logging in with authentication disabled", loginDto.Username);
loginDto.Password = AccountService.DefaultPassword;
}
var result = await _signInManager
.CheckPasswordSignInAsync(user, loginDto.Password, false);
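The password-validation flow deleted above now lives behind IAccountService. The service implementation itself is not part of this hunk; below is a minimal sketch of what ChangeUserPassword plausibly looks like, reconstructed from the removed controller code (the class layout, signature, and the DefaultPassword value are assumptions):

using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Errors;
using Microsoft.AspNetCore.Identity;

public class AccountService : IAccountService
{
    // DefaultPassword is referenced elsewhere in this commit; its actual value is not shown in this diff.
    public const string DefaultPassword = "[value not shown]";

    private readonly UserManager<AppUser> _userManager;

    public AccountService(UserManager<AppUser> userManager)
    {
        _userManager = userManager;
    }

    // Mirrors the validate -> remove -> add flow that previously lived in AccountController.
    public async Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword)
    {
        foreach (var validator in _userManager.PasswordValidators)
        {
            var validation = await validator.ValidateAsync(_userManager, user, newPassword);
            if (!validation.Succeeded)
                return validation.Errors.Select(e => new ApiException(400, e.Code, e.Description));
        }

        var remove = await _userManager.RemovePasswordAsync(user);
        if (!remove.Succeeded)
            return remove.Errors.Select(e => new ApiException(400, e.Code, e.Description));

        var add = await _userManager.AddPasswordAsync(user, newPassword);
        if (!add.Succeeded)
            return add.Errors.Select(e => new ApiException(400, e.Code, e.Description));

        return Enumerable.Empty<ApiException>();
    }
}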


@ -2,13 +2,11 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Constants;
using API.DTOs;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
namespace API.Controllers
@ -19,13 +17,11 @@ namespace API.Controllers
public class CollectionController : BaseApiController
{
private readonly IUnitOfWork _unitOfWork;
private readonly UserManager<AppUser> _userManager;
/// <inheritdoc />
public CollectionController(IUnitOfWork unitOfWork, UserManager<AppUser> userManager)
public CollectionController(IUnitOfWork unitOfWork)
{
_unitOfWork = unitOfWork;
_userManager = userManager;
}
/// <summary>
@ -36,7 +32,7 @@ namespace API.Controllers
public async Task<IEnumerable<CollectionTagDto>> GetAllTags()
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
var isAdmin = await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole);
var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);
if (isAdmin)
{
return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync();


@ -63,7 +63,7 @@ namespace API.Controllers
public async Task<ActionResult> DownloadVolume(int volumeId)
{
var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId);
var volume = await _unitOfWork.SeriesRepository.GetVolumeByIdAsync(volumeId);
var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(volumeId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);
try
{
@ -92,7 +92,7 @@ namespace API.Controllers
{
var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);
var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId);
var volume = await _unitOfWork.SeriesRepository.GetVolumeByIdAsync(chapter.VolumeId);
var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(chapter.VolumeId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);
try
{


@ -1,12 +1,9 @@
using System;
using System.IO;
using System.Net;
using System.IO;
using System.Threading.Tasks;
using API.Extensions;
using API.Interfaces;
using API.Services;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Net.Http.Headers;
namespace API.Controllers
{


@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs;
using API.Entities;
using API.Entities.Enums;
@ -179,7 +180,7 @@ namespace API.Controllers
try
{
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
_unitOfWork.LibraryRepository.Delete(library);
await _unitOfWork.CommitAsync();
@ -203,7 +204,7 @@ namespace API.Controllers
[HttpPost("update")]
public async Task<ActionResult> UpdateLibrary(UpdateLibraryDto libraryForUserDto)
{
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryForUserDto.Id);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryForUserDto.Id, LibraryIncludes.Folders);
var originalFolders = library.Folders.Select(x => x.Path).ToList();


@ -5,7 +5,6 @@ using System.Linq;
using System.Threading.Tasks;
using System.Xml.Serialization;
using API.Comparators;
using API.Constants;
using API.DTOs;
using API.DTOs.Filtering;
using API.DTOs.OPDS;
@ -16,7 +15,6 @@ using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Kavita.Common;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
namespace API.Controllers
@ -26,7 +24,6 @@ namespace API.Controllers
private readonly IUnitOfWork _unitOfWork;
private readonly IDownloadService _downloadService;
private readonly IDirectoryService _directoryService;
private readonly UserManager<AppUser> _userManager;
private readonly ICacheService _cacheService;
private readonly IReaderService _readerService;
@ -41,13 +38,12 @@ namespace API.Controllers
private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer();
public OpdsController(IUnitOfWork unitOfWork, IDownloadService downloadService,
IDirectoryService directoryService, UserManager<AppUser> userManager,
ICacheService cacheService, IReaderService readerService)
IDirectoryService directoryService, ICacheService cacheService,
IReaderService readerService)
{
_unitOfWork = unitOfWork;
_downloadService = downloadService;
_directoryService = directoryService;
_userManager = userManager;
_cacheService = cacheService;
_readerService = readerService;
@ -170,16 +166,16 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
var isAdmin = await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole);
var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);
IEnumerable<CollectionTagDto> tags;
IList<CollectionTagDto> tags;
if (isAdmin)
{
tags = await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync();
tags = (await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync()).ToList();
}
else
{
tags = await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync();
tags = (await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync()).ToList();
}
@ -201,6 +197,14 @@ namespace API.Controllers
});
}
if (tags.Count == 0)
{
feed.Entries.Add(new FeedEntry()
{
Title = "Nothing here",
});
}
return CreateXmlResult(SerializeXml(feed));
}
@ -213,7 +217,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
var isAdmin = await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole);
var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);
IEnumerable<CollectionTagDto> tags;
if (isAdmin)
@ -300,13 +304,13 @@ namespace API.Controllers
var feed = CreateFeed(readingList.Title + " Reading List", $"{apiKey}/reading-list/{readingListId}", apiKey);
var items = await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId);
var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId)).ToList();
foreach (var item in items)
{
feed.Entries.Add(new FeedEntry()
{
Id = item.ChapterId.ToString(),
Title = "Chapter " + item.ChapterNumber,
Title = $"{item.SeriesName} Chapter {item.ChapterNumber}",
Links = new List<FeedLink>()
{
CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{item.SeriesId}/volume/{item.VolumeId}/chapter/{item.ChapterId}"),
@ -315,6 +319,14 @@ namespace API.Controllers
});
}
if (items.Count == 0)
{
feed.Entries.Add(new FeedEntry()
{
Title = "Nothing here",
});
}
return CreateXmlResult(SerializeXml(feed));
@ -373,6 +385,14 @@ namespace API.Controllers
feed.Entries.Add(CreateSeries(seriesDto, apiKey));
}
if (recentlyAdded.Count == 0)
{
feed.Entries.Add(new FeedEntry()
{
Title = "Nothing here",
});
}
return CreateXmlResult(SerializeXml(feed));
}
@ -404,6 +424,14 @@ namespace API.Controllers
feed.Entries.Add(CreateSeries(seriesDto, apiKey));
}
if (pagedList.Count == 0)
{
feed.Entries.Add(new FeedEntry()
{
Title = "Nothing here",
});
}
return CreateXmlResult(SerializeXml(feed));
}
@ -467,7 +495,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId);
var volumes = await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId);
var feed = CreateFeed(series.Name + " - Volumes", $"{apiKey}/series/{series.Id}", apiKey);
feed.Links.Add(CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesId}"));
foreach (var volumeDto in volumes)
@ -486,7 +514,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId);
var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
var chapters =
(await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId)).OrderBy(x => double.Parse(x.Number),
_chapterSortComparer);
@ -517,7 +545,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId);
var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId);
var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);


@ -97,7 +97,7 @@ namespace API.Controllers
public async Task<ActionResult> MarkRead(MarkReadDto markReadDto)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
var volumes = await _unitOfWork.SeriesRepository.GetVolumes(markReadDto.SeriesId);
var volumes = await _unitOfWork.VolumeRepository.GetVolumes(markReadDto.SeriesId);
user.Progresses ??= new List<AppUserProgress>();
foreach (var volume in volumes)
{
@ -125,7 +125,7 @@ namespace API.Controllers
public async Task<ActionResult> MarkUnread(MarkReadDto markReadDto)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
var volumes = await _unitOfWork.SeriesRepository.GetVolumes(markReadDto.SeriesId);
var volumes = await _unitOfWork.VolumeRepository.GetVolumes(markReadDto.SeriesId);
user.Progresses ??= new List<AppUserProgress>();
foreach (var volume in volumes)
{
@ -267,7 +267,7 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
user.Progresses ??= new List<AppUserProgress>();
var volumes = await _unitOfWork.SeriesRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
foreach (var volume in volumes)
{
_readerService.MarkChaptersAsRead(user, volume.SeriesId, volume.Chapters);
@ -294,7 +294,7 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
user.Progresses ??= new List<AppUserProgress>();
var volumes = await _unitOfWork.SeriesRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
foreach (var volume in volumes)
{
_readerService.MarkChaptersAsUnread(user, volume.SeriesId, volume.Chapters);


@ -1,4 +1,5 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
@ -99,16 +100,20 @@ namespace API.Controllers
[HttpPost("delete-item")]
public async Task<ActionResult> DeleteListItem(UpdateReadingListPosition dto)
{
var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemsByIdAsync(dto.ReadingListId)).ToList();
var item = items.Find(r => r.Id == dto.ReadingListItemId);
items.Remove(item);
var readingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(dto.ReadingListId);
readingList.Items = readingList.Items.Where(r => r.Id != dto.ReadingListItemId).ToList();
for (var i = 0; i < items.Count; i++)
var index = 0;
foreach (var readingListItem in readingList.Items)
{
items[i].Order = i;
readingListItem.Order = index;
index++;
}
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
if (!_unitOfWork.HasChanges()) return Ok();
if (await _unitOfWork.CommitAsync())
{
return Ok("Updated");
}
@ -138,15 +143,10 @@ namespace API.Controllers
itemIdsToRemove.Contains(r.Id));
_unitOfWork.ReadingListRepository.BulkRemove(listItems);
if (_unitOfWork.HasChanges())
{
await _unitOfWork.CommitAsync();
return Ok("Updated");
}
else
{
return Ok("Nothing to remove");
}
if (!_unitOfWork.HasChanges()) return Ok("Nothing to remove");
await _unitOfWork.CommitAsync();
return Ok("Updated");
}
catch
{
@ -437,7 +437,7 @@ namespace API.Controllers
var existingChapterExists = readingList.Items.Select(rli => rli.ChapterId).ToHashSet();
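// Volume names may be floats (e.g. "1.5", per the Volume entity docs), hence the switch below from int.Parse to float.Parse.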
var chaptersForSeries = (await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds))
.OrderBy(c => int.Parse(c.Volume.Name))
.OrderBy(c => float.Parse(c.Volume.Name))
.ThenBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting);
var index = lastOrder + 1;


@ -10,9 +10,11 @@ using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Interfaces;
using API.SignalR;
using Kavita.Common;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;
namespace API.Controllers
@ -22,12 +24,14 @@ namespace API.Controllers
private readonly ILogger<SeriesController> _logger;
private readonly ITaskScheduler _taskScheduler;
private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub;
public SeriesController(ILogger<SeriesController> logger, ITaskScheduler taskScheduler, IUnitOfWork unitOfWork)
public SeriesController(ILogger<SeriesController> logger, ITaskScheduler taskScheduler, IUnitOfWork unitOfWork, IHubContext<MessageHub> messageHub)
{
_logger = logger;
_taskScheduler = taskScheduler;
_unitOfWork = unitOfWork;
_messageHub = messageHub;
}
[HttpPost]
@ -97,14 +101,14 @@ namespace API.Controllers
public async Task<ActionResult<IEnumerable<VolumeDto>>> GetVolumes(int seriesId)
{
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
return Ok(await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId));
return Ok(await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId));
}
[HttpGet("volume")]
public async Task<ActionResult<VolumeDto>> GetVolume(int volumeId)
{
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
return Ok(await _unitOfWork.SeriesRepository.GetVolumeDtoAsync(volumeId, userId));
return Ok(await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(volumeId, userId));
}
[HttpGet("chapter")]
@ -217,7 +221,7 @@ namespace API.Controllers
[HttpPost("refresh-metadata")]
public ActionResult RefreshSeriesMetadata(RefreshSeriesDto refreshSeriesDto)
{
_taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId);
_taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, true);
return Ok();
}
@ -296,6 +300,12 @@ namespace API.Controllers
if (await _unitOfWork.CommitAsync())
{
foreach (var tag in updateSeriesMetadataDto.Tags)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesAddedToCollection,
MessageFactory.SeriesAddedToCollection(tag.Id,
updateSeriesMetadataDto.SeriesMetadata.SeriesId));
}
return Ok("Successfully updated");
}
}


@ -3,11 +3,13 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
using API.DTOs.Settings;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Converters;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Kavita.Common;
using Kavita.Common.Extensions;
using Microsoft.AspNetCore.Authorization;
@ -21,12 +23,22 @@ namespace API.Controllers
private readonly ILogger<SettingsController> _logger;
private readonly IUnitOfWork _unitOfWork;
private readonly ITaskScheduler _taskScheduler;
private readonly IAccountService _accountService;
public SettingsController(ILogger<SettingsController> logger, IUnitOfWork unitOfWork, ITaskScheduler taskScheduler)
public SettingsController(ILogger<SettingsController> logger, IUnitOfWork unitOfWork, ITaskScheduler taskScheduler, IAccountService accountService)
{
_logger = logger;
_unitOfWork = unitOfWork;
_taskScheduler = taskScheduler;
_accountService = accountService;
}
[AllowAnonymous]
[HttpGet("base-url")]
public async Task<ActionResult<string>> GetBaseUrl()
{
var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
return Ok(settingsDto.BaseUrl);
}
[Authorize(Policy = "RequireAdminRole")]
@ -57,6 +69,7 @@ namespace API.Controllers
// We do not allow CacheDirectory changes, so we will ignore.
var currentSettings = await _unitOfWork.SettingsRepository.GetSettingsAsync();
var updateAuthentication = false;
foreach (var setting in currentSettings)
{
@ -80,6 +93,18 @@ namespace API.Controllers
_unitOfWork.SettingsRepository.Update(setting);
}
if (setting.Key == ServerSettingKey.BaseUrl && updateSettingsDto.BaseUrl + string.Empty != setting.Value)
{
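// Normalize so the stored value always has exactly one leading and one trailing slash, e.g. "kavita" becomes "/kavita/".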
var path = !updateSettingsDto.BaseUrl.StartsWith("/")
? $"/{updateSettingsDto.BaseUrl}"
: updateSettingsDto.BaseUrl;
path = !path.EndsWith("/")
? $"{path}/"
: path;
setting.Value = path;
_unitOfWork.SettingsRepository.Update(setting);
}
if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + string.Empty != setting.Value)
{
setting.Value = updateSettingsDto.LoggingLevel + string.Empty;
@ -93,6 +118,13 @@ namespace API.Controllers
_unitOfWork.SettingsRepository.Update(setting);
}
if (setting.Key == ServerSettingKey.EnableAuthentication && updateSettingsDto.EnableAuthentication + string.Empty != setting.Value)
{
setting.Value = updateSettingsDto.EnableAuthentication + string.Empty;
_unitOfWork.SettingsRepository.Update(setting);
updateAuthentication = true;
}
if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + string.Empty != setting.Value)
{
setting.Value = updateSettingsDto.AllowStatCollection + string.Empty;
@ -110,12 +142,33 @@ namespace API.Controllers
if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated");
if (!_unitOfWork.HasChanges() || !await _unitOfWork.CommitAsync())
try
{
await _unitOfWork.CommitAsync();
if (updateAuthentication)
{
var users = await _unitOfWork.UserRepository.GetNonAdminUsersAsync();
foreach (var user in users)
{
var errors = await _accountService.ChangeUserPassword(user, AccountService.DefaultPassword);
if (!errors.Any()) continue;
await _unitOfWork.RollbackAsync();
return BadRequest(errors);
}
_logger.LogInformation("Server authentication changed. Updated all non-admins to default password");
}
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when updating server settings");
await _unitOfWork.RollbackAsync();
return BadRequest("There was a critical issue. Please try again.");
}
_logger.LogInformation("Server Settings updated");
_taskScheduler.ScheduleTasks();
return Ok(updateSettingsDto);
@ -148,5 +201,12 @@ namespace API.Controllers
var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
return Ok(settingsDto.EnableOpds);
}
[HttpGet("authentication-enabled")]
public async Task<ActionResult<bool>> GetAuthenticationEnabled()
{
var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
return Ok(settingsDto.EnableAuthentication);
}
}
}


@ -148,7 +148,7 @@ namespace API.Controllers
chapter.CoverImage = filePath;
chapter.CoverImageLocked = true;
_unitOfWork.ChapterRepository.Update(chapter);
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId);
var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId);
volume.CoverImage = chapter.CoverImage;
_unitOfWork.VolumeRepository.Update(volume);
}
@ -185,7 +185,7 @@ namespace API.Controllers
chapter.CoverImage = string.Empty;
chapter.CoverImageLocked = false;
_unitOfWork.ChapterRepository.Update(chapter);
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId);
var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId);
volume.CoverImage = chapter.CoverImage;
_unitOfWork.VolumeRepository.Update(volume);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);


@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs;
using API.Extensions;
using API.Interfaces;
@ -38,11 +39,24 @@ namespace API.Controllers
return Ok(await _unitOfWork.UserRepository.GetMembersAsync());
}
[AllowAnonymous]
[HttpGet("names")]
public async Task<ActionResult<IEnumerable<MemberDto>>> GetUserNames()
{
var setting = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (setting.EnableAuthentication)
{
return Unauthorized("This API cannot be used given your server's configuration");
}
var members = await _unitOfWork.UserRepository.GetMembersAsync();
return Ok(members.Select(m => m.Username));
}
[HttpGet("has-reading-progress")]
public async Task<ActionResult<bool>> HasReadingProgress(int libraryId)
{
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId);
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
return Ok(await _unitOfWork.AppUserProgressRepository.UserHasProgress(library.Type, userId));
}


@ -1,8 +1,8 @@
namespace API.DTOs
namespace API.DTOs.Account
{
public class LoginDto
{
public string Username { get; init; }
public string Password { get; init; }
public string Password { get; set; }
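// Changed from init to set so AccountController can substitute the default password when authentication is disabled.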
}
}
}


@ -1,4 +1,5 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using API.Entities.Enums;
namespace API.DTOs
@ -7,8 +8,11 @@ namespace API.DTOs
{
public int Id { get; init; }
public string Name { get; init; }
public string CoverImage { get; init; }
/// <summary>
/// Last time Library was scanned
/// </summary>
public DateTime LastScanned { get; init; }
public LibraryType Type { get; init; }
public ICollection<string> Folders { get; init; }
}
}
}


@ -23,7 +23,7 @@ namespace API.DTOs.OPDS
public string Title { get; set; }
[XmlAttribute("count", Namespace = "http://vaemendis.net/opds-pse/ns")]
public int TotalPages { get; set; } = 0;
public int TotalPages { get; set; }
public bool ShouldSerializeTotalPages()
{


@ -1,5 +1,4 @@
using API.Entities.Enums;
using Newtonsoft.Json;
namespace API.DTOs.Reader
{


@ -8,7 +8,7 @@ namespace API.DTOs
public string Username { get; init; }
[Required]
[StringLength(32, MinimumLength = 6)]
public string Password { get; init; }
public string Password { get; set; }
public bool IsAdmin { get; init; }
}
}
}


@ -1,4 +1,4 @@
namespace API.DTOs
namespace API.DTOs.Settings
{
public class ServerSettingDto
{
@ -21,5 +21,14 @@
/// Enables OPDS connections to be made to the server.
/// </summary>
public bool EnableOpds { get; set; }
/// <summary>
/// Enables Authentication on the server. Defaults to true.
/// </summary>
public bool EnableAuthentication { get; set; }
/// <summary>
/// Base Url for Kavita. Requires restart to take effect.
/// </summary>
public string BaseUrl { get; set; }
}
}


@ -0,0 +1,51 @@
namespace API.Data.Metadata
{
/// <summary>
/// A representation of a ComicInfo.xml file
/// </summary>
/// <remarks>See reference of the loose spec here: https://github.com/Kussie/ComicInfoStandard/blob/main/ComicInfo.xsd</remarks>
public class ComicInfo
{
public string Summary { get; set; }
public string Title { get; set; }
public string Series { get; set; }
public string Number { get; set; }
public string Volume { get; set; }
public string Notes { get; set; }
public string Genre { get; set; }
public int PageCount { get; set; }
// ReSharper disable once InconsistentNaming
public string LanguageISO { get; set; }
public string Web { get; set; }
public int Month { get; set; }
public int Year { get; set; }
/// <summary>
/// Rating based on the content. Think PG-13, R for movies
/// </summary>
public string AgeRating { get; set; }
/// <summary>
/// User's rating of the content
/// </summary>
public float UserRating { get; set; }
public string AlternateSeries { get; set; }
public string StoryArc { get; set; }
public string SeriesGroup { get; set; }
public string AlternativeSeries { get; set; }
public string AlternativeNumber { get; set; }
/// <summary>
/// This is the Author. For Books, we map the creator tag in OPF to this field. Comma-separated if multiple.
/// </summary>
public string Writer { get; set; } // TODO: Validate if we should make this a list of writers
public string Penciller { get; set; }
public string Inker { get; set; }
public string Colorist { get; set; }
public string Letterer { get; set; }
public string CoverArtist { get; set; }
public string Editor { get; set; }
public string Publisher { get; set; }
}
}
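Since the property names mirror the element names in ComicInfo.xml, one plausible way to populate the class is plain XmlSerializer deserialization. This is a sketch under that assumption; the parsing code Kavita actually uses is not part of this hunk, and ComicInfoReader is a hypothetical helper name.

using System.IO;
using System.Xml.Serialization;
using API.Data.Metadata;

public static class ComicInfoReader
{
    // Hypothetical helper: deserialize a ComicInfo.xml stream into the POCO above.
    public static ComicInfo Read(Stream xmlStream)
    {
        var serializer = new XmlSerializer(typeof(ComicInfo));
        return (ComicInfo) serializer.Deserialize(xmlStream);
    }
}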

File diff suppressed because it is too large


@ -0,0 +1,25 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
public partial class LastScannedLibrary : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<DateTime>(
name: "LastScanned",
table: "Library",
type: "TEXT",
nullable: false,
defaultValue: new DateTime(1, 1, 1, 0, 0, 0, 0, DateTimeKind.Unspecified));
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "LastScanned",
table: "Library");
}
}
}


@ -397,6 +397,9 @@ namespace API.Data.Migrations
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<DateTime>("LastScanned")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");


@ -1,7 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using API.DTOs;
using API.DTOs.Reader;


@ -1,4 +1,5 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
@ -11,6 +12,17 @@ using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories
{
[Flags]
public enum LibraryIncludes
{
None = 1,
Series = 2,
AppUser = 4,
Folders = 8,
// Ratings = 16
}
public class LibraryRepository : ILibraryRepository
{
private readonly DataContext _context;
@ -58,7 +70,7 @@ namespace API.Data.Repositories
public async Task<bool> DeleteLibrary(int libraryId)
{
var library = await GetLibraryForIdAsync(libraryId);
var library = await GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.Series);
_context.Library.Remove(library);
return await _context.SaveChangesAsync() > 0;
}
@ -91,14 +103,37 @@ namespace API.Data.Repositories
.ToListAsync();
}
public async Task<Library> GetLibraryForIdAsync(int libraryId)
public async Task<Library> GetLibraryForIdAsync(int libraryId, LibraryIncludes includes)
{
return await _context.Library
.Where(x => x.Id == libraryId)
.Include(f => f.Folders)
.Include(l => l.Series)
.SingleAsync();
var query = _context.Library
.Where(x => x.Id == libraryId);
query = AddIncludesToQuery(query, includes);
return await query.SingleAsync();
}
private static IQueryable<Library> AddIncludesToQuery(IQueryable<Library> query, LibraryIncludes includeFlags)
{
if (includeFlags.HasFlag(LibraryIncludes.Folders))
{
query = query.Include(l => l.Folders);
}
if (includeFlags.HasFlag(LibraryIncludes.Series))
{
query = query.Include(l => l.Series);
}
if (includeFlags.HasFlag(LibraryIncludes.AppUser))
{
query = query.Include(l => l.AppUsers);
}
return query;
}
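A short usage sketch (illustrative) of the new flags: callers OR the LibraryIncludes values together, and AddIncludesToQuery turns each set flag into an eager Include. Note that None is defined as 1 rather than the conventional 0 for a [Flags] enum, so it occupies a bit of its own.

// Library plus its folders and series, as used by DeleteLibrary above:
var library = await GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.Series);

// Just the library row, no navigation properties loaded:
var bare = await GetLibraryForIdAsync(libraryId, LibraryIncludes.None);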
/// <summary>
/// This returns a Library with all its Series -> Volumes -> Chapters. This is expensive. Should only be called when needed.
/// </summary>
@ -106,7 +141,6 @@ namespace API.Data.Repositories
/// <returns></returns>
public async Task<Library> GetFullLibraryForIdAsync(int libraryId)
{
return await _context.Library
.Where(x => x.Id == libraryId)
.Include(f => f.Folders)


@ -53,7 +53,7 @@ namespace API.Data.Repositories
{
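// EF Core 5 ordered Include: Items are loaded already sorted by Order.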
return await _context.ReadingList
.Where(r => r.Id == readingListId)
.Include(r => r.Items)
.Include(r => r.Items.OrderBy(item => item.Order))
.SingleOrDefaultAsync();
}


@ -1,15 +1,15 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.Data.Scanner;
using API.DTOs;
using API.DTOs.Filtering;
using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Interfaces.Repositories;
using API.Services.Tasks;
using AutoMapper;
using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore;
@ -26,9 +26,9 @@ namespace API.Data.Repositories
_mapper = mapper;
}
public void Add(Series series)
public void Attach(Series series)
{
_context.Series.Add(series);
_context.Series.Attach(series);
}
public void Update(Series series)
@ -36,19 +36,9 @@ namespace API.Data.Repositories
_context.Entry(series).State = EntityState.Modified;
}
public async Task<bool> SaveAllAsync()
public void Remove(Series series)
{
return await _context.SaveChangesAsync() > 0;
}
public bool SaveAll()
{
return _context.SaveChanges() > 0;
}
public async Task<Series> GetSeriesByNameAsync(string name)
{
return await _context.Series.SingleOrDefaultAsync(x => x.Name == name);
_context.Series.Remove(series);
}
public async Task<bool> DoesSeriesNameExistInLibrary(string name)
@ -64,11 +54,6 @@ namespace API.Data.Repositories
.CountAsync() > 1;
}
public Series GetSeriesByName(string name)
{
return _context.Series.SingleOrDefault(x => x.Name == name);
}
public async Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId)
{
return await _context.Series
@ -77,6 +62,43 @@ namespace API.Data.Repositories
.ToListAsync();
}
/// <summary>
/// Used by <see cref="ScannerService"/> to fetch a library's Series, with Volumes, Chapters, and Files included, in paged chunks.
/// </summary>
/// <param name="libraryId"></param>
/// <returns></returns>
public async Task<PagedList<Series>> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams)
{
var query = _context.Series
.Where(s => s.LibraryId == libraryId)
.Include(s => s.Metadata)
.Include(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(c => c.Files)
.AsSplitQuery()
.OrderBy(s => s.SortName);
return await PagedList<Series>.CreateAsync(query, userParams.PageNumber, userParams.PageSize);
}
/// <summary>
/// This is a heavy call. Returns all entities down to Files, plus the Library and Series Metadata.
/// </summary>
/// <param name="seriesId"></param>
/// <returns></returns>
public async Task<Series> GetFullSeriesForSeriesIdAsync(int seriesId)
{
return await _context.Series
.Where(s => s.Id == seriesId)
.Include(s => s.Metadata)
.Include(s => s.Library)
.Include(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(c => c.Files)
.AsSplitQuery()
.SingleOrDefaultAsync();
}
public async Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter)
{
var formats = filter.GetSqlFilter();
@ -103,41 +125,12 @@ namespace API.Data.Repositories
.ToListAsync();
}
public async Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId)
{
var volumes = await _context.Volume
.Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters)
.OrderBy(volume => volume.Number)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.AsNoTracking()
.ToListAsync();
await AddVolumeModifiers(userId, volumes);
SortSpecialChapters(volumes);
return volumes;
}
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
{
var sorter = new NaturalSortComparer();
foreach (var v in volumes.Where(vDto => vDto.Number == 0))
{
v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList();
}
}
public async Task<IEnumerable<Volume>> GetVolumes(int seriesId)
{
return await _context.Volume
.Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.OrderBy(vol => vol.Number)
.ToListAsync();
}
public async Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId)
{
@ -151,55 +144,8 @@ namespace API.Data.Repositories
return seriesList[0];
}
public async Task<Volume> GetVolumeAsync(int volumeId)
{
return await _context.Volume
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.SingleOrDefaultAsync(vol => vol.Id == volumeId);
}
public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId)
{
return await _context.Volume
.Where(vol => vol.Id == volumeId)
.AsNoTracking()
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.SingleAsync();
}
public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId)
{
var volume = await _context.Volume
.Where(vol => vol.Id == volumeId)
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.SingleAsync(vol => vol.Id == volumeId);
var volumeList = new List<VolumeDto>() {volume};
await AddVolumeModifiers(userId, volumeList);
return volumeList[0];
}
/// <summary>
/// Returns all volumes that contain a seriesId in passed array.
/// </summary>
/// <param name="seriesIds"></param>
/// <returns></returns>
public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false)
{
var query = _context.Volume
.Where(v => seriesIds.Contains(v.SeriesId));
if (includeChapters)
{
query = query.Include(v => v.Chapters);
}
return await query.ToListAsync();
}
public async Task<bool> DeleteSeriesAsync(int seriesId)
{
@ -209,11 +155,12 @@ namespace API.Data.Repositories
return await _context.SaveChangesAsync() > 0;
}
public async Task<Volume> GetVolumeByIdAsync(int volumeId)
{
return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId);
}
/// <summary>
/// Returns Volumes, Metadata, and Collection Tags
/// </summary>
/// <param name="seriesId"></param>
/// <returns></returns>
public async Task<Series> GetSeriesByIdAsync(int seriesId)
{
return await _context.Series
@ -244,7 +191,7 @@ namespace API.Data.Repositories
}
/// <summary>
/// This returns a list of tuples<chapterId, seriesId> back for each series id passed
/// This returns a dictionary mapping seriesId -> list of chapter ids for each series id passed
/// </summary>
/// <param name="seriesIds"></param>
/// <returns></returns>
@ -301,24 +248,7 @@ namespace API.Data.Repositories
.SingleOrDefaultAsync();
}
private async Task AddVolumeModifiers(int userId, IReadOnlyCollection<VolumeDto> volumes)
{
var volIds = volumes.Select(s => s.Id);
var userProgress = await _context.AppUserProgresses
.Where(p => p.AppUserId == userId && volIds.Contains(p.VolumeId))
.AsNoTracking()
.ToListAsync();
foreach (var v in volumes)
{
foreach (var c in v.Chapters)
{
c.PagesRead = userProgress.Where(p => p.ChapterId == c.Id).Sum(p => p.PagesRead);
}
v.PagesRead = userProgress.Where(p => p.VolumeId == v.Id).Sum(p => p.PagesRead);
}
}
/// <summary>
/// Returns a list of Series that were added, ordered by Created desc
@ -497,5 +427,63 @@ namespace API.Data.Repositories
.AsNoTracking()
.ToListAsync();
}
/// <summary>
/// Returns the number of series for a given library (or all libraries if libraryId is 0)
/// </summary>
/// <param name="libraryId">Defaults to 0, library to restrict count to</param>
/// <returns></returns>
private async Task<int> GetSeriesCount(int libraryId = 0)
{
if (libraryId > 0)
{
return await _context.Series
.Where(s => s.LibraryId == libraryId)
.CountAsync();
}
return await _context.Series.CountAsync();
}
/// <summary>
/// Returns the number of series that should be processed in parallel to optimize speed and memory. Minimum of 50
/// </summary>
/// <param name="libraryId">Defaults to 0 meaning no library</param>
/// <returns></returns>
private async Task<Tuple<int, int>> GetChunkSize(int libraryId = 0)
{
// TODO: Think about making this bigger depending on number of files a user has in said library
// and number of cores and amount of memory. We can then make an optimal choice
var totalSeries = await GetSeriesCount(libraryId);
var procCount = Math.Max(Environment.ProcessorCount - 1, 1);
if (totalSeries < procCount * 2 || totalSeries < 50)
{
return new Tuple<int, int>(totalSeries, totalSeries);
}
return new Tuple<int, int>(totalSeries, Math.Max(totalSeries / procCount, 50));
}
public async Task<Chunk> GetChunkInfo(int libraryId = 0)
{
var (totalSeries, chunkSize) = await GetChunkSize(libraryId);
if (totalSeries == 0) return new Chunk()
{
TotalChunks = 0,
TotalSize = 0,
ChunkSize = 0
};
var totalChunks = Math.Max((int) Math.Ceiling((totalSeries * 1.0) / chunkSize), 1);
return new Chunk()
{
TotalSize = totalSeries,
ChunkSize = chunkSize,
TotalChunks = totalChunks
};
}
}
}
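To make the chunk arithmetic concrete, a hypothetical walk-through (the numbers are illustrative, not from this commit): a library of 1,000 series on an 8-core machine.

var totalSeries = 1000;
var procCount = Math.Max(Environment.ProcessorCount - 1, 1);   // 7 on an 8-core machine
// totalSeries is >= procCount * 2 and >= 50, so the chunked path is taken:
var chunkSize = Math.Max(totalSeries / procCount, 50);         // max(142, 50) = 142
var totalChunks = Math.Max((int) Math.Ceiling((totalSeries * 1.0) / chunkSize), 1); // ceil(1000/142) = 8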


@ -1,7 +1,7 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces.Repositories;
@ -35,6 +35,15 @@ namespace API.Data.Repositories
return _mapper.Map<ServerSettingDto>(settings);
}
public ServerSettingDto GetSettingsDto()
{
var settings = _context.ServerSetting
.Select(x => x)
.AsNoTracking()
.ToList();
return _mapper.Map<ServerSettingDto>(settings);
}
public Task<ServerSetting> GetSettingAsync(ServerSettingKey key)
{
return _context.ServerSetting.SingleOrDefaultAsync(x => x.Key == key);


@ -153,6 +153,16 @@ namespace API.Data.Repositories
return await _userManager.GetUsersInRoleAsync(PolicyConstants.AdminRole);
}
public async Task<IEnumerable<AppUser>> GetNonAdminUsersAsync()
{
return await _userManager.GetUsersInRoleAsync(PolicyConstants.PlebRole);
}
public async Task<bool> IsUserAdmin(AppUser user)
{
return await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole);
}
public async Task<AppUserRating> GetUserRating(int seriesId, int userId)
{
return await _context.AppUserRating.Where(r => r.SeriesId == seriesId && r.AppUserId == userId)
@ -237,8 +247,8 @@ namespace API.Data.Repositories
Libraries = u.Libraries.Select(l => new LibraryDto
{
Name = l.Name,
CoverImage = l.CoverImage,
Type = l.Type,
LastScanned = l.LastScanned,
Folders = l.Folders.Select(x => x.Path).ToList()
}).ToList()
})


@ -1,9 +1,8 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.DTOs;
using API.DTOs.Reader;
using API.Entities;
using API.Interfaces.Repositories;
using AutoMapper;
@ -15,10 +14,17 @@ namespace API.Data.Repositories
public class VolumeRepository : IVolumeRepository
{
private readonly DataContext _context;
private readonly IMapper _mapper;
public VolumeRepository(DataContext context)
public VolumeRepository(DataContext context, IMapper mapper)
{
_context = context;
_mapper = mapper;
}
public void Add(Volume volume)
{
_context.Volume.Add(volume);
}
public void Update(Volume volume)
@ -26,6 +32,16 @@ namespace API.Data.Repositories
_context.Entry(volume).State = EntityState.Modified;
}
public void Remove(Volume volume)
{
_context.Volume.Remove(volume);
}
/// <summary>
/// Returns a list of non-tracked files for a given volume.
/// </summary>
/// <param name="volumeId"></param>
/// <returns></returns>
public async Task<IList<MangaFile>> GetFilesForVolume(int volumeId)
{
return await _context.Chapter
@ -36,6 +52,11 @@ namespace API.Data.Repositories
.ToListAsync();
}
/// <summary>
/// Returns the cover image file for the given volume
/// </summary>
/// <param name="volumeId"></param>
/// <returns></returns>
public async Task<string> GetVolumeCoverImageAsync(int volumeId)
{
return await _context.Volume
@ -45,6 +66,11 @@ namespace API.Data.Repositories
.SingleOrDefaultAsync();
}
/// <summary>
/// Returns all chapter Ids belonging to a list of Volume Ids
/// </summary>
/// <param name="volumeIds"></param>
/// <returns></returns>
public async Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds)
{
return await _context.Chapter
@ -52,5 +78,131 @@ namespace API.Data.Repositories
.Select(c => c.Id)
.ToListAsync();
}
/// <summary>
/// Returns all volumes that contain a seriesId in passed array.
/// </summary>
/// <param name="seriesIds"></param>
/// <returns></returns>
public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false)
{
var query = _context.Volume
.Where(v => seriesIds.Contains(v.SeriesId));
if (includeChapters)
{
query = query.Include(v => v.Chapters);
}
return await query.ToListAsync();
}
/// <summary>
/// Returns an individual Volume including Chapters and Files and Reading Progress for a given volumeId
/// </summary>
/// <param name="volumeId"></param>
/// <param name="userId"></param>
/// <returns></returns>
public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId)
{
var volume = await _context.Volume
.Where(vol => vol.Id == volumeId)
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.SingleAsync(vol => vol.Id == volumeId);
var volumeList = new List<VolumeDto>() {volume};
await AddVolumeModifiers(userId, volumeList);
return volumeList[0];
}
/// <summary>
/// Returns the full Volumes including Chapters and Files for a given series
/// </summary>
/// <param name="seriesId"></param>
/// <returns></returns>
public async Task<IEnumerable<Volume>> GetVolumes(int seriesId)
{
return await _context.Volume
.Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.OrderBy(vol => vol.Number)
.ToListAsync();
}
/// <summary>
/// Returns a single volume with Chapter and Files
/// </summary>
/// <param name="volumeId"></param>
/// <returns></returns>
public async Task<Volume> GetVolumeAsync(int volumeId)
{
return await _context.Volume
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.SingleOrDefaultAsync(vol => vol.Id == volumeId);
}
/// <summary>
/// Returns all volumes for a given series with progress information attached. Includes all Chapters as well.
/// </summary>
/// <param name="seriesId"></param>
/// <param name="userId"></param>
/// <returns></returns>
public async Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId)
{
var volumes = await _context.Volume
.Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters)
.OrderBy(volume => volume.Number)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.AsNoTracking()
.ToListAsync();
await AddVolumeModifiers(userId, volumes);
SortSpecialChapters(volumes);
return volumes;
}
public async Task<Volume> GetVolumeByIdAsync(int volumeId)
{
return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId);
}
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
{
var sorter = new NaturalSortComparer();
foreach (var v in volumes.Where(vDto => vDto.Number == 0))
{
v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList();
}
}
private async Task AddVolumeModifiers(int userId, IReadOnlyCollection<VolumeDto> volumes)
{
var volIds = volumes.Select(s => s.Id);
var userProgress = await _context.AppUserProgresses
.Where(p => p.AppUserId == userId && volIds.Contains(p.VolumeId))
.AsNoTracking()
.ToListAsync();
foreach (var v in volumes)
{
foreach (var c in v.Chapters)
{
c.PagesRead = userProgress.Where(p => p.ChapterId == c.Id).Sum(p => p.PagesRead);
}
v.PagesRead = userProgress.Where(p => p.VolumeId == v.Id).Sum(p => p.PagesRead);
}
}
}
}

API/Data/Scanner/Chunk.cs (new file, 21 additions)

@ -0,0 +1,21 @@
namespace API.Data.Scanner
{
/// <summary>
/// Represents a set of entities which is broken up into chunks and iterated over
/// </summary>
public class Chunk
{
/// <summary>
/// Total number of entities
/// </summary>
public int TotalSize { get; set; }
/// <summary>
/// Size of each chunk to iterate over
/// </summary>
public int ChunkSize { get; set; }
/// <summary>
/// Total chunks to iterate over
/// </summary>
public int TotalChunks { get; set; }
}
}


@ -49,6 +49,8 @@ namespace API.Data
new () {Key = ServerSettingKey.Port, Value = "5000"}, // Not used from DB, but DB is sync with appSettings.json
new () {Key = ServerSettingKey.AllowStatCollection, Value = "true"},
new () {Key = ServerSettingKey.EnableOpds, Value = "false"},
new () {Key = ServerSettingKey.EnableAuthentication, Value = "true"},
new () {Key = ServerSettingKey.BaseUrl, Value = "/"},
};
foreach (var defaultSetting in defaultSettings)


@ -1,6 +1,5 @@

using System;
using System.ComponentModel.DataAnnotations;
using API.Entities.Interfaces;
namespace API.Entities


@ -20,6 +20,10 @@ namespace API.Entities.Enums
AllowStatCollection = 6,
[Description("EnableOpds")]
EnableOpds = 7,
[Description("EnableAuthentication")]
EnableAuthentication = 8,
[Description("BaseUrl")]
BaseUrl = 9
}
}


@ -8,12 +8,12 @@ namespace API.Entities
public int Id { get; set; }
public string Path { get; set; }
/// <summary>
/// Used when scanning to see if the folder can be skipped when nothing has changed.
/// Used when scanning to see if the folder can be skipped when nothing has changed. (not implemented)
/// </summary>
public DateTime LastScanned { get; set; }
// Relationship
public Library Library { get; set; }
public int LibraryId { get; set; }
}
}
}


@ -13,9 +13,13 @@ namespace API.Entities
public LibraryType Type { get; set; }
public DateTime Created { get; set; }
public DateTime LastModified { get; set; }
/// <summary>
/// Last time Library was scanned
/// </summary>
public DateTime LastScanned { get; set; }
public ICollection<FolderPath> Folders { get; set; }
public ICollection<AppUser> AppUsers { get; set; }
public ICollection<Series> Series { get; set; }
}
}
}


@ -38,5 +38,13 @@ namespace API.Entities
{
return File.GetLastWriteTime(FilePath) > LastModified;
}
/// <summary>
/// Updates the Last Modified time of the underlying file
/// </summary>
public void UpdateLastModified()
{
LastModified = File.GetLastWriteTime(FilePath);
}
}
}


@ -33,7 +33,7 @@ namespace API.Entities
/// <summary>
/// Summary information related to the Series
/// </summary>
public string Summary { get; set; } // TODO: Migrate into SeriesMetadata (with Metadata update)
public string Summary { get; set; } // NOTE: Migrate into SeriesMetadata (with Metadata update)
public DateTime Created { get; set; }
public DateTime LastModified { get; set; }
/// <summary>


@ -8,6 +8,9 @@ namespace API.Entities
public class Volume : IEntityDate
{
public int Id { get; set; }
/// <summary>
/// A String representation of the volume number. Allows for floats
/// </summary>
public string Name { get; set; }
public int Number { get; set; }
public IList<Chapter> Chapters { get; set; }


@ -36,12 +36,13 @@ namespace API.Extensions
services.AddScoped<IVersionUpdaterService, VersionUpdaterService>();
services.AddScoped<IDownloadService, DownloadService>();
services.AddScoped<IReaderService, ReaderService>();
services.AddScoped<IAccountService, AccountService>();
services.AddScoped<IPresenceTracker, PresenceTracker>();
services.AddSqLite(config, env);
services.AddLogging(config);
services.AddSignalR();
services.AddSignalR(opt => opt.EnableDetailedErrors = true);
}
private static void AddSqLite(this IServiceCollection services, IConfiguration config,


@ -76,7 +76,8 @@ namespace API.Extensions
directoryIndex++;
}
foreach (var subDirectory in directory.EnumerateDirectories())
var sort = new NaturalSortComparer();
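// Natural sort orders "2" before "10", unlike plain string comparison, keeping the flattened output in reading order.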
foreach (var subDirectory in directory.EnumerateDirectories().OrderBy(d => d.FullName, sort))
{
FlattenDirectory(root, subDirectory, ref directoryIndex);
}


@ -1,4 +1,5 @@
using System.Linq;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using API.Helpers;
@ -41,8 +42,9 @@ namespace API.Extensions
public static void AddCacheHeader(this HttpResponse response, string filename)
{
if (filename == null || filename.Length <= 0) return;
var hashContent = filename + File.GetLastWriteTimeUtc(filename);
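// Folding the last write time into the hash makes the ETag change whenever the file is modified on disk.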
using var sha1 = new System.Security.Cryptography.SHA256CryptoServiceProvider();
response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(filename)).Select(x => x.ToString("X2"))));
response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2"))));
}
}


@ -3,6 +3,7 @@ using System.Linq;
using API.DTOs;
using API.DTOs.Reader;
using API.DTOs.ReadingLists;
using API.DTOs.Settings;
using API.Entities;
using API.Helpers.Converters;
using AutoMapper;


@ -1,5 +1,5 @@
using System.Collections.Generic;
using API.DTOs;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
using AutoMapper;
@ -36,6 +36,12 @@ namespace API.Helpers.Converters
case ServerSettingKey.EnableOpds:
destination.EnableOpds = bool.Parse(row.Value);
break;
case ServerSettingKey.EnableAuthentication:
destination.EnableAuthentication = bool.Parse(row.Value);
break;
case ServerSettingKey.BaseUrl:
destination.BaseUrl = row.Value;
break;
}
}


@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs;
using API.Entities;
using API.Entities.Enums;
@ -13,7 +14,7 @@ namespace API.Interfaces.Repositories
void Delete(Library library);
Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync();
Task<bool> LibraryExists(string libraryName);
Task<Library> GetLibraryForIdAsync(int libraryId);
Task<Library> GetLibraryForIdAsync(int libraryId, LibraryIncludes includes);
Task<Library> GetFullLibraryForIdAsync(int libraryId);
Task<Library> GetFullLibraryForIdAsync(int libraryId, int seriesId);
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);

View file

@ -1,7 +1,6 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data.Scanner;
using API.DTOs;
using API.DTOs.Filtering;
using API.Entities;
@ -11,12 +10,10 @@ namespace API.Interfaces.Repositories
{
public interface ISeriesRepository
{
void Add(Series series);
void Attach(Series series);
void Update(Series series);
Task<Series> GetSeriesByNameAsync(string name);
void Remove(Series series);
Task<bool> DoesSeriesNameExistInLibrary(string name);
Series GetSeriesByName(string name);
/// <summary>
/// Adds user information like progress, ratings, etc
/// </summary>
@ -25,7 +22,6 @@ namespace API.Interfaces.Repositories
/// <param name="userParams"></param>
/// <returns></returns>
Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter);
/// <summary>
/// Does not add user information like progress, ratings, etc.
/// </summary>
@ -34,20 +30,8 @@ namespace API.Interfaces.Repositories
/// <returns></returns>
Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery);
Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId);
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
Task<IEnumerable<Volume>> GetVolumes(int seriesId);
Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId);
Task<Volume> GetVolumeAsync(int volumeId);
Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId);
/// <summary>
/// A fast lookup of just the volume information with no tracking.
/// </summary>
/// <param name="volumeId"></param>
/// <returns></returns>
Task<VolumeDto> GetVolumeDtoAsync(int volumeId);
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
Task<bool> DeleteSeriesAsync(int seriesId);
Task<Volume> GetVolumeByIdAsync(int volumeId);
Task<Series> GetSeriesByIdAsync(int seriesId);
Task<int[]> GetChapterIdsForSeriesAsync(int[] seriesIds);
Task<IDictionary<int, IList<int>>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds);
@ -58,16 +42,17 @@ namespace API.Interfaces.Repositories
/// <param name="series"></param>
/// <returns></returns>
Task AddSeriesModifiers(int userId, List<SeriesDto> series);
Task<string> GetSeriesCoverImageAsync(int seriesId);
Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, UserParams userParams, FilterDto filter);
Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter);
Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter); // NOTE: Probably put this in LibraryRepo
Task<SeriesMetadataDto> GetSeriesMetadata(int seriesId);
Task<PagedList<SeriesDto>> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams);
Task<IList<MangaFile>> GetFilesForSeries(int seriesId);
Task<IEnumerable<SeriesDto>> GetSeriesDtoForIdsAsync(IEnumerable<int> seriesIds, int userId);
Task<IList<string>> GetAllCoverImagesAsync();
Task<IEnumerable<string>> GetLockedCoverImagesAsync();
Task<PagedList<Series>> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams);
Task<Series> GetFullSeriesForSeriesIdAsync(int seriesId);
Task<Chunk> GetChunkInfo(int libraryId = 0);
}
}

View file

@ -1,6 +1,6 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.DTOs;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
@ -10,6 +10,7 @@ namespace API.Interfaces.Repositories
{
void Update(ServerSetting settings);
Task<ServerSettingDto> GetSettingsDtoAsync();
ServerSettingDto GetSettingsDto();
Task<ServerSetting> GetSettingAsync(ServerSettingKey key);
Task<IEnumerable<ServerSetting>> GetSettingsAsync();

View file

@ -15,6 +15,8 @@ namespace API.Interfaces.Repositories
public void Delete(AppUser user);
Task<IEnumerable<MemberDto>> GetMembersAsync();
Task<IEnumerable<AppUser>> GetAdminUsersAsync();
Task<IEnumerable<AppUser>> GetNonAdminUsersAsync();
Task<bool> IsUserAdmin(AppUser user);
Task<AppUserRating> GetUserRating(int seriesId, int userId);
Task<AppUserPreferences> GetPreferencesAsync(string username);
Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForSeries(int userId, int seriesId);

View file

@ -7,9 +7,19 @@ namespace API.Interfaces.Repositories
{
public interface IVolumeRepository
{
void Add(Volume volume);
void Update(Volume volume);
void Remove(Volume volume);
Task<IList<MangaFile>> GetFilesForVolume(int volumeId);
Task<string> GetVolumeCoverImageAsync(int volumeId);
Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds);
// From Series Repo
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
Task<Volume> GetVolumeAsync(int volumeId);
Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId);
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
Task<IEnumerable<Volume>> GetVolumes(int seriesId);
Task<Volume> GetVolumeByIdAsync(int volumeId);
}
}

View file

@ -0,0 +1,12 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Entities;
using API.Errors;
namespace API.Interfaces.Services
{
public interface IAccountService
{
Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword);
}
}

View file

@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO.Compression;
using System.Threading.Tasks;
using API.Archive;
using API.Data.Metadata;
namespace API.Interfaces.Services
{
@ -12,7 +13,7 @@ namespace API.Interfaces.Services
int GetNumberOfPagesFromArchive(string archivePath);
string GetCoverImage(string archivePath, string fileName);
bool IsValidArchive(string archivePath);
string GetSummaryInfo(string archivePath);
ComicInfo GetComicInfo(string archivePath);
ArchiveLibrary CanOpen(string archivePath);
bool ArchiveNeedsFlattening(ZipArchive archive);
Task<Tuple<byte[], string>> CreateZipForDownload(IEnumerable<string> files, string tempFolder);

View file

@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data.Metadata;
using API.Parser;
using VersOne.Epub;
@ -20,7 +21,7 @@ namespace API.Interfaces.Services
/// <param name="book">Book Reference, needed for if you expect Import statements</param>
/// <returns></returns>
Task<string> ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book);
string GetSummaryInfo(string filePath);
ComicInfo GetComicInfo(string filePath);
ParserInfo ParseInfo(string filePath);
/// <summary>
/// Extracts a PDF file's pages as images to a target directory

View file

@ -11,9 +11,8 @@ namespace API.Interfaces.Services
/// cover images if forceUpdate is true.
/// </summary>
/// <param name="libraryId">Library to scan against</param>
/// <param name="forceUpdate">Force overwriting for cover images</param>
Task ScanLibrary(int libraryId, bool forceUpdate);
Task ScanLibrary(int libraryId);
Task ScanLibraries();
Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token);
Task ScanSeries(int libraryId, int seriesId, CancellationToken token);
}
}

View file

@ -1,7 +1,6 @@

using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
@ -210,7 +209,7 @@ namespace API.Interfaces.Services
/// <returns>-1 if nothing can be found</returns>
public async Task<int> GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{
var volumes = (await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId)).ToList();
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);
@ -262,7 +261,7 @@ namespace API.Interfaces.Services
/// <returns>-1 if nothing can be found</returns>
public async Task<int> GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{
var volumes = (await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList();
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);

View file

@ -21,29 +21,28 @@ namespace API.Parser
public const string SupportedExtensions =
ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions;
private const RegexOptions MatchOptions =
RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant;
public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
MatchOptions, RegexTimeout);
public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?<Filename>[\\w\\d/\\._-]+)([\"|'];?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
MatchOptions, RegexTimeout);
private static readonly string XmlRegexExtensions = @"\.xml";
private static readonly Regex ImageRegex = new Regex(ImageFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
MatchOptions, RegexTimeout);
private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
MatchOptions, RegexTimeout);
private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
MatchOptions, RegexTimeout);
private static readonly Regex BookFileRegex = new Regex(BookFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
MatchOptions, RegexTimeout);
private static readonly Regex CoverImageRegex = new Regex(@"(?<![[a-z]\d])(?:!?)(cover|folder)(?![\w\d])",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
MatchOptions, RegexTimeout);
private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9\+]",
MatchOptions, RegexTimeout);
private static readonly Regex[] MangaVolumeRegex = new[]
@ -51,43 +50,35 @@ namespace API.Parser
// Dance in the Vampire Bund v16-17
new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar
new Regex(
@"(?<Series>.*)(\b|_)(?!\[)(vol\.?)(?<Volume>\d+(-\d+)?)(?!\])",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
new Regex(
@"(?<Series>.*)(\b|_)(?!\[)v(?<Volume>\d+(-\d+)?)(?!\])",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Kodomo no Jikan vol. 10
MatchOptions, RegexTimeout),
// Kodomo no Jikan vol. 10, [dmntsf.net] One Piece - Digital Colored Comics Vol. 20.5-21.5 Ch. 177
new Regex(
@"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(\.\d)?(-\d+)?(\.\d)?)",
MatchOptions, RegexTimeout),
// Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex(
@"(vol\.? ?)(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"(vol\.? ?)(?<Volume>\d+(\.\d)?)",
MatchOptions, RegexTimeout),
// Tonikaku Cawaii [Volume 11].cbz
new Regex(
@"(volume )(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"(volume )(?<Volume>\d+(\.\d)?)",
MatchOptions, RegexTimeout),
// Tower Of God S01 014 (CBT) (digital).cbz
new Regex(
@"(?<Series>.*)(\b|_|)(S(?<Volume>\d+))",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// vol_001-1.cbz for MangaPy default naming convention
new Regex(
@"(vol_)(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"(vol_)(?<Volume>\d+(\.\d)?)",
MatchOptions, RegexTimeout),
};
private static readonly Regex[] MangaSeriesRegex = new[]
@ -95,167 +86,138 @@ namespace API.Parser
// Grand Blue Dreaming - SP02
new Regex(
@"(?<Series>.*)(\b|_|-|\s)(?:sp)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar, Yuusha Ga Shinda! - Vol.tbd Chapter 27.001 V2 Infection ①.cbz
new Regex(
@"^(?<Series>.*)( |_)Vol\.?(\d+|tbd)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex),
// The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake
new Regex(
@"(?<Series>.+?)(\s|_|-)+(?:Vol(ume|\.)?(\s|_|-)+\d+)(\s|_|-)+(?:(Ch|Chapter|Ch)\.?)(\s|_|-)+(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
MatchOptions,
RegexTimeout),
// Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip
new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)(\s|_|-)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
MatchOptions,
RegexTimeout),
// Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto]
new Regex(
@"(?<Series>.*)( - )(?:v|vo|c)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip
new Regex(
@"(?<Series>.*)(?:, Chapter )(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz
new Regex(
@"(?<Series>.*)(\s|_|-)(?!Vol)(\s|_|-)(?:Chapter)(\s|_|-)(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// [dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz
new Regex(
@"(?<Series>.*) (\b|_|-)(vol)\.?(\s|-|_)?\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// [xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans]
new Regex(
@"(?<Series>.*) (\b|_|-)(vol)(ume)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
MatchOptions,
RegexTimeout),
//Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans]
new Regex(
@"(?<Series>.*)(\bc\d+\b)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
//Tonikaku Cawaii [Volume 11], Darling in the FranXX - Volume 01.cbz
new Regex(
@"(?<Series>.*)(?: _|-|\[|\()\s?vol(ume)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Momo The Blood Taker - Chapter 027 Violent Emotion.cbz, Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz
new Regex(
@"^(?<Series>(?!Vol).+?)(?:(ch(apter|\.)(\b|_|-|\s))|sp)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex(
@"(?<Series>.*) (\b|_|-)(v|ch\.?|c)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
//Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
// due to duplicate version identifiers in file.
new Regex(
@"(?<Series>.*)(v|s)\d+(-\d+)?(_|\s)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
//[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
new Regex(
@"(?<Series>.*)(v|s)\d+(-\d+)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz
new Regex(
@"(?<Series>.*) (?<Chapter>\d+) (?:\(\d{4}\)) ",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)
new Regex(
@"(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?) \(\d{4}\)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Noblesse - Episode 429 (74 Pages).7z
new Regex(
@"(?<Series>.*)(\s|_)(?:Episode|Ep\.?)(\s|_)(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)
new Regex(
@"(?<Series>.*)\(\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Tonikaku Kawaii (Ch 59-67) (Ongoing)
new Regex(
@"(?<Series>.*)(\s|_)\((c\s|ch\s|chapter\s)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Black Bullet (This is very loose, keep towards bottom)
new Regex(
@"(?<Series>.*)(_)(v|vo|c|volume)( |_)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar
new Regex(
@"(?<Series>.*)( |_)(vol\d+)?( |_)(?:Chp\.? ?\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1
new Regex(
@"(?<Series>.*)( |_)(?:Chp.? ?\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01
new Regex(
@"^(?!Vol)(?<Series>.*)( |_)Chapter( |_)(\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Fullmetal Alchemist chapters 101-108.cbz
new Regex(
@"^(?!vol)(?<Series>.*)( |_)(chapters( |_)?)\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1
new Regex(
@"^(?!Vol\.?)(?<Series>.*)( |_|-)(?<!-)(episode|chapter|(ch\.?) ?)\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Baketeriya ch01-05.zip
new Regex(
@"^(?!Vol)(?<Series>.*)ch\d+-?\d?",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Magi - Ch.252-005.cbz
new Regex(
@"(?<Series>.*)( ?- ?)Ch\.\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// [BAA]_Darker_than_Black_Omake-1.zip
new Regex(
@"^(?!Vol)(?<Series>.*)(-)\d+-?\d*", // This catches a lot of stuff ^(?!Vol)(?<Series>.*)( |_)(\d+)
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Kodoja #001 (March 2016)
new Regex(
@"(?<Series>.*)(\s|_|-)#",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Baketeriya ch01-05.zip, Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar, A Compendium of Ghosts - 031 - The Third Story_ Part 12 (Digital) (Cobalt001)
new Regex(
@"^(?!Vol\.?)(?<Series>.+?)( |_|-)(?<!-)(ch)?\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's close to last)
new Regex(
@"^(?!Vol)(?<Series>.*)( |_|-)(ch?)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
};
private static readonly Regex[] ComicSeriesRegex = new[]
@ -263,110 +225,79 @@ namespace API.Parser
// Invincible Vol 01 Family matters (2005) (Digital)
new Regex(
@"(?<Series>.*)(\b|_)(vol\.?)( |_)(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Batman Beyond 2.0 001 (2013)
new Regex(
@"^(?<Series>.+?\S\.\d) (?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
new Regex(
@"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"^(?<Volume>\d+)\s(-\s|_)(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
MatchOptions, RegexTimeout),
// 01 Spider-Man & Wolverine 01.cbr
new Regex(
@"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"^(?<Volume>\d+)\s(?:-\s)(?<Series>.*) (\d+)?",
MatchOptions, RegexTimeout),
// Batman & Wildcat (1 of 3)
new Regex(
@"(?<Series>.*(\d{4})?)( |_)(?:\((?<Volume>\d+) of \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?: |_)v\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Amazing Man Comics chapter 25
new Regex(
@"^(?<Series>.*)(?: |_)c(hapter) \d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Amazing Man Comics issue #25
new Regex(
@"^(?<Series>.*)(?: |_)i(ssue) #\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Batman Wayne Family Adventures - Ep. 001 - Moving In
new Regex(
@"^(?<Series>.+?)(\s|_|-)?(?:Ep\.?)(\s|_|-)+\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
MatchOptions, RegexTimeout),
// Batgirl Vol.2000 #57 (December, 2004)
new Regex(
@"^(?<Series>.+?)(?: \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"^(?<Series>.+?)Vol\.?\s?#?(?:\d+)",
MatchOptions, RegexTimeout),
// Batman & Robin the Teen Wonder #0
new Regex(
@"^(?<Series>.*)(?: |_)#\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.+?)(?: \d+)",
MatchOptions, RegexTimeout),
// Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)
new Regex(
@"^(?<Series>.*)(?: |_)(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// The First Asterix Frieze (WebP by Doc MaKS)
new Regex(
@"^(?<Series>.*)(?: |_)(?!\(\d{4}|\d{4}-\d{2}\))\(",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// spawn-123, spawn-chapter-123 (from https://github.com/Girbons/comics-downloader)
new Regex(
@"^(?<Series>.+?)-(chapter-)?(?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// MUST BE LAST: Batman & Daredevil - King of New York
new Regex(
@"^(?<Series>.*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
};
private static readonly Regex[] ComicVolumeRegex = new[]
{
// // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
// new Regex(
// @"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// // 01 Spider-Man & Wolverine 01.cbr
// new Regex(
// @"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// // Batman & Wildcat (1 of 3)
// new Regex(
// @"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?: |_)v(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)
// BUG: Negative lookbehind has to be fixed width
// NOTE: The case this is built for does not make much sense.
// new Regex(
// @"^(?<Series>.+?)(?<!c(hapter)|i(ssue))(?<!of)(?: |_)(?<!of )(?<Volume>\d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
// new Regex(
// @"^(?<Series>.+?)(?<!c(hapter)|i(ssue))(?<!of)(?: (?<Volume>\d+))",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// // Batman & Robin the Teen Wonder #0
// new Regex(
// @"^(?<Series>.*)(?: |_)#(?<Volume>\d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
MatchOptions, RegexTimeout),
// Batgirl Vol.2000 #57 (December, 2004)
new Regex(
@"^(?<Series>.+?)(?:\s|_)vol\.?\s?(?<Volume>\d+)",
MatchOptions, RegexTimeout),
};
private static readonly Regex[] ComicChapterRegex = new[]
@ -374,59 +305,68 @@ namespace API.Parser
// Batman & Wildcat (1 of 3)
new Regex(
@"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Batman Beyond 04 (of 6) (1999)
new Regex(
@"(?<Series>.+?)(?<Chapter>\d+)(\s|_|-)?\(of",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Batman Beyond 2.0 001 (2013)
new Regex(
@"^(?<Series>.+?\S\.\d) (?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.+?)(?: |_)v(?<Volume>\d+)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Batman & Robin the Teen Wonder #0
new Regex(
@"^(?<Series>.+?)(?:\s|_)#(?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr
new Regex(
@"^(?<Series>.+?)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
MatchOptions, RegexTimeout),
// Batgirl Vol.2000 #57 (December, 2004)
new Regex(
@"^(?<Series>.+?)(?:vol\.?\d+)\s#(?<Chapter>\d+)",
MatchOptions,
RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.+?)(?: (?<Chapter>\d+))",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Robin the Teen Wonder #0
new Regex(
@"^(?<Series>.+?)(?:\s|_)#(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Saga 001 (2012) (Digital) (Empire-Zone)
new Regex(
@"(?<Series>.+?)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)\s\(\d{4}",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Amazing Man Comics chapter 25
new Regex(
@"^(?!Vol)(?<Series>.+?)( |_)c(hapter)( |_)(?<Chapter>\d*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Amazing Man Comics issue #25
new Regex(
@"^(?!Vol)(?<Series>.+?)( |_)i(ssue)( |_) #(?<Chapter>\d*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// spawn-123, spawn-chapter-123 (from https://github.com/Girbons/comics-downloader)
new Regex(
@"^(?<Series>.+?)-(chapter-)?(?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// Cyberpunk 2077 - Your Voice 01
// new Regex(
// @"^(?<Series>.+?\s?-\s?(?:.+?))(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)$",
// MatchOptions,
// RegexTimeout),
};
private static readonly Regex[] ReleaseGroupRegex = new[]
{
// [TrinityBAKumA Finella&anon], [BAA]_, [SlowManga&OverloadScans], [batoto]
new Regex(@"(?:\[(?<subgroup>(?!\s).+?(?<!\s))\](?:_|-|\s|\.)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// (Shadowcat-Empire),
// new Regex(@"(?:\[(?<subgroup>(?!\s).+?(?<!\s))\](?:_|-|\s|\.)?)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled),
// MatchOptions),
};
private static readonly Regex[] MangaChapterRegex = new[]
@ -434,76 +374,62 @@ namespace API.Parser
// Historys Strongest Disciple Kenichi_v11_c90-98.zip, ...c90.5-100.5
new Regex(
@"(\b|_)(c|ch)(\.?\s?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
new Regex(
@"v\d+\.(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz (Rare case, if causes issue remove)
new Regex(
@"^(?<Series>.*)(?: |_)#(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Green Worldz - Chapter 027, Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 11-10
new Regex(
@"^(?!Vol)(?<Series>.*)\s?(?<!vol\. )\sChapter\s(?<Chapter>\d+(?:\.?[\d-]+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz
new Regex(
@"^(?!Vol)(?<Series>.+?)\s(?<!vol\. )(?<Chapter>\d+(?:.\d+|-\d+)?)(?:\s\(\d{4}\))?(\b|_|-)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"^(?!Vol)(?<Series>.+?)(?<!Vol)\.?\s(?<Chapter>\d+(?:.\d+|-\d+)?)(?:\s\(\d{4}\))?(\b|_|-)",
MatchOptions, RegexTimeout),
// Tower Of God S01 014 (CBT) (digital).cbz
new Regex(
@"(?<Series>.*)\sS(?<Volume>\d+)\s(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip
new Regex(
@"^((?!v|vo|vol|Volume).)*(\s|_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)(?<ChapterPart>b)?(\s|_|\[|\()",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
@"^((?!v|vo|vol|Volume).)*(\s|_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)(?<Part>b)?(\s|_|\[|\()",
MatchOptions, RegexTimeout),
// Yumekui-Merry_DKThias_Chapter21.zip
new Regex(
@"Chapter(?<Chapter>\d+(-\d+)?)", //(?:.\d+|-\d+)?
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar
new Regex(
@"(?<Series>.*)(\s|_)(vol\d+)?(\s|_)Chp\.? ?(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Vol 1 Chapter 2
new Regex(
@"(?<Volume>((vol|volume|v))?(\s|_)?\.?\d+)(\s|_)(Chp|Chapter)\.?(\s|_)?(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
};
private static readonly Regex[] MangaEditionRegex = {
// Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz
new Regex(
@"(?<Edition>({|\(|\[).* Edition(}|\)|\]))",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz
new Regex(
@"(\b|_)(?<Edition>Omnibus(( |_)?Edition)?)(\b|_)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// To Love Ru v01 Uncensored (Ch.001-007)
new Regex(
@"(\b|_)(?<Edition>Uncensored)(\b|_)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz
new Regex(
@"(\b|_)(?<Edition>Full(?: |_)Color)(\b|_)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
};
private static readonly Regex[] CleanupRegex =
@ -511,18 +437,15 @@ namespace API.Parser
// (), {}, []
new Regex(
@"(?<Cleanup>(\{\}|\[\]|\(\)))",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// (Complete)
new Regex(
@"(?<Cleanup>(\{Complete\}|\[Complete\]|\(Complete\)))",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
// Anything in parenthesis
new Regex(
@"\(.*\)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
};
private static readonly Regex[] MangaSpecialRegex =
@ -530,15 +453,21 @@ namespace API.Parser
// All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle.
new Regex(
@"(?<Special>Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
MatchOptions, RegexTimeout),
};
private static readonly Regex[] ComicSpecialRegex =
{
// All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle.
new Regex(
@"(?<Special>Specials?|OneShot|One\-Shot|Extra( Chapter)?|Book \d.+?|Compendium \d.+?|Omnibus \d.+?|[_\s\-]TPB[_\s\-]|FCBD \d.+?|Absolute \d.+?|Preview \d.+?|Art Collection|Side( |_)Stories|Bonus)",
MatchOptions, RegexTimeout),
};
// If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found.
private static readonly Regex SpecialMarkerRegex = new Regex(
@"(?<Special>SP\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout
MatchOptions, RegexTimeout
);
@ -552,7 +481,7 @@ namespace API.Parser
/// <returns><see cref="ParserInfo"/> or null if Series was empty</returns>
public static ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga)
{
var fileName = Path.GetFileName(filePath);
var fileName = Path.GetFileNameWithoutExtension(filePath);
ParserInfo ret;
if (IsEpub(filePath))
@ -562,7 +491,7 @@ namespace API.Parser
Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName),
Series = ParseSeries(fileName) ?? ParseComicSeries(fileName),
Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName),
Filename = fileName,
Filename = Path.GetFileName(filePath),
Format = ParseFormat(filePath),
FullFilePath = filePath
};
@ -574,14 +503,14 @@ namespace API.Parser
Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName),
Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName),
Volumes = type == LibraryType.Manga ? ParseVolume(fileName) : ParseComicVolume(fileName),
Filename = fileName,
Filename = Path.GetFileName(filePath),
Format = ParseFormat(filePath),
Title = Path.GetFileNameWithoutExtension(fileName),
FullFilePath = filePath
};
}
if (IsImage(filePath) && IsCoverImage(fileName)) return null;
if (IsImage(filePath) && IsCoverImage(filePath)) return null;
if (IsImage(filePath))
{
@ -600,7 +529,7 @@ namespace API.Parser
var edition = ParseEdition(fileName);
if (!string.IsNullOrEmpty(edition))
{
ret.Series = CleanTitle(ret.Series.Replace(edition, ""));
ret.Series = CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic);
ret.Edition = edition;
}
@ -625,11 +554,11 @@ namespace API.Parser
if (string.IsNullOrEmpty(ret.Series))
{
ret.Series = CleanTitle(fileName);
ret.Series = CleanTitle(fileName, type is LibraryType.Comic);
}
// Pdfs may have .pdf in the series name, remove that
if (IsPdf(fileName) && ret.Series.ToLower().EndsWith(".pdf"))
if (IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf"))
{
ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
}
@ -673,7 +602,7 @@ namespace API.Parser
if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1))
{
ret.Series = CleanTitle(folder);
ret.Series = CleanTitle(folder, type is LibraryType.Comic);
break;
}
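
A worked illustration of what `Parse` produces after this change, assuming the regexes behave as their inline examples indicate; matching now runs against the extension-less name while `Filename` keeps the full name:

```csharp
var info = Parser.Parse(
    "/manga/Kenichi/Historys Strongest Disciple Kenichi_v11_c90-98.zip",
    "/manga", LibraryType.Manga);

// Expected (assumption, based on the regex comments above):
// info.Series   == "Historys Strongest Disciple Kenichi"
// info.Volumes  == "11"
// info.Chapters == "90-98"
// info.Filename == "Historys Strongest Disciple Kenichi_v11_c90-98.zip"
```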
@ -750,6 +679,23 @@ namespace API.Parser
return string.Empty;
}
public static string ParseComicSpecial(string filePath)
{
foreach (var regex in ComicSpecialRegex)
{
var matches = regex.Matches(filePath);
foreach (Match match in matches)
{
if (match.Groups["Special"].Success && match.Groups["Special"].Value != string.Empty)
{
return match.Groups["Special"].Value;
}
}
}
return string.Empty;
}
public static string ParseSeries(string filename)
{
foreach (var regex in MangaSeriesRegex)
@ -775,7 +721,7 @@ namespace API.Parser
{
if (match.Groups["Series"].Success && match.Groups["Series"].Value != string.Empty)
{
return CleanTitle(match.Groups["Series"].Value);
return CleanTitle(match.Groups["Series"].Value, true);
}
}
}
@ -793,12 +739,8 @@ namespace API.Parser
if (!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue;
var value = match.Groups["Volume"].Value;
if (!value.Contains("-")) return RemoveLeadingZeroes(match.Groups["Volume"].Value);
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
var to = RemoveLeadingZeroes(tokens[1]);
return $"{@from}-{to}";
var hasPart = match.Groups["Part"].Success;
return FormatValue(value, hasPart);
}
}
@ -815,18 +757,32 @@ namespace API.Parser
if (!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue;
var value = match.Groups["Volume"].Value;
if (!value.Contains("-")) return RemoveLeadingZeroes(match.Groups["Volume"].Value);
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
var to = RemoveLeadingZeroes(tokens[1]);
return $"{@from}-{to}";
var hasPart = match.Groups["Part"].Success;
return FormatValue(value, hasPart);
}
}
return DefaultVolume;
}
private static string FormatValue(string value, bool hasPart)
{
if (!value.Contains("-"))
{
return RemoveLeadingZeroes(hasPart ? AddChapterPart(value) : value);
}
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
if (tokens.Length == 2)
{
var to = RemoveLeadingZeroes(hasPart ? AddChapterPart(tokens[1]) : tokens[1]);
return $"{@from}-{to}";
}
return @from;
}
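
`FormatValue` consolidates the range and chapter-part handling that was previously duplicated across the volume and chapter parsers below. A behavioral sketch, under the assumption that `AddChapterPart` maps a trailing `b` capture to a `.5` half-step:

```csharp
// FormatValue("001",     hasPart: false) -> "1"      (leading zeroes stripped)
// FormatValue("090-098", hasPart: false) -> "90-98"  (each side de-padded)
// FormatValue("153",     hasPart: true)  -> "153.5"  (assumed AddChapterPart behavior)
```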
public static string ParseChapter(string filename)
{
foreach (var regex in MangaChapterRegex)
@ -837,24 +793,9 @@ namespace API.Parser
if (!match.Groups["Chapter"].Success || match.Groups["Chapter"] == Match.Empty) continue;
var value = match.Groups["Chapter"].Value;
var hasChapterPart = match.Groups["ChapterPart"].Success;
if (!value.Contains("-"))
{
return RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(value) : value);
}
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
if (tokens.Length == 2)
{
var to = RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(tokens[1]) : tokens[1]);
return $"{@from}-{to}";
}
return from;
var hasPart = match.Groups["Part"].Success;
return FormatValue(value, hasPart);
}
}
@ -881,16 +822,8 @@ namespace API.Parser
if (match.Groups["Chapter"].Success && match.Groups["Chapter"] != Match.Empty)
{
var value = match.Groups["Chapter"].Value;
if (value.Contains("-"))
{
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
var to = RemoveLeadingZeroes(tokens[1]);
return $"{from}-{to}";
}
return RemoveLeadingZeroes(match.Groups["Chapter"].Value);
var hasPart = match.Groups["Part"].Success;
return FormatValue(value, hasPart);
}
}
@ -908,12 +841,30 @@ namespace API.Parser
{
if (match.Success)
{
title = title.Replace(match.Value, "").Trim();
title = title.Replace(match.Value, string.Empty).Trim();
}
}
}
// TODO: Since we have loops like this, think about using a method
foreach (var regex in MangaEditionRegex)
{
var matches = regex.Matches(title);
foreach (Match match in matches)
{
if (match.Success)
{
title = title.Replace(match.Value, string.Empty).Trim();
}
}
}
return title;
}
private static string RemoveMangaSpecialTags(string title)
{
foreach (var regex in MangaSpecialRegex)
{
var matches = regex.Matches(title);
foreach (Match match in matches)
@ -928,9 +879,9 @@ namespace API.Parser
return title;
}
private static string RemoveSpecialTags(string title)
private static string RemoveComicSpecialTags(string title)
{
foreach (var regex in MangaSpecialRegex)
foreach (var regex in ComicSpecialRegex)
{
var matches = regex.Matches(title);
foreach (Match match in matches)
@ -954,14 +905,16 @@ namespace API.Parser
/// </example>
/// </summary>
/// <param name="title"></param>
/// <param name="isComic"></param>
/// <returns></returns>
public static string CleanTitle(string title)
public static string CleanTitle(string title, bool isComic = false)
{
title = RemoveReleaseGroup(title);
title = RemoveEditionTagHolders(title);
title = RemoveSpecialTags(title);
title = isComic ? RemoveComicSpecialTags(title) : RemoveMangaSpecialTags(title);
title = title.Replace("_", " ").Trim();
if (title.EndsWith("-") || title.EndsWith(","))
@ -1009,7 +962,7 @@ namespace API.Parser
private static string PerformPadding(string number)
{
var num = Int32.Parse(number);
var num = int.Parse(number);
return num switch
{
< 10 => "00" + num,
@ -1064,7 +1017,7 @@ namespace API.Parser
public static string Normalize(string name)
{
return Regex.Replace(name.ToLower(), "[^a-zA-Z0-9]", string.Empty);
return NormalizeRegex.Replace(name, string.Empty).ToLower();
}
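
Beyond precompiling the regex, this is a small behavioral change: the class `[^a-zA-Z0-9\+]` preserves `+`, which the old inline pattern `[^a-zA-Z0-9]` stripped, and lowercasing now happens after replacement. A quick sketch:

```csharp
Parser.Parser.Normalize("Monster+");            // "monster+" (old code returned "monster")
Parser.Parser.Normalize("Akame ga KILL! ZERO"); // "akamegakillzero"
```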

View file

@ -1,19 +1,11 @@
using System;
using System.Collections.Generic;
using System.Data;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Helpers;
using API.Interfaces;
using API.Services;
using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Server.Kestrel.Core;
@ -21,9 +13,6 @@ using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.IO;
using NetVips;
using Sentry;
namespace API
{
@ -103,62 +92,6 @@ namespace API
opts.ListenAnyIP(HttpPort, options => { options.Protocols = HttpProtocols.Http1AndHttp2; });
});
var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
if (environment != Environments.Development)
{
webBuilder.UseSentry(options =>
{
options.Dsn = "https://40f4e7b49c094172a6f99d61efb2740f@o641015.ingest.sentry.io/5757423";
options.MaxBreadcrumbs = 200;
options.AttachStacktrace = true;
options.Debug = false;
options.SendDefaultPii = false;
options.DiagnosticLevel = SentryLevel.Debug;
options.ShutdownTimeout = TimeSpan.FromSeconds(5);
options.Release = BuildInfo.Version.ToString();
options.AddExceptionFilterForType<OutOfMemoryException>();
options.AddExceptionFilterForType<NetVips.VipsException>();
options.AddExceptionFilterForType<InvalidDataException>();
options.AddExceptionFilterForType<KavitaException>();
options.BeforeSend = sentryEvent =>
{
if (sentryEvent.Exception != null
&& sentryEvent.Exception.Message.StartsWith("[GetCoverImage]")
&& sentryEvent.Exception.Message.StartsWith("[BookService]")
&& sentryEvent.Exception.Message.StartsWith("[ExtractArchive]")
&& sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]")
&& sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]")
&& sentryEvent.Exception.Message.StartsWith("[GetNumberOfPagesFromArchive]")
&& sentryEvent.Exception.Message.Contains("EPUB parsing error")
&& sentryEvent.Exception.Message.Contains("Unsupported EPUB version")
&& sentryEvent.Exception.Message.Contains("Incorrect EPUB")
&& sentryEvent.Exception.Message.Contains("Access is Denied"))
{
return null; // Don't send this event to Sentry
}
sentryEvent.ServerName = null; // Never send Server Name to Sentry
return sentryEvent;
};
options.ConfigureScope(scope =>
{
scope.User = new User()
{
Id = HashUtil.AnonymousToken()
};
scope.Contexts.App.Name = BuildInfo.AppName;
scope.Contexts.App.Version = BuildInfo.Version.ToString();
scope.Contexts.App.StartTime = DateTime.UtcNow;
scope.Contexts.App.Hash = HashUtil.AnonymousToken();
scope.Contexts.App.Build = BuildInfo.Release;
scope.SetTag("culture", Thread.CurrentThread.CurrentCulture.Name);
scope.SetTag("branch", BuildInfo.Branch);
});
});
}
webBuilder.UseStartup<Startup>();
});
}

View file

@ -0,0 +1,53 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Errors;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Logging;
namespace API.Services
{
public class AccountService : IAccountService
{
private readonly UserManager<AppUser> _userManager;
private readonly ILogger<AccountService> _logger;
public const string DefaultPassword = "[k.2@RZ!mxCQkJzE";
public AccountService(UserManager<AppUser> userManager, ILogger<AccountService> logger)
{
_userManager = userManager;
_logger = logger;
}
public async Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword)
{
foreach (var validator in _userManager.PasswordValidators)
{
var validationResult = await validator.ValidateAsync(_userManager, user, newPassword);
if (!validationResult.Succeeded)
{
return validationResult.Errors.Select(e => new ApiException(400, e.Code, e.Description));
}
}
var result = await _userManager.RemovePasswordAsync(user);
if (!result.Succeeded)
{
_logger.LogError("Could not update password");
return result.Errors.Select(e => new ApiException(400, e.Code, e.Description));
}
result = await _userManager.AddPasswordAsync(user, newPassword);
if (!result.Succeeded)
{
_logger.LogError("Could not update password");
return result.Errors.Select(e => new ApiException(400, e.Code, e.Description));
}
return new List<ApiException>();
}
}
}
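
A sketch of consuming this from a controller; the controller and DTO names are hypothetical, but the contract (an empty sequence on success, `ApiException`s with status 400 otherwise) follows from the implementation above:

```csharp
// Hypothetical call site inside an account-related controller action.
var errors = (await _accountService.ChangeUserPassword(user, dto.NewPassword)).ToList();
if (errors.Any())
{
    return BadRequest(errors); // validation or identity failures, each with code + description
}
return Ok();
```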

View file

@ -8,15 +8,14 @@ using System.Threading.Tasks;
using System.Xml.Serialization;
using API.Archive;
using API.Comparators;
using API.Data.Metadata;
using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks;
using Kavita.Common;
using Microsoft.Extensions.Logging;
using Microsoft.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using Image = NetVips.Image;
namespace API.Services
{
@ -28,14 +27,12 @@ namespace API.Services
{
private readonly ILogger<ArchiveService> _logger;
private readonly IDirectoryService _directoryService;
private readonly NaturalSortComparer _comparer;
private const string ComicInfoFilename = "comicinfo";
public ArchiveService(ILogger<ArchiveService> logger, IDirectoryService directoryService)
{
_logger = logger;
_directoryService = directoryService;
_comparer = new NaturalSortComparer();
}
/// <summary>
@ -81,13 +78,11 @@ namespace API.Services
{
case ArchiveLibrary.Default:
{
_logger.LogDebug("Using default compression handling");
using ZipArchive archive = ZipFile.OpenRead(archivePath);
using var archive = ZipFile.OpenRead(archivePath);
return archive.Entries.Count(e => !Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Parser.Parser.IsImage(e.FullName));
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
return archive.Entries.Count(entry => !entry.IsDirectory &&
!Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
@ -130,7 +125,7 @@ namespace API.Services
/// <returns>Entry name of match, null if no match</returns>
public string FirstFileEntry(IEnumerable<string> entryFullNames)
{
var result = entryFullNames.OrderBy(Path.GetFileName, _comparer)
var result = entryFullNames.OrderBy(Path.GetFileName, new NaturalSortComparer())
.FirstOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x)
&& Parser.Parser.IsImage(x)
&& !x.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith));
@ -160,7 +155,6 @@ namespace API.Services
{
case ArchiveLibrary.Default:
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
var entryNames = archive.Entries.Select(e => e.FullName).ToArray();
@ -172,7 +166,6 @@ namespace API.Services
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
var entryNames = archive.Entries.Where(archiveEntry => !archiveEntry.IsDirectory).Select(e => e.Key).ToList();
@ -301,66 +294,69 @@ namespace API.Services
return null;
}
public string GetSummaryInfo(string archivePath)
public ComicInfo GetComicInfo(string archivePath)
{
var summary = string.Empty;
if (!IsValidArchive(archivePath)) return summary;
if (!IsValidArchive(archivePath)) return null;
ComicInfo info = null;
try
{
if (!File.Exists(archivePath)) return summary;
if (!File.Exists(archivePath)) return null;
var libraryHandler = CanOpen(archivePath);
switch (libraryHandler)
{
case ArchiveLibrary.Default:
{
_logger.LogTrace("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
var entry = archive.Entries.SingleOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
&& Path.GetFileNameWithoutExtension(x.Name)?.ToLower() == ComicInfoFilename
&& !Path.GetFileNameWithoutExtension(x.Name).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)
&& Parser.Parser.IsXml(x.FullName));
var entry = archive.Entries.SingleOrDefault(x =>
!Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
&& Path.GetFileNameWithoutExtension(x.Name)?.ToLower() == ComicInfoFilename
&& !Path.GetFileNameWithoutExtension(x.Name)
.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)
&& Parser.Parser.IsXml(x.FullName));
if (entry != null)
{
using var stream = entry.Open();
var serializer = new XmlSerializer(typeof(ComicInfo));
info = (ComicInfo) serializer.Deserialize(stream);
return (ComicInfo) serializer.Deserialize(stream);
}
break;
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogTrace("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
&& !Path.GetFileNameWithoutExtension(entry.Key).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)
return FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
&& !Path.GetFileNameWithoutExtension(entry.Key).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)
&& Parser.Parser.IsXml(entry.Key)));
break;
}
case ArchiveLibrary.NotSupported:
_logger.LogWarning("[GetSummaryInfo] This archive cannot be read: {ArchivePath}", archivePath);
return summary;
_logger.LogWarning("[GetComicInfo] This archive cannot be read: {ArchivePath}", archivePath);
return null;
default:
_logger.LogWarning("[GetSummaryInfo] There was an exception when reading archive stream: {ArchivePath}", archivePath);
return summary;
}
if (info != null)
{
return info.Summary;
_logger.LogWarning(
"[GetComicInfo] There was an exception when reading archive stream: {ArchivePath}",
archivePath);
return null;
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "[GetSummaryInfo] There was an exception when reading archive stream: {Filepath}", archivePath);
_logger.LogWarning(ex, "[GetComicInfo] There was an exception when reading archive stream: {Filepath}", archivePath);
}
return summary;
return null;
}
private static void ExtractArchiveEntities(IEnumerable<IArchiveEntry> entries, string extractPath)
{
DirectoryService.ExistOrCreate(extractPath);
@ -410,14 +406,12 @@ namespace API.Services
{
case ArchiveLibrary.Default:
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
ExtractArchiveEntries(archive, extractPath);
break;
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
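
Callers that previously consumed the bare summary string now get the whole `ComicInfo`, with `null` signalling an unreadable archive or a missing `comicinfo.xml`. A sketch of the adjusted call pattern:

```csharp
var info = _archiveService.GetComicInfo(archivePath);
if (info != null)
{
    series.Summary = info.Summary; // other ComicInfo fields are now available to callers too
}
```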

View file

@ -4,12 +4,12 @@ using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Net;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Web;
using API.Data.Metadata;
using API.Entities.Enums;
using API.Interfaces.Services;
using API.Parser;
@ -165,22 +165,43 @@ namespace API.Services
return RemoveWhiteSpaceFromStylesheets(stylesheet.ToCss());
}
public string GetSummaryInfo(string filePath)
public ComicInfo GetComicInfo(string filePath)
{
if (!IsValidFile(filePath) || Parser.Parser.IsPdf(filePath)) return string.Empty;
if (!IsValidFile(filePath) || Parser.Parser.IsPdf(filePath)) return null;
try
{
using var epubBook = EpubReader.OpenBook(filePath);
return epubBook.Schema.Package.Metadata.Description;
var publicationDate =
epubBook.Schema.Package.Metadata.Dates.FirstOrDefault(date => date.Event == "publication")?.Date;
var info = new ComicInfo()
{
Summary = epubBook.Schema.Package.Metadata.Description,
Writer = string.Join(",", epubBook.Schema.Package.Metadata.Creators),
Publisher = string.Join(",", epubBook.Schema.Package.Metadata.Publishers),
Month = !string.IsNullOrEmpty(publicationDate) ? DateTime.Parse(publicationDate).Month : 0,
Year = !string.IsNullOrEmpty(publicationDate) ? DateTime.Parse(publicationDate).Year : 0,
};
// Parse tags not exposed via Library
foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems)
{
switch (metadataItem.Name)
{
case "calibre:rating":
info.UserRating = float.Parse(metadataItem.Content);
break;
}
}
return info;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "[BookService] There was an exception getting summary, defaulting to empty string");
_logger.LogWarning(ex, "[GetComicInfo] There was an exception getting metadata");
}
return string.Empty;
return null;
}
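
The EPUB path now projects OPF metadata onto the same `ComicInfo` shape used for archives, including Calibre's rating meta item. Roughly what a caller can expect, with the path purely illustrative:

```csharp
var info = _bookService.GetComicInfo("/books/example.epub"); // hypothetical path
// When non-null, fields map from the OPF <metadata> block:
//   Summary     <- dc:description
//   Writer      <- comma-joined dc:creator entries
//   Publisher   <- comma-joined dc:publisher entries
//   Month/Year  <- the dc:date with event="publication", when present
//   UserRating  <- <meta name="calibre:rating">, when present
```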
private bool IsValidFile(string filePath)
@ -393,7 +414,7 @@ namespace API.Services
/// <returns></returns>
public string GetCoverImage(string fileFilePath, string fileName)
{
if (!IsValidFile(fileFilePath)) return String.Empty;
if (!IsValidFile(fileFilePath)) return string.Empty;
if (Parser.Parser.IsPdf(fileFilePath))
{
@ -411,8 +432,8 @@ namespace API.Services
?? epubBook.Content.Images.Values.FirstOrDefault();
if (coverImageContent == null) return string.Empty;
using var stream = coverImageContent.GetContentStream();
using var stream = StreamManager.GetStream("BookService.GetCoverImage", coverImageContent.ReadContent());
return ImageService.WriteCoverThumbnail(stream, fileName);
}
catch (Exception ex)

View file

@ -1,16 +0,0 @@
namespace API.Services
{
public class ComicInfo
{
public string Summary { get; set; }
public string Title { get; set; }
public string Series { get; set; }
public string Notes { get; set; }
public string Publisher { get; set; }
public string Genre { get; set; }
public int PageCount { get; set; }
// ReSharper disable once InconsistentNaming
public string LanguageISO { get; set; }
public string Web { get; set; }
}
}

View file

@ -46,7 +46,7 @@ namespace API.Services
var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions)
.OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault();
return firstImage;
}
@ -73,7 +73,7 @@ namespace API.Services
{
using var thumbnail = Image.Thumbnail(path, ThumbnailWidth);
var filename = fileName + ".png";
thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png"));
thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, filename));
return filename;
}
catch (Exception e)
@ -93,7 +93,7 @@ namespace API.Services
/// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns>
public static string WriteCoverThumbnail(Stream stream, string fileName)
{
using var thumbnail = NetVips.Image.ThumbnailStream(stream, ThumbnailWidth);
using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth);
var filename = fileName + ".png";
thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png"));
return filename;

View file

@ -1,13 +1,15 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.Data.Metadata;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.SignalR;
@ -74,7 +76,7 @@ namespace API.Services
private string GetCoverImage(MangaFile file, int volumeId, int chapterId)
{
file.LastModified = DateTime.Now;
file.UpdateLastModified();
switch (file.Format)
{
case MangaFormat.Pdf:
@ -102,6 +104,7 @@ namespace API.Services
if (ShouldUpdateCoverImage(chapter.CoverImage, firstFile, forceUpdate, chapter.CoverImageLocked))
{
_logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile?.FilePath);
chapter.CoverImage = GetCoverImage(firstFile, chapter.VolumeId, chapter.Id);
return true;
}
@ -117,8 +120,7 @@ namespace API.Services
public bool UpdateMetadata(Volume volume, bool forceUpdate)
{
// We need to check if Volume coverImage matches first chapters if forceUpdate is false
if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate
, false)) return false;
if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate)) return false;
volume.Chapters ??= new List<Chapter>();
var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault();
@ -137,6 +139,8 @@ namespace API.Services
{
var madeUpdate = false;
if (series == null) return false;
// NOTE: This will fail if we replace the cover of the first volume on a first scan, because the series will already have a cover image
if (ShouldUpdateCoverImage(series.CoverImage, null, forceUpdate, series.CoverImageLocked))
{
series.Volumes ??= new List<Volume>();
@ -167,6 +171,9 @@ namespace API.Services
private bool UpdateSeriesSummary(Series series, bool forceUpdate)
{
// NOTE: This can be problematic when the file changes and a summary already exists, but it is likely
// better to let the user kick off a metadata refresh on an individual Series than to incur the overhead
// of checking the file's last write time.
if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return false;
var isBook = series.Library.Type == LibraryType.Book;
@ -177,18 +184,21 @@ namespace API.Services
if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return false;
if (Parser.Parser.IsPdf(firstFile.FilePath)) return false;
if (series.Format is MangaFormat.Archive or MangaFormat.Epub)
var comicInfo = GetComicInfo(series.Format, firstFile);
if (string.IsNullOrEmpty(comicInfo?.Summary)) return false;
series.Summary = comicInfo.Summary;
return true;
}
private ComicInfo GetComicInfo(MangaFormat format, MangaFile firstFile)
{
if (format is MangaFormat.Archive or MangaFormat.Epub)
{
var summary = Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath);
if (!string.IsNullOrEmpty(series.Summary))
{
series.Summary = summary;
firstFile.LastModified = DateTime.Now;
return true;
}
return Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetComicInfo(firstFile.FilePath) : _archiveService.GetComicInfo(firstFile.FilePath);
}
firstFile.LastModified = DateTime.Now; // NOTE: Should I put this here as well since it might not have actually been parsed?
return false;
return null;
}
@ -200,34 +210,65 @@ namespace API.Services
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
public async Task RefreshMetadata(int libraryId, bool forceUpdate = false)
{
var sw = Stopwatch.StartNew();
var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
_logger.LogInformation("[MetadataService] Beginning metadata refresh of {LibraryName}", library.Name);
// PERF: See if we can break this up into multiple threads that process 20 series at a time then save so we can reduce amount of memory used
_logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name);
foreach (var series in library.Series)
var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
var stopwatch = Stopwatch.StartNew();
var totalTime = 0L;
_logger.LogDebug($"[MetadataService] Refreshing Library {library.Name}. Total Items: {chunkInfo.TotalSize}. Total Chunks: {chunkInfo.TotalChunks} with {chunkInfo.ChunkSize} size.");
// This technically does
for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
{
var volumeUpdated = false;
foreach (var volume in series.Volumes)
{
var chapterUpdated = false;
foreach (var chapter in volume.Chapters)
totalTime += stopwatch.ElapsedMilliseconds;
stopwatch.Restart();
_logger.LogDebug($"[MetadataService] Processing chunk {chunk} / {chunkInfo.TotalChunks} with size {chunkInfo.ChunkSize} Series ({chunk * chunkInfo.ChunkSize} - {(chunk + 1) * chunkInfo.ChunkSize}");
var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
new UserParams()
{
chapterUpdated = UpdateMetadata(chapter, forceUpdate);
PageNumber = chunk,
PageSize = chunkInfo.ChunkSize
});
_logger.LogDebug($"[MetadataService] Fetched {nonLibrarySeries.Count} series for refresh");
Parallel.ForEach(nonLibrarySeries, series =>
{
_logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName);
var volumeUpdated = false;
foreach (var volume in series.Volumes)
{
var chapterUpdated = false;
foreach (var chapter in volume.Chapters)
{
chapterUpdated = UpdateMetadata(chapter, forceUpdate);
}
volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
}
volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
UpdateMetadata(series, volumeUpdated || forceUpdate);
});
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
"[MetadataService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
foreach (var series in nonLibrarySeries)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(library.Id, series.Id));
}
}
else
{
_logger.LogInformation(
"[MetadataService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
}
UpdateMetadata(series, volumeUpdated || forceUpdate);
_unitOfWork.SeriesRepository.Update(series);
}
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
_logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds);
}
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesNumber} series in library {LibraryName} in {ElapsedMilliseconds} milliseconds total", chunkInfo.TotalSize, library.Name, totalTime);
}
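
RefreshMetadata now pages series through the repository in fixed-size chunks instead of loading the full library graph at once, which bounds memory at the cost of one query per chunk. The repository types are not part of this diff; a sketch of the chunk arithmetic their usage implies (names inferred from GetChunkInfo and GetFullSeriesForLibraryIdAsync above):

using System;

// Sketch only: shape inferred from usage; the real ChunkInfo lives in the repository layer.
public class ChunkInfo
{
    public int TotalSize { get; init; }   // total series in the library
    public int ChunkSize { get; init; }   // series fetched per page
    public int TotalChunks { get; init; } // ceil(TotalSize / ChunkSize)
}

public static class ChunkMath
{
    public static ChunkInfo Create(int totalSeries, int chunkSize = 50) => new ChunkInfo
    {
        TotalSize = totalSeries,
        ChunkSize = chunkSize,
        TotalChunks = Math.Max(1, (int) Math.Ceiling(totalSeries / (float) chunkSize))
    };
}
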
@ -239,15 +280,13 @@ namespace API.Services
public async Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false)
{
var sw = Stopwatch.StartNew();
var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId);
var series = library.Series.SingleOrDefault(s => s.Id == seriesId);
var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
if (series == null)
{
_logger.LogError("Series {SeriesId} was not found on Library {LibraryName}", seriesId, libraryId);
_logger.LogError("[MetadataService] Series {SeriesId} was not found on Library {LibraryId}", seriesId, libraryId);
return;
}
_logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name);
_logger.LogInformation("[MetadataService] Beginning metadata refresh of {SeriesName}", series.Name);
var volumeUpdated = false;
foreach (var volume in series.Volumes)
{
@ -261,14 +300,14 @@ namespace API.Services
}
UpdateMetadata(series, volumeUpdated || forceUpdate);
_unitOfWork.SeriesRepository.Update(series);
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
_logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.RefreshMetadataEvent(libraryId, seriesId));
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(series.LibraryId, series.Id));
}
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
}
}
}


@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using API.Entities.Enums;
@ -52,27 +53,27 @@ namespace API.Services
var scanLibrarySetting = setting;
_logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting);
RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(),
() => CronConverter.ConvertToCronNotation(scanLibrarySetting));
() => CronConverter.ConvertToCronNotation(scanLibrarySetting), TimeZoneInfo.Local);
}
else
{
RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), Cron.Daily);
RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), Cron.Daily, TimeZoneInfo.Local);
}
setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
if (setting != null)
{
_logger.LogDebug("Scheduling Backup Task for {Setting}", setting);
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting));
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting), TimeZoneInfo.Local);
}
else
{
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly);
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly, TimeZoneInfo.Local);
}
RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily);
RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily, TimeZoneInfo.Local);
RecurringJob.AddOrUpdate("check-for-updates", () => _scannerService.ScanLibraries(), Cron.Daily);
RecurringJob.AddOrUpdate("check-for-updates", () => _scannerService.ScanLibraries(), Cron.Daily, TimeZoneInfo.Local);
}
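
Every recurring job above now passes TimeZoneInfo.Local. Without it, Hangfire 1.7 evaluates cron expressions in UTC, so a user's "daily" schedule would fire at midnight UTC rather than midnight server time. The overload in use, isolated:

using System;
using Hangfire;

// Cron is interpreted in the supplied time zone (UTC when the argument is omitted).
RecurringJob.AddOrUpdate("nightly-example",                      // hypothetical job id
    () => Console.WriteLine("runs at 02:00 server-local time"),
    "0 2 * * *",                                                 // daily at 02:00
    TimeZoneInfo.Local);
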
#region StatsTasks
@ -88,7 +89,7 @@ namespace API.Services
}
_logger.LogDebug("Scheduling stat collection daily");
RecurringJob.AddOrUpdate(SendDataTask, () => _statsService.CollectAndSendStatsData(), Cron.Daily);
RecurringJob.AddOrUpdate(SendDataTask, () => _statsService.CollectAndSendStatsData(), Cron.Daily, TimeZoneInfo.Local);
}
public void CancelStatsTasks()
@ -111,7 +112,7 @@ namespace API.Services
public void ScheduleUpdaterTasks()
{
_logger.LogInformation("Scheduling Auto-Update tasks");
RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Weekly);
RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Weekly, TimeZoneInfo.Local);
}
#endregion
@ -119,7 +120,7 @@ namespace API.Services
public void ScanLibrary(int libraryId, bool forceUpdate = false)
{
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId));
// When we do a scan, force cache to re-unpack in case page numbers change
BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory());
}
@ -141,7 +142,7 @@ namespace API.Services
BackgroundJob.Enqueue(() => DirectoryService.ClearDirectory(tempDirectory));
}
public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false)
public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = true)
{
_logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId);
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate));
@ -150,7 +151,7 @@ namespace API.Services
public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false)
{
_logger.LogInformation("Enqueuing series scan for: {SeriesId}", seriesId);
BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, forceUpdate, CancellationToken.None));
BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, CancellationToken.None));
}
public void BackupDatabase()


@ -125,7 +125,7 @@ namespace API.Services.Tasks
_directoryService.CopyFilesToDirectory(
chapterImages.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir);
}
catch (IOException e)
catch (IOException)
{
// Swallow exception. This can happen when a duplicate cover is copied, as chapters and volumes can share the same file.
}


@ -1,11 +1,9 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Interfaces;
using API.Interfaces.Services;
using Hangfire;
using Microsoft.Extensions.Logging;
using NetVips;
namespace API.Services.Tasks
{


@ -7,9 +7,11 @@ using System.Threading;
using System.Threading.Tasks;
using API.Comparators;
using API.Data;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
@ -46,81 +48,114 @@ namespace API.Services.Tasks
[DisableConcurrentExecution(timeoutInSeconds: 360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token)
public async Task ScanSeries(int libraryId, int seriesId, CancellationToken token)
{
var sw = new Stopwatch();
var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId, seriesId);
var dirs = DirectoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(f => f.Path), files.Select(f => f.FilePath).ToList());
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new []{ seriesId });
var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
var folderPaths = library.Folders.Select(f => f.Path).ToList();
var dirs = DirectoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList());
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
var scanner = new ParseScannedFiles(_bookService, _logger);
var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime);
// If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue
// Hence we clear out anything but what we selected for
var firstSeries = library.Series.FirstOrDefault();
// Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
RemoveParsedInfosNotForSeries(parsedSeries, series);
// If nothing was found, first validate any of the files still exist. If they don't then we have a deletion and can skip the rest of the logic flow
if (parsedSeries.Count == 0)
{
var anyFilesExist =
(await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id)).Any(m => File.Exists(m.FilePath));
if (!anyFilesExist)
{
try
{
_unitOfWork.SeriesRepository.Remove(series);
await CommitAndSend(totalFiles, parsedSeries, sw, scanElapsedTime, series);
}
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an error during ScanSeries to delete the series");
await _unitOfWork.RollbackAsync();
}
}
else
{
// We need to do an additional check for an edge case: if the scan ran and the files do not match the existing Series name, it is very likely
// the files have bad naming and, if we don't correct for it, the series will get deleted because the parser cannot
// fall back onto folder parsing when the root is the series folder.
var existingFolder = dirs.Keys.FirstOrDefault(key => key.Contains(series.OriginalName));
if (dirs.Keys.Count == 1 && !string.IsNullOrEmpty(existingFolder))
{
dirs = new Dictionary<string, string>();
var path = Directory.GetParent(existingFolder)?.FullName;
if (!folderPaths.Contains(path) || !folderPaths.Any(p => p.Contains(path ?? string.Empty)))
{
_logger.LogInformation("[ScanService] Aborted: {SeriesName} has bad naming convention and sits at root of library. Cannot scan series without deletion occuring. Correct file names to have Series Name within it or perform Scan Library", series.OriginalName);
return;
}
if (!string.IsNullOrEmpty(path))
{
dirs[path] = string.Empty;
}
}
_logger.LogInformation("{SeriesName} has bad naming convention, forcing rescan at a higher directory", series.OriginalName);
scanner = new ParseScannedFiles(_bookService, _logger);
parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
totalFiles += totalFiles2;
scanElapsedTime += scanElapsedTime2;
RemoveParsedInfosNotForSeries(parsedSeries, series);
}
}
// At this point, parsedSeries will have at least one key and we can perform the update. If it still doesn't, just return and don't do anything
if (parsedSeries.Count == 0) return;
try
{
UpdateSeries(series, parsedSeries);
await CommitAndSend(totalFiles, parsedSeries, sw, scanElapsedTime, series);
}
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an error during ScanSeries to update the series");
await _unitOfWork.RollbackAsync();
}
// Tell UI that this series is done
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId, series.Name),
cancellationToken: token);
await CleanupDbEntities();
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, series.Id, false));
}
private static void RemoveParsedInfosNotForSeries(Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, Series series)
{
var keys = parsedSeries.Keys;
foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format))
foreach (var key in keys.Where(key =>
!series.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || series.Format != key.Format))
{
parsedSeries.Remove(key);
}
}
if (parsedSeries.Count == 0)
private async Task CommitAndSend(int totalFiles,
Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, Stopwatch sw, long scanElapsedTime, Series series)
{
if (_unitOfWork.HasChanges())
{
// We need to do an additional check for an edge case: If the scan ran and the files do not match the existing Series name, then it is very likely,
// the files have crap naming and if we don't correct, the series will get deleted due to the parser not being able to fallback onto folder parsing as the root
// is the series folder.
var existingFolder = dirs.Keys.FirstOrDefault(key => key.Contains(series.OriginalName));
if (dirs.Keys.Count == 1 && !string.IsNullOrEmpty(existingFolder))
{
dirs = new Dictionary<string, string>();
var path = Path.GetPathRoot(existingFolder);
if (!string.IsNullOrEmpty(path))
{
dirs[path] = string.Empty;
}
}
_logger.LogDebug("{SeriesName} has bad naming convention, forcing rescan at a higher directory.", series.OriginalName);
scanner = new ParseScannedFiles(_bookService, _logger);
parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
totalFiles += totalFiles2;
scanElapsedTime += scanElapsedTime2;
// If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue
// Hence we clear out anything but what we selected for
firstSeries = library.Series.FirstOrDefault();
keys = parsedSeries.Keys;
foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format))
{
parsedSeries.Remove(key);
}
await _unitOfWork.CommitAsync();
_logger.LogInformation(
"Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {SeriesName}",
totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name);
}
var sw = new Stopwatch();
UpdateLibrary(library, parsedSeries);
_unitOfWork.LibraryRepository.Update(library);
if (await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
"Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {SeriesName}",
totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name);
await CleanupDbEntities();
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate));
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
// Tell UI that this series is done
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId), cancellationToken: token);
}
else
{
_logger.LogCritical(
"There was a critical error that resulted in a failed scan. Please check logs and rescan");
await _unitOfWork.RollbackAsync();
}
}
@ -132,7 +167,7 @@ namespace API.Services.Tasks
var libraries = await _unitOfWork.LibraryRepository.GetLibrariesAsync();
foreach (var lib in libraries)
{
await ScanLibrary(lib.Id, false);
await ScanLibrary(lib.Id);
}
_logger.LogInformation("Scan of All Libraries Finished");
}
@ -144,24 +179,26 @@ namespace API.Services.Tasks
/// ie) all entities will be rechecked for new cover images and comicInfo.xml changes
/// </summary>
/// <param name="libraryId"></param>
/// <param name="forceUpdate"></param>
[DisableConcurrentExecution(360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ScanLibrary(int libraryId, bool forceUpdate)
public async Task ScanLibrary(int libraryId)
{
Library library;
try
{
library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId);
library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
}
catch (Exception ex)
{
// This usually only fails if user is not authenticated.
_logger.LogError(ex, "There was an issue fetching Library {LibraryId}", libraryId);
_logger.LogError(ex, "[ScannerService] There was an issue fetching Library {LibraryId}", libraryId);
return;
}
_logger.LogInformation("Beginning file scan on {LibraryName}", library.Name);
_logger.LogInformation("[ScannerService] Beginning file scan on {LibraryName}", library.Name);
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(libraryId, 0));
var scanner = new ParseScannedFiles(_bookService, _logger);
var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime);
@ -171,25 +208,27 @@ namespace API.Services.Tasks
}
var sw = Stopwatch.StartNew();
UpdateLibrary(library, series);
await UpdateLibrary(library, series);
library.LastScanned = DateTime.Now;
_unitOfWork.LibraryRepository.Update(library);
if (await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
"Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}",
"[ScannerService] Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}",
totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
}
else
{
_logger.LogCritical(
"There was a critical error that resulted in a failed scan. Please check logs and rescan");
"[ScannerService] There was a critical error that resulted in a failed scan. Please check logs and rescan");
}
await CleanupAbandonedChapters();
await CleanupDbEntities();
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibrary, MessageFactory.ScanLibraryEvent(libraryId, "complete"));
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, false));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(libraryId, 100));
}
/// <summary>
@ -212,78 +251,171 @@ namespace API.Services.Tasks
_logger.LogInformation("Removed {Count} abandoned collection tags", cleanedUp);
}
private void UpdateLibrary(Library library, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
private async Task UpdateLibrary(Library library, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{
if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries));
if (parsedSeries == null) return;
// First, remove any series that are not in parsedSeries list
var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries).ToList();
library.Series = RemoveMissingSeries(library.Series, missingSeries, out var removeCount);
if (removeCount > 0)
// Library contains no Series, so we need to fetch series in groups of ChunkSize
var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
var stopwatch = Stopwatch.StartNew();
var totalTime = 0L;
// Update existing series
_logger.LogDebug("[ScannerService] Updating existing series");
for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
{
_logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount);
foreach (var s in missingSeries)
{
_logger.LogDebug("Removed {SeriesName} ({Format})", s.Name, s.Format);
}
if (chunkInfo.TotalChunks == 0) continue;
totalTime += stopwatch.ElapsedMilliseconds;
stopwatch.Restart();
_logger.LogDebug($"[ScannerService] Processing chunk {chunk} / {chunkInfo.TotalChunks} with size {chunkInfo.ChunkSize} Series ({chunk * chunkInfo.ChunkSize} - {(chunk + 1) * chunkInfo.ChunkSize}");
var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id, new UserParams()
{
PageNumber = chunk,
PageSize = chunkInfo.ChunkSize
});
// First, remove any series that are not in parsedSeries list
var missingSeries = FindSeriesNotOnDisk(nonLibrarySeries, parsedSeries).ToList();
foreach (var missing in missingSeries)
{
_unitOfWork.SeriesRepository.Remove(missing);
}
var cleanedSeries = RemoveMissingSeries(nonLibrarySeries, missingSeries, out var removeCount);
if (removeCount > 0)
{
_logger.LogInformation("[ScannerService] Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount);
foreach (var s in missingSeries)
{
_logger.LogDebug("[ScannerService] Removed {SeriesName} ({Format})", s.Name, s.Format);
}
}
// Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
var librarySeries = cleanedSeries.ToList();
Parallel.ForEach(librarySeries, (series) =>
{
UpdateSeries(series, parsedSeries);
});
await _unitOfWork.CommitAsync();
_logger.LogInformation(
"[ScannerService] Processed {SeriesStart} - {SeriesEnd} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, totalTime, library.Name);
// Emit any series removed
foreach (var missing in missingSeries)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesRemoved, MessageFactory.SeriesRemovedEvent(missing.Id, missing.Name, library.Id));
}
var progress = Math.Max(0, Math.Min(100, ((chunk + 1F) * chunkInfo.ChunkSize) / chunkInfo.TotalSize));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(library.Id, progress));
}
// Add new series that have parsedInfos
_logger.LogDebug("[ScannerService] Adding new series");
var newSeries = new List<Series>();
var allSeries = (await _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(library.Id)).ToList();
foreach (var (key, infos) in parsedSeries)
{
// Key is normalized already
Series existingSeries;
try
{
existingSeries = library.Series.SingleOrDefault(s =>
(s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName)
&& (s.Format == key.Format || s.Format == MangaFormat.Unknown));
}
catch (Exception e)
{
_logger.LogCritical(e, "There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it", key.NormalizedName);
var duplicateSeries = library.Series.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList();
foreach (var series in duplicateSeries)
{
_logger.LogCritical("{Key} maps with {Series}", key.Name, series.OriginalName);
}
Series existingSeries;
try
{
existingSeries = allSeries.SingleOrDefault(s =>
(s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName)
&& (s.Format == key.Format || s.Format == MangaFormat.Unknown));
}
catch (Exception e)
{
_logger.LogCritical(e, "[ScannerService] There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it. This will be skipped", key.NormalizedName);
var duplicateSeries = allSeries.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList();
foreach (var series in duplicateSeries)
{
_logger.LogCritical("[ScannerService] Duplicate Series Found: {Key} maps with {Series}", key.Name, series.OriginalName);
}
continue;
}
if (existingSeries == null)
{
existingSeries = DbFactory.Series(infos[0].Series);
existingSeries.Format = key.Format;
library.Series.Add(existingSeries);
}
continue;
}
existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name);
existingSeries.OriginalName ??= infos[0].Series;
existingSeries.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>());
existingSeries.Format = key.Format;
if (existingSeries != null) continue;
existingSeries = DbFactory.Series(infos[0].Series);
existingSeries.Format = key.Format;
newSeries.Add(existingSeries);
}
// Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
var librarySeries = library.Series.ToList();
Parallel.ForEach(librarySeries, (series) =>
var i = 0;
foreach(var series in newSeries)
{
try
{
_logger.LogInformation("Processing series {SeriesName}", series.OriginalName);
UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray());
series.Pages = series.Volumes.Sum(v => v.Pages);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception updating volumes for {SeriesName}", series.Name);
}
});
try
{
_logger.LogDebug("[ScannerService] Processing series {SeriesName}", series.OriginalName);
UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray());
series.Pages = series.Volumes.Sum(v => v.Pages);
series.LibraryId = library.Id; // We have to manually set this since we aren't adding the series to the Library's series.
_unitOfWork.SeriesRepository.Attach(series);
if (await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
"[ScannerService] Added {NewSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name);
// Last step, remove any series that have no pages
library.Series = library.Series.Where(s => s.Pages > 0).ToList();
// Inform UI of new series added
await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesAdded, MessageFactory.SeriesAddedEvent(series.Id, series.Name, library.Id));
var progress = Math.Max(0F, Math.Min(100F, i * 1F / newSeries.Count));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(library.Id, progress));
}
else
{
// This is probably not needed. Better to catch the exception.
_logger.LogCritical(
"[ScannerService] There was a critical error that resulted in a failed scan. Please check logs and rescan");
}
i++;
}
catch (Exception ex)
{
_logger.LogError(ex, "[ScannerService] There was an exception updating volumes for {SeriesName}", series.Name);
}
}
_logger.LogDebug(
"[ScannerService] Added {NewSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name);
}
public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
private void UpdateSeries(Series series, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{
try
{
_logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);
var parsedInfos = ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray();
UpdateVolumes(series, parsedInfos);
series.Pages = series.Volumes.Sum(v => v.Pages);
series.NormalizedName = Parser.Parser.Normalize(series.Name);
series.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>());
if (series.Format == MangaFormat.Unknown)
{
series.Format = parsedInfos[0].Format;
}
series.OriginalName ??= parsedInfos[0].Series;
}
catch (Exception ex)
{
_logger.LogError(ex, "[ScannerService] There was an exception updating volumes for {SeriesName}", series.Name);
}
}
public static IEnumerable<Series> FindSeriesNotOnDisk(IEnumerable<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{
var foundSeries = parsedSeries.Select(s => s.Key.Name).ToList();
return existingSeries.Where(es => !es.NameInList(foundSeries) && !SeriesHasMatchingParserInfoFormat(es, parsedSeries));
@ -332,7 +464,7 @@ namespace API.Services.Tasks
/// <param name="missingSeries">Series not found on disk or can't be parsed</param>
/// <param name="removeCount"></param>
/// <returns>the updated existingSeries</returns>
public static ICollection<Series> RemoveMissingSeries(ICollection<Series> existingSeries, IEnumerable<Series> missingSeries, out int removeCount)
public static IList<Series> RemoveMissingSeries(IList<Series> existingSeries, IEnumerable<Series> missingSeries, out int removeCount)
{
var existingCount = existingSeries.Count;
var missingList = missingSeries.ToList();
@ -351,7 +483,7 @@ namespace API.Services.Tasks
var startingVolumeCount = series.Volumes.Count;
// Add new volumes and update chapters per volume
var distinctVolumes = parsedInfos.DistinctVolumes();
_logger.LogDebug("Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
_logger.LogDebug("[ScannerService] Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
foreach (var volumeNumber in distinctVolumes)
{
var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
@ -359,9 +491,10 @@ namespace API.Services.Tasks
{
volume = DbFactory.Volume(volumeNumber);
series.Volumes.Add(volume);
_unitOfWork.VolumeRepository.Add(volume);
}
_logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
_logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
UpdateChapters(volume, infos);
volume.Pages = volume.Chapters.Sum(c => c.Pages);
@ -371,23 +504,26 @@ namespace API.Services.Tasks
var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList();
if (series.Volumes.Count != nonDeletedVolumes.Count)
{
_logger.LogDebug("Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",
_logger.LogDebug("[ScannerService] Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",
(series.Volumes.Count - nonDeletedVolumes.Count), series.Name);
var deletedVolumes = series.Volumes.Except(nonDeletedVolumes);
foreach (var volume in deletedVolumes)
{
var file = volume.Chapters.FirstOrDefault()?.Files.FirstOrDefault()?.FilePath ?? "no files";
if (new FileInfo(file).Exists)
{
_logger.LogError("Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}", file);
}
_logger.LogDebug("Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file);
var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? "";
if (!string.IsNullOrEmpty(file) && File.Exists(file))
{
_logger.LogError(
"[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}",
file);
}
_logger.LogDebug("[ScannerService] Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file);
}
series.Volumes = nonDeletedVolumes;
}
_logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
_logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
series.Name, startingVolumeCount, series.Volumes.Count);
}
@ -417,7 +553,7 @@ namespace API.Services.Tasks
if (chapter == null)
{
_logger.LogDebug(
"Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters);
"[ScannerService] Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters);
volume.Chapters.Add(DbFactory.Chapter(info));
}
else
@ -454,7 +590,7 @@ namespace API.Services.Tasks
{
if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter))
{
_logger.LogDebug("Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
_logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
volume.Chapters.Remove(existingChapter);
}
else
@ -470,42 +606,47 @@ namespace API.Services.Tasks
private MangaFile CreateMangaFile(ParserInfo info)
{
switch (info.Format)
MangaFile mangaFile = null;
switch (info.Format)
{
case MangaFormat.Archive:
{
return new MangaFile()
mangaFile = new MangaFile()
{
FilePath = info.FullFilePath,
Format = info.Format,
Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath)
};
break;
}
case MangaFormat.Pdf:
case MangaFormat.Epub:
{
return new MangaFile()
mangaFile = new MangaFile()
{
FilePath = info.FullFilePath,
Format = info.Format,
Pages = _bookService.GetNumberOfPages(info.FullFilePath)
};
break;
}
case MangaFormat.Image:
{
return new MangaFile()
{
FilePath = info.FullFilePath,
Format = info.Format,
Pages = 1
};
mangaFile = new MangaFile()
{
FilePath = info.FullFilePath,
Format = info.Format,
Pages = 1
};
break;
}
default:
_logger.LogWarning("[Scanner] Ignoring {Filename}. File type is not supported", info.Filename);
break;
}
return null;
mangaFile?.UpdateLastModified();
return mangaFile;
}
private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info)
@ -515,20 +656,31 @@ namespace API.Services.Tasks
if (existingFile != null)
{
existingFile.Format = info.Format;
if (existingFile.HasFileBeenModified() || existingFile.Pages == 0)
if (!existingFile.HasFileBeenModified() && existingFile.Pages != 0) return;
switch (existingFile.Format)
{
existingFile.Pages = (existingFile.Format == MangaFormat.Epub || existingFile.Format == MangaFormat.Pdf)
? _bookService.GetNumberOfPages(info.FullFilePath)
: _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
case MangaFormat.Epub:
case MangaFormat.Pdf:
existingFile.Pages = _bookService.GetNumberOfPages(info.FullFilePath);
break;
case MangaFormat.Image:
existingFile.Pages = 1;
break;
case MangaFormat.Unknown:
existingFile.Pages = 0;
break;
case MangaFormat.Archive:
existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
break;
}
existingFile.LastModified = File.GetLastWriteTime(info.FullFilePath);
}
else
{
var file = CreateMangaFile(info);
if (file != null)
{
chapter.Files.Add(file);
}
if (file == null) return;
chapter.Files.Add(file);
}
}
}


@ -1,23 +1,52 @@
using System.Threading;
using System;
using API.DTOs.Update;
namespace API.SignalR
{
public static class MessageFactory
{
public static SignalRMessage ScanSeriesEvent(int seriesId)
public static SignalRMessage ScanSeriesEvent(int seriesId, string seriesName)
{
return new SignalRMessage()
{
Name = SignalREvents.ScanSeries,
Body = new
{
SeriesId = seriesId
SeriesId = seriesId,
SeriesName = seriesName
}
};
}
public static SignalRMessage ScanLibraryEvent(int libraryId, string stage)
public static SignalRMessage SeriesAddedEvent(int seriesId, string seriesName, int libraryId)
{
return new SignalRMessage()
{
Name = SignalREvents.SeriesAdded,
Body = new
{
SeriesId = seriesId,
SeriesName = seriesName,
LibraryId = libraryId
}
};
}
public static SignalRMessage SeriesRemovedEvent(int seriesId, string seriesName, int libraryId)
{
return new SignalRMessage()
{
Name = SignalREvents.SeriesRemoved,
Body = new
{
SeriesId = seriesId,
SeriesName = seriesName,
LibraryId = libraryId
}
};
}
public static SignalRMessage ScanLibraryProgressEvent(int libraryId, float progress)
{
return new SignalRMessage()
{
@ -25,11 +54,14 @@ namespace API.SignalR
Body = new
{
LibraryId = libraryId,
Stage = stage
Progress = progress,
EventTime = DateTime.Now
}
};
}
public static SignalRMessage RefreshMetadataEvent(int libraryId, int seriesId)
{
return new SignalRMessage()
@ -52,5 +84,17 @@ namespace API.SignalR
};
}
public static SignalRMessage SeriesAddedToCollection(int tagId, int seriesId)
{
return new SignalRMessage
{
Name = SignalREvents.SeriesAddedToCollection,
Body = new
{
TagId = tagId,
SeriesId = seriesId
}
};
}
}
}
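
Each factory above wraps its payload in a SignalRMessage whose Name doubles as the event name clients subscribe to. A sketch of the sending side, mirroring the _messageHub.Clients.All.SendAsync calls in the scanner and metadata services (class and method names here are illustrative):

using System.Threading.Tasks;
using Microsoft.AspNetCore.SignalR;

public class SeriesNotifier // hypothetical consumer of MessageFactory
{
    private readonly IHubContext<MessageHub> _messageHub;
    public SeriesNotifier(IHubContext<MessageHub> messageHub) => _messageHub = messageHub;

    // Broadcasts to every connected client; the web client listens on "SeriesAdded".
    public Task NotifySeriesAdded(int seriesId, string seriesName, int libraryId) =>
        _messageHub.Clients.All.SendAsync(SignalREvents.SeriesAdded,
            MessageFactory.SeriesAddedEvent(seriesId, seriesName, libraryId));
}
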


@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Extensions;
using API.SignalR.Presence;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.SignalR;
@ -13,8 +15,14 @@ namespace API.SignalR
[Authorize]
public class MessageHub : Hub
{
private readonly IPresenceTracker _tracker;
private static readonly HashSet<string> Connections = new HashSet<string>();
public MessageHub(IPresenceTracker tracker)
{
_tracker = tracker;
}
public static bool IsConnected
{
get
@ -33,6 +41,12 @@ namespace API.SignalR
Connections.Add(Context.ConnectionId);
}
await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync(SignalREvents.OnlineUsers, currentUsers);
await base.OnConnectedAsync();
}
@ -43,6 +57,12 @@ namespace API.SignalR
Connections.Remove(Context.ConnectionId);
}
await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync(SignalREvents.OnlineUsers, currentUsers);
await base.OnDisconnectedAsync(exception);
}
}
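
On connect and disconnect the hub above rebroadcasts the full online-user list under the OnlineUsers event. A minimal .NET client sketch against the hubs/messages endpoint mapped at the end of this diff (URL and token are placeholders; the payload is assumed to be the username array PresenceTracker returns):

using System;
using System.Threading.Tasks;
using Microsoft.AspNetCore.SignalR.Client;

var jwtToken = "<JWT copied from local storage>"; // placeholder
var connection = new HubConnectionBuilder()
    .WithUrl("http://localhost:5000/hubs/messages", options =>
    {
        options.AccessTokenProvider = () => Task.FromResult(jwtToken);
    })
    .Build();

// Fires whenever any user connects or disconnects.
connection.On<string[]>("OnlineUsers", users =>
    Console.WriteLine($"Online: {string.Join(", ", users)}"));

await connection.StartAsync();
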


@ -1,41 +0,0 @@
using System;
using System.Threading.Tasks;
using API.Extensions;
using API.SignalR.Presence;
using Microsoft.AspNetCore.SignalR;
namespace API.SignalR
{
/// <summary>
/// Keeps track of who is logged into the app
/// </summary>
public class PresenceHub : Hub
{
private readonly IPresenceTracker _tracker;
public PresenceHub(IPresenceTracker tracker)
{
_tracker = tracker;
}
public override async Task OnConnectedAsync()
{
await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync("GetOnlineUsers", currentUsers);
}
public override async Task OnDisconnectedAsync(Exception exception)
{
await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync("GetOnlineUsers", currentUsers);
await base.OnDisconnectedAsync(exception);
}
}
}


@ -6,6 +6,10 @@
public const string ScanSeries = "ScanSeries";
public const string RefreshMetadata = "RefreshMetadata";
public const string ScanLibrary = "ScanLibrary";
public const string SeriesAdded = "SeriesAdded";
public const string SeriesRemoved = "SeriesRemoved";
public const string ScanLibraryProgress = "ScanLibraryProgress";
public const string OnlineUsers = "OnlineUsers";
public const string SeriesAddedToCollection = "SeriesAddedToCollection";
}
}


@ -5,6 +5,8 @@ using System.Linq;
using System.Net;
using System.Net.Sockets;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Repositories;
using API.Middleware;
using API.Services;
using API.Services.HostedServices;
@ -52,8 +54,41 @@ namespace API
services.AddSwaggerGen(c =>
{
c.SwaggerDoc("v1", new OpenApiInfo { Title = "Kavita API", Version = "v1" });
c.SwaggerDoc("Kavita API", new OpenApiInfo()
{
Description = "Kavita provides a set of APIs that are authenticated by JWT. JWT token can be copied from local storage.",
Title = "Kavita API",
Version = "v1",
});
var filePath = Path.Combine(AppContext.BaseDirectory, "API.xml");
c.IncludeXmlComments(filePath);
c.AddSecurityDefinition("Bearer", new OpenApiSecurityScheme {
In = ParameterLocation.Header,
Description = "Please insert JWT with Bearer into field",
Name = "Authorization",
Type = SecuritySchemeType.ApiKey
});
c.AddSecurityRequirement(new OpenApiSecurityRequirement {
{
new OpenApiSecurityScheme
{
Reference = new OpenApiReference
{
Type = ReferenceType.SecurityScheme,
Id = "Bearer"
}
},
Array.Empty<string>()
}
});
c.AddServer(new OpenApiServer()
{
Description = "Local Server",
Url = "http://localhost:5000/",
});
});
services.AddResponseCompression(options =>
{
@ -88,14 +123,17 @@ namespace API
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env,
IHostApplicationLifetime applicationLifetime)
IHostApplicationLifetime applicationLifetime, IServiceProvider serviceProvider)
{
app.UseMiddleware<ExceptionMiddleware>();
if (env.IsDevelopment())
{
app.UseSwagger();
app.UseSwaggerUI(c => c.SwaggerEndpoint("/swagger/v1/swagger.json", "API v1"));
app.UseSwaggerUI(c =>
{
c.SwaggerEndpoint("/swagger/v1/swagger.json", "Kavita API " + BuildInfo.Version);
});
app.UseHangfireDashboard();
}
@ -124,11 +162,29 @@ namespace API
app.UseDefaultFiles();
// This is not implemented completely. Commenting out until implemented
// var service = serviceProvider.GetRequiredService<IUnitOfWork>();
// var settings = service.SettingsRepository.GetSettingsDto();
// if (!string.IsNullOrEmpty(settings.BaseUrl) && !settings.BaseUrl.Equals("/"))
// {
// var path = !settings.BaseUrl.StartsWith("/")
// ? $"/{settings.BaseUrl}"
// : settings.BaseUrl;
// path = !path.EndsWith("/")
// ? $"{path}/"
// : path;
// app.UsePathBase(path);
// Console.WriteLine("Starting with base url as " + path);
// }
app.UseStaticFiles(new StaticFileOptions
{
ContentTypeProvider = new FileExtensionContentTypeProvider()
});
app.Use(async (context, next) =>
{
context.Response.GetTypedHeaders().CacheControl =
@ -147,7 +203,6 @@ namespace API
{
endpoints.MapControllers();
endpoints.MapHub<MessageHub>("hubs/messages");
endpoints.MapHub<PresenceHub>("hubs/presence");
endpoints.MapHangfireDashboard();
endpoints.MapFallbackToController("Index", "Fallback");
});