Kavita+ Overhaul & New Changelog (#3507)

Joe Milazzo 2025-01-20 08:14:57 -06:00 committed by GitHub
parent d880c1690c
commit a5707617f2
249 changed files with 14775 additions and 2300 deletions


@ -31,6 +31,10 @@ public interface IDirectoryService
string TemplateDirectory { get; }
string PublisherDirectory { get; }
/// <summary>
/// Used for caching documents that may need to stay on disk for more than a day
/// </summary>
string LongTermCacheDirectory { get; }
/// <summary>
/// Original BookmarkDirectory. Only used for resetting directory. Use <see cref="ServerSettingKey.BookmarkDirectory"/> for actual path.
/// </summary>
string BookmarkDirectory { get; }
@ -89,6 +93,7 @@ public class DirectoryService : IDirectoryService
public string CustomizedTemplateDirectory { get; }
public string TemplateDirectory { get; }
public string PublisherDirectory { get; }
public string LongTermCacheDirectory { get; }
private readonly ILogger<DirectoryService> _logger;
private const RegexOptions MatchOptions = RegexOptions.Compiled | RegexOptions.IgnoreCase;
@ -126,6 +131,8 @@ public class DirectoryService : IDirectoryService
ExistOrCreate(TemplateDirectory);
PublisherDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "images", "publishers");
ExistOrCreate(PublisherDirectory);
LongTermCacheDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "cache-long");
ExistOrCreate(LongTermCacheDirectory);
}
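// Illustrative sketch (not part of this commit): a consumer of IDirectoryService could place
// artifacts that must survive the normal cache sweep under LongTermCacheDirectory. The method
// and file name here are hypothetical.
public string GetLongTermCachePathFor(string fileName)
{
// Path.Join keeps this platform-agnostic; the directory already exists because the
// constructor above calls ExistOrCreate on it.
return FileSystem.Path.Join(LongTermCacheDirectory, fileName);
}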
/// <summary>


@ -8,7 +8,10 @@ using System.Threading.Tasks;
using System.Web;
using API.Data;
using API.DTOs.Email;
using API.Entities;
using API.Services.Plus;
using Kavita.Common;
using Kavita.Common.Extensions;
using MailKit.Security;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Hosting;
@ -29,6 +32,8 @@ internal class EmailOptionsDto
/// Filenames to attach
/// </summary>
public IList<string>? Attachments { get; set; }
public int? ToUserId { get; set; }
public required string Template { get; set; }
}
public interface IEmailService
@ -43,6 +48,9 @@ public interface IEmailService
Task<string> GenerateEmailLink(HttpRequest request, string token, string routePart, string email,
bool withHost = true);
Task<bool> SendTokenExpiredEmail(int userId, ScrobbleProvider provider);
Task<bool> SendTokenExpiringSoonEmail(int userId, ScrobbleProvider provider);
}
public class EmailService : IEmailService
@ -56,6 +64,14 @@ public class EmailService : IEmailService
private const string TemplatePath = @"{0}.html";
private const string LocalHost = "localhost:4200";
public const string SendToDeviceTemplate = "SendToDevice";
public const string EmailTestTemplate = "EmailTest";
public const string EmailChangeTemplate = "EmailChange";
public const string TokenExpirationTemplate = "TokenExpiration";
public const string TokenExpiringSoonTemplate = "TokenExpiringSoon";
public const string EmailConfirmTemplate = "EmailConfirm";
public const string EmailPasswordResetTemplate = "EmailPasswordReset";
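// Illustrative sketch (not part of this commit): the constants above are template file stems;
// with TemplatePath = "{0}.html", a hypothetical helper resolves a stem to its file name.
private static string ResolveTemplateFileName(string templateName) =>
string.Format(TemplatePath, templateName); // e.g. "EmailTest" -> "EmailTest.html"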
public EmailService(ILogger<EmailService> logger, IUnitOfWork unitOfWork, IDirectoryService directoryService,
IHostEnvironment environment, ILocalizationService localizationService)
{
@ -104,12 +120,13 @@ public class EmailService : IEmailService
var emailOptions = new EmailOptionsDto()
{
Subject = "Kavita - Email Test",
Body = UpdatePlaceHolders(await GetEmailBody("EmailTest"), placeholders),
Template = EmailTestTemplate,
Body = UpdatePlaceHolders(await GetEmailBody(EmailTestTemplate), placeholders),
Preheader = "Kavita - Email Test",
ToEmails = new List<string>()
{
adminEmail
}
},
};
await SendEmail(emailOptions);
@ -139,7 +156,8 @@ public class EmailService : IEmailService
var emailOptions = new EmailOptionsDto()
{
Subject = UpdatePlaceHolders("Your email has been changed on {{InvitingUser}}'s Server", placeholders),
Body = UpdatePlaceHolders(await GetEmailBody("EmailChange"), placeholders),
Template = EmailChangeTemplate,
Body = UpdatePlaceHolders(await GetEmailBody(EmailChangeTemplate), placeholders),
Preheader = UpdatePlaceHolders("Your email has been changed on {{InvitingUser}}'s Server", placeholders),
ToEmails = new List<string>()
{
@ -155,9 +173,9 @@ public class EmailService : IEmailService
/// </summary>
/// <param name="email"></param>
/// <returns></returns>
public bool IsValidEmail(string email)
public bool IsValidEmail(string? email)
{
return new EmailAddressAttribute().IsValid(email);
return !string.IsNullOrEmpty(email) && new EmailAddressAttribute().IsValid(email);
}
public async Task<string> GenerateEmailLink(HttpRequest request, string token, string routePart, string email, bool withHost = true)
@ -180,6 +198,66 @@ public class EmailService : IEmailService
.Replace("//", "/");
}
public async Task<bool> SendTokenExpiredEmail(int userId, ScrobbleProvider provider)
{
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (user == null || !IsValidEmail(user.Email) || !settings.IsEmailSetup()) return false;
var placeholders = new List<KeyValuePair<string, string>>
{
new ("{{UserName}}", user.UserName!),
new ("{{Provider}}", provider.ToDescription()),
new ("{{Link}}", $"{settings.HostName}/settings#account" ),
};
var emailOptions = new EmailOptionsDto()
{
Subject = UpdatePlaceHolders("Kavita - Your {{Provider}} token has expired and scrobbling events have stopped", placeholders),
Template = TokenExpirationTemplate,
Body = UpdatePlaceHolders(await GetEmailBody(TokenExpirationTemplate), placeholders),
Preheader = UpdatePlaceHolders("Kavita - Your {{Provider}} token has expired and scrobbling events have stopped", placeholders),
ToEmails = new List<string>()
{
user.Email
}
};
await SendEmail(emailOptions);
return true;
}
public async Task<bool> SendTokenExpiringSoonEmail(int userId, ScrobbleProvider provider)
{
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (user == null || !IsValidEmail(user.Email) || !settings.IsEmailSetup()) return false;
var placeholders = new List<KeyValuePair<string, string>>
{
new ("{{UserName}}", user.UserName!),
new ("{{Provider}}", provider.ToDescription()),
new ("{{Link}}", $"{settings.HostName}/settings#account" ),
};
var emailOptions = new EmailOptionsDto()
{
Subject = UpdatePlaceHolders("Kavita - Your {{Provider}} token will expire soon!", placeholders),
Template = TokenExpiringSoonTemplate,
Body = UpdatePlaceHolders(await GetEmailBody(TokenExpiringSoonTemplate), placeholders),
Preheader = UpdatePlaceHolders("Kavita - Your {{Provider}} token will expire soon!", placeholders),
ToEmails = new List<string>()
{
user.Email
}
};
await SendEmail(emailOptions);
return true;
}
/// <summary>
/// Sends an invite email to a user to setup their account
/// </summary>
@ -195,7 +273,8 @@ public class EmailService : IEmailService
var emailOptions = new EmailOptionsDto()
{
Subject = UpdatePlaceHolders("You've been invited to join {{InvitingUser}}'s Server", placeholders),
Body = UpdatePlaceHolders(await GetEmailBody("EmailConfirm"), placeholders),
Template = EmailConfirmTemplate,
Body = UpdatePlaceHolders(await GetEmailBody(EmailConfirmTemplate), placeholders),
Preheader = UpdatePlaceHolders("You've been invited to join {{InvitingUser}}'s Server", placeholders),
ToEmails = new List<string>()
{
@ -221,8 +300,9 @@ public class EmailService : IEmailService
var emailOptions = new EmailOptionsDto()
{
Subject = UpdatePlaceHolders("A password reset has been requested", placeholders),
Body = UpdatePlaceHolders(await GetEmailBody("EmailPasswordReset"), placeholders),
Preheader = "A password reset has been requested",
Template = EmailPasswordResetTemplate,
Body = UpdatePlaceHolders(await GetEmailBody(EmailPasswordResetTemplate), placeholders),
Preheader = "Email confirmation is required for continued access. Click the button to confirm your email.",
ToEmails = new List<string>()
{
dto.EmailAddress
@ -242,11 +322,9 @@ public class EmailService : IEmailService
{
Subject = "Send file from Kavita",
Preheader = "File(s) sent from Kavita",
ToEmails = new List<string>()
{
data.DestinationEmail
},
Body = await GetEmailBody("SendToDevice"),
ToEmails = [data.DestinationEmail],
Template = SendToDeviceTemplate,
Body = await GetEmailBody(SendToDeviceTemplate),
Attachments = data.FilePaths.ToList()
};
@ -302,21 +380,66 @@ public class EmailService : IEmailService
ServicePointManager.SecurityProtocol = SecurityProtocolType.SystemDefault;
var emailAddress = userEmailOptions.ToEmails[0];
AppUser? user;
if (userEmailOptions.Template == SendToDeviceTemplate)
{
user = await _unitOfWork.UserRepository.GetUserByDeviceEmail(emailAddress);
}
else
{
user = await _unitOfWork.UserRepository.GetUserByEmailAsync(emailAddress);
}
try
{
await smtpClient.SendAsync(email);
if (user != null)
{
await LogEmailHistory(user.Id, userEmailOptions.Template, userEmailOptions.Subject, userEmailOptions.Body, "Sent");
}
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue sending the email");
if (user != null)
{
await LogEmailHistory(user.Id, userEmailOptions.Template, userEmailOptions.Subject, userEmailOptions.Body, "Failed", ex.Message);
}
_logger.LogError("Could not find user on file for email, {Template} email was not sent and not recorded into history table", userEmailOptions.Template);
throw;
}
finally
{
await smtpClient.DisconnectAsync(true);
}
}
/// <summary>
/// Logs email history for the specified user.
/// </summary>
private async Task LogEmailHistory(int appUserId, string emailTemplate, string subject, string body, string deliveryStatus, string? errorMessage = null)
{
var emailHistory = new EmailHistory
{
AppUserId = appUserId,
EmailTemplate = emailTemplate,
Sent = deliveryStatus == "Sent",
Body = body,
Subject = subject,
SendDate = DateTime.UtcNow,
DeliveryStatus = deliveryStatus,
ErrorMessage = errorMessage
};
_unitOfWork.DataContext.EmailHistory.Add(emailHistory);
await _unitOfWork.CommitAsync();
}
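// Illustrative sketch (not part of this commit): with every send and failure recorded above,
// other services can consult EmailHistory to avoid duplicate sends. Hypothetical helper;
// assumes a using for Microsoft.EntityFrameworkCore (AnyAsync).
private async Task<bool> HasSentTemplateSince(int userId, string template, DateTime sinceUtc)
{
return await _unitOfWork.DataContext.EmailHistory
.AnyAsync(h => h.AppUserId == userId && h.Sent && h.EmailTemplate == template && h.SendDate >= sinceUtc);
}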
private async Task<string> GetTemplatePath(string templateName)
{
if ((await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).SmtpConfig.CustomizedTemplates)


@ -1,12 +1,13 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs;
using API.DTOs.Collection;
using API.DTOs.KavitaPlus.ExternalMetadata;
using API.DTOs.Metadata.Matching;
using API.DTOs.Recommendation;
using API.DTOs.Scrobbling;
using API.DTOs.SeriesDetail;
@ -15,44 +16,24 @@ using API.Entities.Enums;
using API.Entities.Metadata;
using API.Extensions;
using API.Helpers;
using API.SignalR;
using AutoMapper;
using Flurl.Http;
using Hangfire;
using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
namespace API.Services.Plus;
#nullable enable
/// <summary>
/// Used for matching and fetching metadata on a series
/// </summary>
internal class ExternalMetadataIdsDto
{
public long? MalId { get; set; }
public int? AniListId { get; set; }
public string? SeriesName { get; set; }
public string? LocalizedSeriesName { get; set; }
public MediaFormat? PlusMediaFormat { get; set; } = MediaFormat.Unknown;
}
internal class SeriesDetailPlusApiDto
{
public IEnumerable<MediaRecommendationDto> Recommendations { get; set; }
public IEnumerable<UserReviewDto> Reviews { get; set; }
public IEnumerable<RatingDto> Ratings { get; set; }
public int? AniListId { get; set; }
public long? MalId { get; set; }
}
public interface IExternalMetadataService
{
Task<ExternalSeriesDetailDto?> GetExternalSeriesDetail(int? aniListId, long? malId, int? seriesId);
Task<SeriesDetailPlusDto> GetSeriesDetailPlus(int seriesId, LibraryType libraryType);
Task ForceKavitaPlusRefresh(int seriesId);
Task<SeriesDetailPlusDto?> GetSeriesDetailPlus(int seriesId, LibraryType libraryType);
//Task ForceKavitaPlusRefresh(int seriesId);
Task FetchExternalDataTask();
/// <summary>
/// This is an entry point and provides a level of protection against calling upstream API. Will only allow 100 new
@ -64,6 +45,9 @@ public interface IExternalMetadataService
Task GetNewSeriesData(int seriesId, LibraryType libraryType);
Task<IList<MalStackDto>> GetStacksForUser(int userId);
Task<IList<ExternalSeriesMatchDto>> MatchSeries(MatchSeriesDto dto);
Task FixSeriesMatch(int seriesId, ExternalSeriesDetailDto dto);
Task UpdateSeriesDontMatch(int seriesId, bool dontMatch);
}
public class ExternalMetadataService : IExternalMetadataService
@ -72,9 +56,11 @@ public class ExternalMetadataService : IExternalMetadataService
private readonly ILogger<ExternalMetadataService> _logger;
private readonly IMapper _mapper;
private readonly ILicenseService _licenseService;
private readonly IScrobblingService _scrobblingService;
private readonly IEventHub _eventHub;
private readonly TimeSpan _externalSeriesMetadataCache = TimeSpan.FromDays(30);
public static readonly ImmutableArray<LibraryType> NonEligibleLibraryTypes = ImmutableArray.Create
(LibraryType.Comic, LibraryType.Book, LibraryType.Image, LibraryType.ComicVine);
public static readonly HashSet<LibraryType> NonEligibleLibraryTypes =
[LibraryType.Comic, LibraryType.Book, LibraryType.Image, LibraryType.ComicVine];
private readonly SeriesDetailPlusDto _defaultReturn = new()
{
Recommendations = null,
@ -84,16 +70,17 @@ public class ExternalMetadataService : IExternalMetadataService
// Allow 50 requests per 24 hours
private static readonly RateLimiter RateLimiter = new RateLimiter(50, TimeSpan.FromHours(24), false);
public ExternalMetadataService(IUnitOfWork unitOfWork, ILogger<ExternalMetadataService> logger, IMapper mapper, ILicenseService licenseService)
public ExternalMetadataService(IUnitOfWork unitOfWork, ILogger<ExternalMetadataService> logger, IMapper mapper,
ILicenseService licenseService, IScrobblingService scrobblingService, IEventHub eventHub)
{
_unitOfWork = unitOfWork;
_logger = logger;
_mapper = mapper;
_licenseService = licenseService;
_scrobblingService = scrobblingService;
_eventHub = eventHub;
FlurlHttp.ConfigureClient(Configuration.KavitaPlusApiUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
FlurlConfiguration.ConfigureClientForUrl(Configuration.KavitaPlusApiUrl);
}
/// <summary>
@ -110,7 +97,7 @@ public class ExternalMetadataService : IExternalMetadataService
/// This is a task that runs on a schedule and slowly fetches data from Kavita+ to keep
/// data in the DB non-stale and fetched.
/// </summary>
/// <remarks>To avoid blasting Kavita+ API, this only processes a few records. The goal is to slowly build </remarks>
/// <remarks>To avoid blasting Kavita+ API, this only processes 25 records. The goal is to slowly build out/refresh the data</remarks>
/// <returns></returns>
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
@ -138,21 +125,24 @@ public class ExternalMetadataService : IExternalMetadataService
/// </summary>
/// <param name="seriesId"></param>
/// <returns></returns>
public async Task ForceKavitaPlusRefresh(int seriesId)
{
if (!await _licenseService.HasActiveLicense()) return;
var libraryType = await _unitOfWork.LibraryRepository.GetLibraryTypeBySeriesIdAsync(seriesId);
if (!IsPlusEligible(libraryType)) return;
// Remove from Blacklist if applicable
await _unitOfWork.ExternalSeriesMetadataRepository.RemoveFromBlacklist(seriesId);
var metadata = await _unitOfWork.ExternalSeriesMetadataRepository.GetExternalSeriesMetadata(seriesId);
if (metadata == null) return;
metadata.ValidUntilUtc = DateTime.UtcNow.Subtract(_externalSeriesMetadataCache);
await _unitOfWork.CommitAsync();
}
// public async Task ForceKavitaPlusRefresh(int seriesId)
// {
// // TODO: I think we can remove this now
// if (!await _licenseService.HasActiveLicense()) return;
// var libraryType = await _unitOfWork.LibraryRepository.GetLibraryTypeBySeriesIdAsync(seriesId);
// if (!IsPlusEligible(libraryType)) return;
//
// // Remove from Blacklist if applicable
// var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
// series!.IsBlacklisted = false;
// _unitOfWork.SeriesRepository.Update(series);
//
// var metadata = await _unitOfWork.ExternalSeriesMetadataRepository.GetExternalSeriesMetadata(seriesId);
// if (metadata == null) return;
//
// metadata.ValidUntilUtc = DateTime.UtcNow.Subtract(_externalSeriesMetadataCache);
// await _unitOfWork.CommitAsync();
// }
/// <summary>
/// Fetches data from Kavita+
@ -198,13 +188,7 @@ public class ExternalMetadataService : IExternalMetadataService
var license = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey)).Value;
var result = await ($"{Configuration.KavitaPlusApiUrl}/api/metadata/v2/stacks?username={user.MalUserName}")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.GetJsonAsync<IList<MalStackDto>>();
if (result == null)
@ -221,6 +205,72 @@ public class ExternalMetadataService : IExternalMetadataService
}
}
/// <summary>
/// Returns the match results for a Series from UI Flow
/// </summary>
/// <param name="dto"></param>
/// <returns></returns>
public async Task<IList<ExternalSeriesMatchDto>> MatchSeries(MatchSeriesDto dto)
{
var license = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey)).Value;
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(dto.SeriesId,
SeriesIncludes.Metadata | SeriesIncludes.ExternalMetadata);
var potentialAnilistId = ScrobblingService.ExtractId<int?>(dto.Query, ScrobblingService.AniListWeblinkWebsite);
var potentialMalId = ScrobblingService.ExtractId<long?>(dto.Query, ScrobblingService.MalWeblinkWebsite);
List<string> altNames = [series.LocalizedName, series.OriginalName];
if (potentialAnilistId == null && potentialMalId == null && !string.IsNullOrEmpty(dto.Query))
{
altNames.Add(dto.Query);
}
var matchRequest = new MatchSeriesRequestDto()
{
Format = series.Format == MangaFormat.Epub ? PlusMediaFormat.LightNovel : PlusMediaFormat.Manga,
Query = dto.Query,
SeriesName = series.Name,
AlternativeNames = altNames,
Year = series.Metadata.ReleaseYear,
AniListId = potentialAnilistId ?? ScrobblingService.GetAniListId(series),
MalId = potentialMalId ?? ScrobblingService.GetMalId(series),
};
try
{
var results = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/match-series")
.WithKavitaPlusHeaders(license)
.PostJsonAsync(matchRequest)
.ReceiveJson<IList<ExternalSeriesMatchDto>>();
// Some summaries can contain multiple <br/>s, we need to ensure it's only 1
foreach (var result in results)
{
result.Series.Summary = CleanSummary(result.Series.Summary);
}
return results;
}
catch (Exception ex)
{
_logger.LogError(ex, "An error happened during the request to Kavita+ API");
}
return ArraySegment<ExternalSeriesMatchDto>.Empty;
}
private static string CleanSummary(string? summary)
{
if (string.IsNullOrWhiteSpace(summary))
{
return string.Empty; // Return empty if null, empty, or whitespace.
}
return summary.Replace("<br/>", string.Empty);
}
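// Illustrative usage sketch (not part of this commit): how a hypothetical controller action
// (Microsoft.AspNetCore.Mvc) might drive the UI match flow by forwarding the user's query:
//
// [HttpPost("match-series")]
// public async Task<ActionResult<IList<ExternalSeriesMatchDto>>> MatchSeriesFromUi(
//     [FromBody] MatchSeriesDto dto, [FromServices] IExternalMetadataService externalMetadataService)
//     => Ok(await externalMetadataService.MatchSeries(dto));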
/// <summary>
/// Retrieves Metadata about a Recommended External Series
/// </summary>
@ -249,16 +299,18 @@ public class ExternalMetadataService : IExternalMetadataService
/// Returns Series Detail data from Kavita+ - Review, Recs, Ratings
/// </summary>
/// <param name="seriesId"></param>
/// <param name="libraryType"></param>
/// <returns></returns>
public async Task<SeriesDetailPlusDto> GetSeriesDetailPlus(int seriesId, LibraryType libraryType)
public async Task<SeriesDetailPlusDto?> GetSeriesDetailPlus(int seriesId, LibraryType libraryType)
{
if (!IsPlusEligible(libraryType) || !await _licenseService.HasActiveLicense()) return _defaultReturn;
// Check blacklist (bad matches)
if (await _unitOfWork.ExternalSeriesMetadataRepository.IsBlacklistedSeries(seriesId)) return _defaultReturn;
// Check blacklist (bad matches) or if there is a don't match
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
if (series == null || !series.WillScrobble()) return _defaultReturn;
var needsRefresh =
await _unitOfWork.ExternalSeriesMetadataRepository.ExternalSeriesMetadataNeedsRefresh(seriesId);
await _unitOfWork.ExternalSeriesMetadataRepository.NeedsDataRefresh(seriesId);
if (!needsRefresh)
{
@ -266,28 +318,105 @@ public class ExternalMetadataService : IExternalMetadataService
return await _unitOfWork.ExternalSeriesMetadataRepository.GetSeriesDetailPlusDto(seriesId);
}
var data = await _unitOfWork.SeriesRepository.GetPlusSeriesDto(seriesId);
if (data == null) return _defaultReturn;
// Get from Kavita+ API the Full Series metadata with rec/rev and cache to ExternalMetadata tables
return await FetchExternalMetadataForSeries(seriesId, libraryType, data);
}
/// <summary>
/// This will override any sort of matching that was done prior and force it to be what the user Selected
/// </summary>
/// <param name="seriesId"></param>
/// <param name="dto"></param>
public async Task FixSeriesMatch(int seriesId, ExternalSeriesDetailDto dto)
{
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.Library);
if (series == null) return;
// Remove from Blacklist
series.IsBlacklisted = false;
series.DontMatch = false;
_unitOfWork.SeriesRepository.Update(series);
// Refetch metadata with a Direct lookup
await FetchExternalMetadataForSeries(seriesId, series.Library.Type, new PlusSeriesDto()
{
SeriesName = dto.Name,
AniListId = dto.AniListId,
MalId = dto.MALId,
MediaFormat = dto.PlusMediaFormat,
});
// Find all scrobble events and rewrite them to be the correct
var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
_unitOfWork.ScrobbleRepository.Remove(events);
await _unitOfWork.CommitAsync();
// Regenerate all events for the series for all users
BackgroundJob.Enqueue(() => _scrobblingService.CreateEventsFromExistingHistoryForSeries(seriesId));
await _eventHub.SendMessageAsync(MessageFactory.Info,
MessageFactory.InfoEvent($"Fix Match: {series.Name}", "Scrobble Events are regenerating with the new match"));
_logger.LogInformation("Matched {SeriesName} with Kavita+ Series {MatchSeriesName}", series.Name, dto.Name);
}
/// <summary>
/// Sets a series to Don't Match and removes all previously cached external metadata (reviews, ratings, recommendations)
/// </summary>
/// <param name="seriesId"></param>
public async Task UpdateSeriesDontMatch(int seriesId, bool dontMatch)
{
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.ExternalMetadata);
if (series == null) return;
_logger.LogInformation("User has asked Kavita to stop matching/scrobbling on {SeriesName}", series.Name);
series.DontMatch = dontMatch;
if (dontMatch)
{
// When we set as DontMatch, we will clear existing External Metadata
var externalSeriesMetadata = await GetOrCreateExternalSeriesMetadataForSeries(seriesId, series!);
_unitOfWork.ExternalSeriesMetadataRepository.Remove(series.ExternalSeriesMetadata);
_unitOfWork.ExternalSeriesMetadataRepository.Remove(externalSeriesMetadata.ExternalReviews);
_unitOfWork.ExternalSeriesMetadataRepository.Remove(externalSeriesMetadata.ExternalRatings);
_unitOfWork.ExternalSeriesMetadataRepository.Remove(externalSeriesMetadata.ExternalRecommendations);
}
_unitOfWork.SeriesRepository.Update(series);
await _unitOfWork.CommitAsync();
}
/// <summary>
/// Requests the full SeriesDetail (rec, review, metadata) data for a Series. Will save to ExternalMetadata tables.
/// </summary>
/// <param name="seriesId"></param>
/// <param name="libraryType"></param>
/// <param name="data"></param>
/// <returns></returns>
private async Task<SeriesDetailPlusDto> FetchExternalMetadataForSeries(int seriesId, LibraryType libraryType, PlusSeriesDto data)
{
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
if (series == null) return _defaultReturn;
try
{
var data = await _unitOfWork.SeriesRepository.GetPlusSeriesDto(seriesId);
if (data == null) return _defaultReturn;
_logger.LogDebug("Fetching Kavita+ Series Detail data for {SeriesName}", data.SeriesName);
var license = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey)).Value;
var result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.PostJsonAsync(data)
.ReceiveJson<SeriesDetailPlusApiDto>();
// Clear out existing results
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
var externalSeriesMetadata = await GetExternalSeriesMetadataForSeries(seriesId, series!);
var externalSeriesMetadata = await GetOrCreateExternalSeriesMetadataForSeries(seriesId, series!);
_unitOfWork.ExternalSeriesMetadataRepository.Remove(externalSeriesMetadata.ExternalReviews);
_unitOfWork.ExternalSeriesMetadataRepository.Remove(externalSeriesMetadata.ExternalRatings);
_unitOfWork.ExternalSeriesMetadataRepository.Remove(externalSeriesMetadata.ExternalRecommendations);
@ -339,17 +468,24 @@ public class ExternalMetadataService : IExternalMetadataService
}
catch (Exception ex)
{
_logger.LogError(ex, "An error happened during the request to Kavita+ API");
_logger.LogError(ex, "Unable to fetch external series metadata from Kavita+");
}
// Blacklist the series as it wasn't found in Kavita+
await _unitOfWork.ExternalSeriesMetadataRepository.CreateBlacklistedSeries(seriesId);
series.IsBlacklisted = true;
await _unitOfWork.CommitAsync();
return _defaultReturn;
}
private async Task<ExternalSeriesMetadata> GetExternalSeriesMetadataForSeries(int seriesId, Series series)
/// <summary>
/// Gets from DB or creates a new one with just SeriesId
/// </summary>
/// <param name="seriesId"></param>
/// <param name="series"></param>
/// <returns></returns>
private async Task<ExternalSeriesMetadata> GetOrCreateExternalSeriesMetadataForSeries(int seriesId, Series series)
{
var externalSeriesMetadata = await _unitOfWork.ExternalSeriesMetadataRepository.GetExternalSeriesMetadata(seriesId);
if (externalSeriesMetadata != null) return externalSeriesMetadata;
@ -454,20 +590,14 @@ public class ExternalMetadataService : IExternalMetadataService
}
payload.SeriesName = series.Name;
payload.LocalizedSeriesName = series.LocalizedName;
payload.PlusMediaFormat = ConvertToMediaFormat(series.Library.Type, series.Format);
payload.PlusMediaFormat = series.Library.Type.ConvertToPlusMediaFormat(series.Format);
}
}
try
{
return await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-by-ids")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.PostJsonAsync(payload)
.ReceiveJson<ExternalSeriesDetailDto>();
@ -479,16 +609,4 @@ public class ExternalMetadataService : IExternalMetadataService
return null;
}
private static MediaFormat ConvertToMediaFormat(LibraryType libraryType, MangaFormat seriesFormat)
{
return libraryType switch
{
LibraryType.Manga => seriesFormat == MangaFormat.Epub ? MediaFormat.LightNovel : MediaFormat.Manga,
LibraryType.Comic => MediaFormat.Comic,
LibraryType.Book => MediaFormat.Book,
LibraryType.LightNovel => MediaFormat.LightNovel,
_ => MediaFormat.Unknown
};
}
}


@ -1,10 +1,13 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Constants;
using API.Data;
using API.DTOs.Account;
using API.DTOs.License;
using API.DTOs.KavitaPlus.License;
using API.Entities.Enums;
using API.Extensions;
using API.Services.Tasks;
using EasyCaching.Core;
using Flurl.Http;
using Kavita.Common;
@ -29,17 +32,20 @@ public interface ILicenseService
Task<bool> HasActiveLicense(bool forceCheck = false);
Task<bool> HasActiveSubscription(string? license);
Task<bool> ResetLicense(string license, string email);
Task<LicenseInfoDto?> GetLicenseInfo(bool forceCheck = false);
}
public class LicenseService(
IEasyCachingProviderFactory cachingProviderFactory,
IUnitOfWork unitOfWork,
ILogger<LicenseService> logger)
ILogger<LicenseService> logger,
IVersionUpdaterService versionUpdaterService)
: ILicenseService
{
private readonly TimeSpan _licenseCacheTimeout = TimeSpan.FromHours(8);
public const string Cron = "0 */4 * * *";
public const string Cron = "0 */9 * * *";
private const string CacheKey = "license";
private const string LicenseInfoCacheKey = "license-info";
/// <summary>
@ -53,13 +59,7 @@ public class LicenseService(
try
{
var response = await (Configuration.KavitaPlusApiUrl + "/api/license/check")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.PostJsonAsync(new LicenseValidDto()
{
License = license,
@ -87,13 +87,7 @@ public class LicenseService(
try
{
var response = await (Configuration.KavitaPlusApiUrl + "/api/license/register")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.PostJsonAsync(new EncryptLicenseDto()
{
License = license.Trim(),
@ -118,36 +112,6 @@ public class LicenseService(
}
}
/// <summary>
/// Checks licenses and updates cache
/// </summary>
/// <remarks>Expected to be called at startup and on reoccurring basis</remarks>
// public async Task ValidateLicenseStatus()
// {
// var provider = _cachingProviderFactory.GetCachingProvider(EasyCacheProfiles.License);
// try
// {
// var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
// if (string.IsNullOrEmpty(license.Value)) {
// await provider.SetAsync(CacheKey, false, _licenseCacheTimeout);
// return;
// }
//
// _logger.LogInformation("Validating Kavita+ License");
//
// await provider.FlushAsync();
// var isValid = await IsLicenseValid(license.Value);
// await provider.SetAsync(CacheKey, isValid, _licenseCacheTimeout);
//
// _logger.LogInformation("Validating Kavita+ License - Complete");
// }
// catch (Exception ex)
// {
// _logger.LogError(ex, "There was an error talking with Kavita+ API for license validation. Rescheduling check in 30 mins");
// await provider.SetAsync(CacheKey, false, _licenseCacheTimeout);
// BackgroundJob.Schedule(() => ValidateLicenseStatus(), TimeSpan.FromMinutes(30));
// }
// }
/// <summary>
/// Checks licenses and updates cache
@ -192,13 +156,7 @@ public class LicenseService(
try
{
var response = await (Configuration.KavitaPlusApiUrl + "/api/license/check-sub")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.PostJsonAsync(new LicenseValidDto()
{
License = license,
@ -230,6 +188,8 @@ public class LicenseService(
var provider = cachingProviderFactory.GetCachingProvider(EasyCacheProfiles.License);
await provider.RemoveAsync(CacheKey);
}
public async Task AddLicense(string license, string email, string? discordId)
@ -251,13 +211,7 @@ public class LicenseService(
{
var encryptedLicense = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
var response = await (Configuration.KavitaPlusApiUrl + "/api/license/reset")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", encryptedLicense.Value)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(encryptedLicense.Value)
.PostJsonAsync(new ResetLicenseDto()
{
License = license.Trim(),
@ -283,4 +237,67 @@ public class LicenseService(
return false;
}
/// <summary>
/// Fetches information about the license from Kavita+. If there is no license or an exception occurs, this returns null, which can be treated as not active.
/// </summary>
/// <param name="forceCheck"></param>
/// <returns></returns>
public async Task<LicenseInfoDto?> GetLicenseInfo(bool forceCheck = false)
{
// Check if there is a license
var hasLicense =
!string.IsNullOrEmpty((await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey))
.Value);
if (!hasLicense) return null;
// Check the cache
var licenseInfoProvider = cachingProviderFactory.GetCachingProvider(EasyCacheProfiles.LicenseInfo);
if (!forceCheck)
{
var cacheValue = await licenseInfoProvider.GetAsync<LicenseInfoDto>(LicenseInfoCacheKey);
if (cacheValue.HasValue) return cacheValue.Value;
}
try
{
var encryptedLicense = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
var response = await (Configuration.KavitaPlusApiUrl + "/api/license/info")
.WithKavitaPlusHeaders(encryptedLicense.Value)
.GetJsonAsync<LicenseInfoDto>();
// This indicates a mismatch on installId or no active subscription
if (response == null) return null;
// Ensure that current version is within the 3 version limit. Don't count Nightly releases or Hotfixes
var releases = await versionUpdaterService.GetAllReleases();
response.IsValidVersion = releases
.Where(r => !r.UpdateTitle.Contains("Hotfix")) // We don't care about Hotfix releases
.Where(r => !r.IsPrerelease || BuildInfo.Version.IsWithinStableRelease(new Version(r.UpdateVersion))) // Ensure we don't take current nightlies within the current/last stable
.Take(3)
.All(r => new Version(r.UpdateVersion) <= BuildInfo.Version);
response.HasLicense = hasLicense;
// Cache if the license is valid here as well
var licenseProvider = cachingProviderFactory.GetCachingProvider(EasyCacheProfiles.License);
await licenseProvider.SetAsync(CacheKey, response.IsActive, _licenseCacheTimeout);
// Cache the license info if IsActive and ExpirationDate is more than 2 days out
if (response.IsActive && response.ExpirationDate > DateTime.UtcNow.AddDays(2))
{
await licenseInfoProvider.SetAsync(LicenseInfoCacheKey, response, _licenseCacheTimeout);
}
return response;
}
catch (FlurlHttpException e)
{
logger.LogError(e, "An error happened during the request to Kavita+ API");
}
return null;
}
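// Illustrative sketch (not part of this commit): the "3 version limit" check above, reduced to
// plain Versions. IsValidVersion only ends up true when the running version is at least as new
// as each of the three most recent qualifying releases; the inputs here are hypothetical.
private static bool IsWithinVersionLimit(Version[] latestQualifyingReleases, Version current)
{
// Mirrors the Take(3).All(...) comparison performed against BuildInfo.Version above.
return latestQualifyingReleases.Take(3).All(v => v <= current);
}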
}


@ -1,122 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs;
using API.DTOs.Recommendation;
using API.DTOs.Scrobbling;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Builders;
using Flurl.Http;
using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
namespace API.Services.Plus;
#nullable enable
public interface IRecommendationService
{
//Task<RecommendationDto> GetRecommendationsForSeries(int userId, int seriesId);
}
public class RecommendationService : IRecommendationService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<RecommendationService> _logger;
public RecommendationService(IUnitOfWork unitOfWork, ILogger<RecommendationService> logger)
{
_unitOfWork = unitOfWork;
_logger = logger;
FlurlHttp.ConfigureClient(Configuration.KavitaPlusApiUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
}
public async Task<RecommendationDto> GetRecommendationsForSeries(int userId, int seriesId)
{
var series =
await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId,
SeriesIncludes.Metadata | SeriesIncludes.Library | SeriesIncludes.Volumes | SeriesIncludes.Chapters);
if (series == null || series.Library.Type == LibraryType.Comic) return new RecommendationDto();
var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
var canSeeExternalSeries = user is {AgeRestriction: AgeRating.NotApplicable} &&
await _unitOfWork.UserRepository.IsUserAdminAsync(user);
var recDto = new RecommendationDto()
{
ExternalSeries = new List<ExternalSeriesDto>(),
OwnedSeries = new List<SeriesDto>()
};
var recs = await GetRecommendations(license.Value, series);
foreach (var rec in recs)
{
// Find the series based on name and type and that the user has access too
var seriesForRec = await _unitOfWork.SeriesRepository.GetSeriesDtoByNamesAndMetadataIds(rec.RecommendationNames,
series.Library.Type, ScrobblingService.CreateUrl(ScrobblingService.AniListWeblinkWebsite, rec.AniListId),
ScrobblingService.CreateUrl(ScrobblingService.MalWeblinkWebsite, rec.MalId));
if (seriesForRec != null)
{
recDto.OwnedSeries.Add(seriesForRec);
continue;
}
if (!canSeeExternalSeries) continue;
// We can show this based on user permissions
if (string.IsNullOrEmpty(rec.Name) || string.IsNullOrEmpty(rec.SiteUrl) || string.IsNullOrEmpty(rec.CoverUrl)) continue;
recDto.ExternalSeries.Add(new ExternalSeriesDto()
{
Name = string.IsNullOrEmpty(rec.Name) ? rec.RecommendationNames.First() : rec.Name,
Url = rec.SiteUrl,
CoverUrl = rec.CoverUrl,
Summary = rec.Summary,
AniListId = rec.AniListId,
MalId = rec.MalId
});
}
await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, recDto.OwnedSeries);
recDto.OwnedSeries = recDto.OwnedSeries.DistinctBy(s => s.Id).OrderBy(r => r.Name).ToList();
recDto.ExternalSeries = recDto.ExternalSeries.DistinctBy(s => s.Name.ToNormalized()).OrderBy(r => r.Name).ToList();
return recDto;
}
protected async Task<IEnumerable<MediaRecommendationDto>> GetRecommendations(string license, Series series)
{
try
{
return await (Configuration.KavitaPlusApiUrl + "/api/recommendation")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.PostJsonAsync(new PlusSeriesDtoBuilder(series).Build())
.ReceiveJson<IEnumerable<MediaRecommendationDto>>();
}
catch (Exception e)
{
_logger.LogError(e, "An error happened during the request to Kavita+ API");
}
return new List<MediaRecommendationDto>();
}
}


@ -10,6 +10,7 @@ using API.DTOs.Filtering;
using API.DTOs.Scrobbling;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Metadata;
using API.Entities.Scrobble;
using API.Extensions;
using API.Helpers;
@ -20,6 +21,7 @@ using Hangfire;
using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Kavita.Common.Helpers;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Services.Plus;
@ -54,6 +56,7 @@ public interface IScrobblingService
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
Task ProcessUpdatesSinceLastSync();
Task CreateEventsFromExistingHistory(int userId = 0);
Task CreateEventsFromExistingHistoryForSeries(int seriesId = 0);
Task ClearEventsForSeries(int userId, int seriesId);
}
@ -64,6 +67,7 @@ public class ScrobblingService : IScrobblingService
private readonly ILogger<ScrobblingService> _logger;
private readonly ILicenseService _licenseService;
private readonly ILocalizationService _localizationService;
private readonly IEmailService _emailService;
public const string AniListWeblinkWebsite = "https://anilist.co/manga/";
public const string MalWeblinkWebsite = "https://myanimelist.net/manga/";
@ -99,16 +103,16 @@ public class ScrobblingService : IScrobblingService
public ScrobblingService(IUnitOfWork unitOfWork, IEventHub eventHub, ILogger<ScrobblingService> logger,
ILicenseService licenseService, ILocalizationService localizationService)
ILicenseService licenseService, ILocalizationService localizationService, IEmailService emailService)
{
_unitOfWork = unitOfWork;
_eventHub = eventHub;
_logger = logger;
_licenseService = licenseService;
_localizationService = localizationService;
_emailService = emailService;
FlurlHttp.ConfigureClient(Configuration.KavitaPlusApiUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
FlurlConfiguration.ConfigureClientForUrl(Configuration.KavitaPlusApiUrl);
}
@ -123,13 +127,76 @@ public class ScrobblingService : IScrobblingService
var users = await _unitOfWork.UserRepository.GetAllUsersAsync();
foreach (var user in users)
{
if (string.IsNullOrEmpty(user.AniListAccessToken) || !TokenService.HasTokenExpired(user.AniListAccessToken)) continue;
_logger.LogInformation("User {UserName}'s AniList token has expired! They need to regenerate it for scrobbling to work", user.UserName);
await _eventHub.SendMessageToAsync(MessageFactory.ScrobblingKeyExpired,
MessageFactory.ScrobblingKeyExpiredEvent(ScrobbleProvider.AniList), user.Id);
if (string.IsNullOrEmpty(user.AniListAccessToken)) continue;
var tokenExpiry = JwtHelper.GetTokenExpiry(user.AniListAccessToken);
// Send early reminder 5 days before token expiry
if (await ShouldSendEarlyReminder(user.Id, tokenExpiry))
{
await _emailService.SendTokenExpiringSoonEmail(user.Id, ScrobbleProvider.AniList);
}
// Send expiration notification after token expiry
if (await ShouldSendExpirationReminder(user.Id, tokenExpiry))
{
await _emailService.SendTokenExpiredEmail(user.Id, ScrobbleProvider.AniList);
}
// Check token validity
if (JwtHelper.IsTokenValid(user.AniListAccessToken)) continue;
_logger.LogInformation(
"User {UserName}'s AniList token has expired or is expiring in a few days! They need to regenerate it for scrobbling to work",
user.UserName);
// Notify user via event
await _eventHub.SendMessageToAsync(
MessageFactory.ScrobblingKeyExpired,
MessageFactory.ScrobblingKeyExpiredEvent(ScrobbleProvider.AniList),
user.Id);
}
}
/// <summary>
/// Checks if an early reminder email should be sent.
/// </summary>
private async Task<bool> ShouldSendEarlyReminder(int userId, DateTime tokenExpiry)
{
var earlyReminderDate = tokenExpiry.AddDays(-5);
if (earlyReminderDate <= DateTime.UtcNow)
{
var hasAlreadySentReminder = await _unitOfWork.DataContext.EmailHistory
.AnyAsync(h => h.AppUserId == userId && h.Sent &&
h.EmailTemplate == EmailService.TokenExpiringSoonTemplate &&
h.SendDate >= earlyReminderDate);
return !hasAlreadySentReminder;
}
return false;
}
/// <summary>
/// Checks if an expiration notification email should be sent.
/// </summary>
private async Task<bool> ShouldSendExpirationReminder(int userId, DateTime tokenExpiry)
{
if (tokenExpiry <= DateTime.UtcNow)
{
var hasAlreadySentExpirationEmail = await _unitOfWork.DataContext.EmailHistory
.AnyAsync(h => h.AppUserId == userId && h.Sent &&
h.EmailTemplate == EmailService.TokenExpirationTemplate &&
h.SendDate >= tokenExpiry);
return !hasAlreadySentExpirationEmail;
}
return false;
}
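// Illustrative sketch (not part of this commit): the two reminder windows used above, expressed
// as pure functions of the token expiry. EmailHistory is then consulted so each window results
// in at most one email per user.
private static bool InEarlyReminderWindow(DateTime tokenExpiryUtc, DateTime nowUtc)
=> tokenExpiryUtc.AddDays(-5) <= nowUtc; // from 5 days before expiry onwards
private static bool InExpiredWindow(DateTime tokenExpiryUtc, DateTime nowUtc)
=> tokenExpiryUtc <= nowUtc; // from expiry onwards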
public async Task<bool> HasTokenExpired(int userId, ScrobbleProvider provider)
{
var token = await GetTokenForProvider(userId, provider);
@ -156,13 +223,7 @@ public class ScrobblingService : IScrobblingService
try
{
var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/valid-key?provider=" + provider + "&key=" + token)
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license.Value)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license.Value)
.GetStringAsync();
return bool.Parse(response);
@ -230,7 +291,7 @@ public class ScrobblingService : IScrobblingService
AniListId = ExtractId<int?>(series.Metadata.WebLinks, AniListWeblinkWebsite),
MalId = GetMalId(series),
AppUserId = userId,
Format = LibraryTypeHelper.GetFormat(series.Library.Type),
Format = series.Library.Type.ConvertToPlusMediaFormat(series.Format),
ReviewBody = reviewBody,
ReviewTitle = reviewTitle
};
@ -277,7 +338,7 @@ public class ScrobblingService : IScrobblingService
AniListId = GetAniListId(series),
MalId = GetMalId(series),
AppUserId = userId,
Format = LibraryTypeHelper.GetFormat(series.Library.Type),
Format = series.Library.Type.ConvertToPlusMediaFormat(series.Format),
Rating = rating
};
_unitOfWork.ScrobbleRepository.Attach(evt);
@ -285,16 +346,16 @@ public class ScrobblingService : IScrobblingService
_logger.LogDebug("Added Scrobbling Rating update on {SeriesName} with Userid {UserId}", series.Name, userId);
}
private static long? GetMalId(Series series)
public static long? GetMalId(Series series)
{
var malId = ExtractId<long?>(series.Metadata.WebLinks, MalWeblinkWebsite);
return malId ?? series.ExternalSeriesMetadata.MalId;
return malId ?? series.ExternalSeriesMetadata?.MalId;
}
private static int? GetAniListId(Series series)
public static int? GetAniListId(Series seriesWithExternalMetadata)
{
var aniListId = ExtractId<int?>(series.Metadata.WebLinks, AniListWeblinkWebsite);
return aniListId ?? series.ExternalSeriesMetadata.AniListId;
var aniListId = ExtractId<int?>(seriesWithExternalMetadata.Metadata.WebLinks, AniListWeblinkWebsite);
return aniListId ?? seriesWithExternalMetadata.ExternalSeriesMetadata?.AniListId;
}
public async Task ScrobbleReadingUpdate(int userId, int seriesId)
@ -340,7 +401,7 @@ public class ScrobblingService : IScrobblingService
(int) await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(seriesId, userId),
ChapterNumber =
await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(seriesId, userId),
Format = LibraryTypeHelper.GetFormat(series.Library.Type),
Format = series.Library.Type.ConvertToPlusMediaFormat(series.Format),
};
_unitOfWork.ScrobbleRepository.Attach(evt);
@ -360,8 +421,8 @@ public class ScrobblingService : IScrobblingService
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.Metadata | SeriesIncludes.Library | SeriesIncludes.ExternalMetadata);
if (series == null) throw new KavitaException(await _localizationService.Translate(userId, "series-doesnt-exist"));
_logger.LogInformation("Processing Scrobbling want-to-read event for {UserId} on {SeriesName}", userId, series.Name);
if (await CheckIfCannotScrobble(userId, seriesId, series)) return;
_logger.LogInformation("Processing Scrobbling want-to-read event for {UserId} on {SeriesName}", userId, series.Name);
var existing = await _unitOfWork.ScrobbleRepository.Exists(userId, series.Id,
onWantToRead ? ScrobbleEventType.AddWantToRead : ScrobbleEventType.RemoveWantToRead);
@ -375,7 +436,7 @@ public class ScrobblingService : IScrobblingService
AniListId = GetAniListId(series),
MalId = GetMalId(series),
AppUserId = userId,
Format = LibraryTypeHelper.GetFormat(series.Library.Type),
Format = series.Library.Type.ConvertToPlusMediaFormat(series.Format),
};
_unitOfWork.ScrobbleRepository.Attach(evt);
await _unitOfWork.CommitAsync();
@ -384,6 +445,7 @@ public class ScrobblingService : IScrobblingService
private async Task<bool> CheckIfCannotScrobble(int userId, int seriesId, Series series)
{
if (series.DontMatch) return true;
if (await _unitOfWork.UserRepository.HasHoldOnSeries(userId, seriesId))
{
_logger.LogInformation("Series {SeriesName} is on UserId {UserId}'s hold list. Not scrobbling", series.Name,
@ -403,13 +465,7 @@ public class ScrobblingService : IScrobblingService
try
{
var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/rate-limit?accessToken=" + aniListToken)
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.GetStringAsync();
return int.Parse(response);
@ -427,13 +483,7 @@ public class ScrobblingService : IScrobblingService
try
{
var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/update")
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.PostJsonAsync(data)
.ReceiveJson<ScrobbleResponseDto>();
@ -463,9 +513,18 @@ public class ScrobblingService : IScrobblingService
if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unknown Series"))
{
// Log the Series name and Id in ScrobbleErrors
_logger.LogInformation("Kavita+ was unable to match the series");
_logger.LogInformation("Kavita+ was unable to match the series: {SeriesName}", evt.Series.Name);
if (!await _unitOfWork.ScrobbleRepository.HasErrorForSeries(evt.SeriesId))
{
// Create a new ExternalMetadata entry to indicate that this is not matchable
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(evt.SeriesId, SeriesIncludes.ExternalMetadata);
if (series.ExternalSeriesMetadata == null)
{
series.ExternalSeriesMetadata = new ExternalSeriesMetadata() {SeriesId = evt.SeriesId};
}
series!.IsBlacklisted = true;
_unitOfWork.SeriesRepository.Update(series);
_unitOfWork.ScrobbleRepository.Attach(new ScrobbleError()
{
Comment = UnknownSeriesErrorMessage,
@ -473,7 +532,7 @@ public class ScrobblingService : IScrobblingService
LibraryId = evt.LibraryId,
SeriesId = evt.SeriesId
});
await _unitOfWork.ExternalSeriesMetadataRepository.CreateBlacklistedSeries(evt.SeriesId, false);
}
evt.IsErrored = true;
@ -501,7 +560,7 @@ public class ScrobblingService : IScrobblingService
}
catch (FlurlHttpException ex)
{
_logger.LogError("Scrobbling to Kavita+ API failed due to error: {ErrorMessage}", ex.Message);
_logger.LogError(ex, "Scrobbling to Kavita+ API failed due to error: {ErrorMessage}", ex.Message);
if (ex.Message.Contains("Call failed with status code 500 (Internal Server Error)"))
{
if (!await _unitOfWork.ScrobbleRepository.HasErrorForSeries(evt.SeriesId))
@ -523,11 +582,19 @@ public class ScrobblingService : IScrobblingService
}
/// <summary>
/// This will back fill events from existing progress history, ratings, and want to read for users that have a valid license
/// This will backfill events from existing progress history, ratings, and want to read for users that have a valid license
/// </summary>
/// <param name="userId">Defaults to 0 meaning all users. Allows a userId to be set if a scrobble key is added to a user</param>
public async Task CreateEventsFromExistingHistory(int userId = 0)
{
if (!await _licenseService.HasActiveLicense()) return;
if (userId != 0)
{
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
if (user == null || string.IsNullOrEmpty(user.AniListAccessToken)) return;
}
var libAllowsScrobbling = (await _unitOfWork.LibraryRepository.GetLibrariesAsync())
.ToDictionary(lib => lib.Id, lib => lib.AllowScrobbling);
@ -535,8 +602,6 @@ public class ScrobblingService : IScrobblingService
.Where(l => userId == 0 || userId == l.Id)
.Select(u => u.Id);
if (!await _licenseService.HasActiveLicense()) return;
foreach (var uId in userIds)
{
var wantToRead = await _unitOfWork.SeriesRepository.GetWantToReadForUserAsync(uId);
@ -553,13 +618,6 @@ public class ScrobblingService : IScrobblingService
await ScrobbleRatingUpdate(uId, rating.SeriesId, rating.Rating);
}
var reviews = await _unitOfWork.UserRepository.GetSeriesWithReviews(uId);
foreach (var review in reviews)
{
if (!libAllowsScrobbling[review.Series.LibraryId]) continue;
await ScrobbleReviewUpdate(uId, review.SeriesId, review.Tagline, review.Review);
}
var seriesWithProgress = await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(0, uId,
new UserParams(), new FilterDto()
{
@ -578,7 +636,59 @@ public class ScrobblingService : IScrobblingService
if (series.PagesRead <= 0) continue; // We only scrobble when progress increases, so skip series with no pages read
await ScrobbleReadingUpdate(uId, series.Id);
}
}
}
public async Task CreateEventsFromExistingHistoryForSeries(int seriesId = 0)
{
if (!await _licenseService.HasActiveLicense()) return;
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
if (series == null) return;
_logger.LogInformation("Creating Scrobbling events for Series {SeriesName}", series.Name);
var libAllowsScrobbling = (await _unitOfWork.LibraryRepository.GetLibrariesAsync())
.ToDictionary(lib => lib.Id, lib => lib.AllowScrobbling);
var userIds = (await _unitOfWork.UserRepository.GetAllUsersAsync())
.Select(u => u.Id);
foreach (var uId in userIds)
{
var wantToRead = await _unitOfWork.SeriesRepository.GetWantToReadForUserAsync(uId);
foreach (var wtr in wantToRead)
{
if (!libAllowsScrobbling[wtr.LibraryId]) continue;
await ScrobbleWantToReadUpdate(uId, wtr.Id, true);
}
var ratings = await _unitOfWork.UserRepository.GetSeriesWithRatings(uId);
foreach (var rating in ratings)
{
if (!libAllowsScrobbling[rating.Series.LibraryId]) continue;
await ScrobbleRatingUpdate(uId, rating.SeriesId, rating.Rating);
}
var seriesWithProgress = await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(0, uId,
new UserParams(), new FilterDto()
{
ReadStatus = new ReadStatus()
{
Read = true,
InProgress = true,
NotRead = false
},
Libraries = libAllowsScrobbling.Keys.Where(k => libAllowsScrobbling[k]).ToList(),
SeriesNameQuery = series.Name
});
foreach (var seriesProgress in seriesWithProgress)
{
if (!libAllowsScrobbling[seriesProgress.LibraryId]) continue;
if (seriesProgress.PagesRead <= 0) continue; // We only scrobble when progress increases, so skip series with no pages read
await ScrobbleReadingUpdate(uId, seriesProgress.Id);
}
}
}
@ -856,7 +966,7 @@ public class ScrobblingService : IScrobblingService
{
if (ex.Message.Contains("Access token is invalid"))
{
_logger.LogCritical("Access Token for UserId: {UserId} needs to be rotated to continue scrobbling", evt.AppUser.Id);
_logger.LogCritical("Access Token for UserId: {UserId} needs to be regenerated/renewed to continue scrobbling", evt.AppUser.Id);
evt.IsErrored = true;
evt.ErrorDetails = AccessTokenErrorMessage;
_unitOfWork.ScrobbleRepository.Update(evt);
@ -956,6 +1066,41 @@ public class ScrobblingService : IScrobblingService
return default(T?);
}
/// <summary>
/// Generate a URL from a given ID and website
/// </summary>
/// <typeparam name="T">Type of the ID (e.g., int, long, string)</typeparam>
/// <param name="id">The ID to embed in the URL</param>
/// <param name="website">The base website URL</param>
/// <returns>The generated URL or null if the website is not supported</returns>
public static string? GenerateUrl<T>(T id, string website)
{
if (!WeblinkExtractionMap.ContainsKey(website))
{
return null; // Unsupported website
}
if (id == null)
{
throw new ArgumentNullException(nameof(id), "ID cannot be null.");
}
// Ensure the type of the ID matches supported types
if (typeof(T) == typeof(int) || typeof(T) == typeof(long) || typeof(T) == typeof(string))
{
return $"{website}{id}";
}
throw new ArgumentException("Unsupported ID type. Supported types are int, long, and string.", nameof(id));
}
public static string CreateUrl(string url, long? id)
{
if (id is null or 0) return string.Empty;
return $"{url}{id}/";
}
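// Illustrative usage sketch (not part of this commit); the expected values assume the AniList
// and MAL weblink sites are registered keys in WeblinkExtractionMap.
public static void UrlHelperExamples()
{
var anilistUrl = GenerateUrl(12345, AniListWeblinkWebsite); // "https://anilist.co/manga/12345"
var malUrl = CreateUrl(MalWeblinkWebsite, 678); // "https://myanimelist.net/manga/678/"
var noUrl = CreateUrl(MalWeblinkWebsite, null); // "" - a missing id yields an empty string
}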
private async Task<int> SetAndCheckRateLimit(IDictionary<int, int> userRateLimits, AppUser user, string license)
{
if (string.IsNullOrEmpty(user.AniListAccessToken)) return 0;
@ -982,9 +1127,4 @@ public class ScrobblingService : IScrobblingService
return count;
}
public static string CreateUrl(string url, long? id)
{
if (id is null or 0) return string.Empty;
return $"{url}{id}/";
}
}

View file

@@ -6,6 +6,7 @@ using System.Text;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs.KavitaPlus.ExternalMetadata;
using API.DTOs.Scrobbling;
using API.Entities;
using API.Entities.Enums;
@@ -20,7 +21,7 @@ using Microsoft.Extensions.Logging;
namespace API.Services.Plus;
#nullable enable
sealed class SeriesCollection
internal sealed class SeriesCollection
{
public required IList<ExternalMetadataIdsDto> Series { get; set; }
public required string Summary { get; set; }
@@ -158,7 +159,7 @@ public class SmartCollectionSyncService : ISmartCollectionSyncService
var normalizedLocalizedSeriesName = seriesInfo.LocalizedSeriesName?.ToNormalized();
// Search for existing series in the collection
var formats = GetMangaFormats(seriesInfo.PlusMediaFormat);
var formats = seriesInfo.PlusMediaFormat.GetMangaFormats();
var existingSeries = collection.Items.FirstOrDefault(s =>
(s.Name.ToNormalized() == normalizedSeriesName ||
s.NormalizedName == normalizedSeriesName ||
@@ -243,19 +244,7 @@ public class SmartCollectionSyncService : ISmartCollectionSyncService
}
}
private static IList<MangaFormat> GetMangaFormats(MediaFormat? mediaFormat)
{
if (mediaFormat == null) return [MangaFormat.Archive];
return mediaFormat switch
{
MediaFormat.Manga => [MangaFormat.Archive, MangaFormat.Image],
MediaFormat.Comic => [MangaFormat.Archive],
MediaFormat.LightNovel => [MangaFormat.Epub, MangaFormat.Pdf],
MediaFormat.Book => [MangaFormat.Epub, MangaFormat.Pdf],
MediaFormat.Unknown => [MangaFormat.Archive],
_ => [MangaFormat.Archive]
};
}
private static long GetStackId(string url)
{
@@ -270,13 +259,7 @@ public class SmartCollectionSyncService : ISmartCollectionSyncService
var license = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey)).Value;
var seriesForStack = await ($"{Configuration.KavitaPlusApiUrl}/api/metadata/v2/stack?stackId=" + stackId)
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
.WithKavitaPlusHeaders(license)
.GetJsonAsync<SeriesCollection>();
return seriesForStack;

View file

@@ -0,0 +1,110 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs.Recommendation;
using API.DTOs.SeriesDetail;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using Flurl.Http;
using Hangfire;
using Kavita.Common;
using Microsoft.Extensions.Logging;
namespace API.Services.Plus;
public interface IWantToReadSyncService
{
Task Sync();
}
/// <summary>
/// Responsible for syncing Want To Read from upstream providers with Kavita
/// </summary>
public class WantToReadSyncService : IWantToReadSyncService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<WantToReadSyncService> _logger;
private readonly ILicenseService _licenseService;
public WantToReadSyncService(IUnitOfWork unitOfWork, ILogger<WantToReadSyncService> logger, ILicenseService licenseService)
{
_unitOfWork = unitOfWork;
_logger = logger;
_licenseService = licenseService;
}
public async Task Sync()
{
if (!await _licenseService.HasActiveLicense()) return;
var license = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey)).Value;
var users = await _unitOfWork.UserRepository.GetAllUsersAsync(AppUserIncludes.WantToRead);
foreach (var user in users)
{
if (string.IsNullOrEmpty(user.MalUserName) && string.IsNullOrEmpty(user.AniListAccessToken)) continue;
try
{
_logger.LogInformation("Syncing want to read for user: {UserName}", user.UserName);
var wantToReadSeries =
await (
$"{Configuration.KavitaPlusApiUrl}/api/metadata/v2/want-to-read?malUsername={user.MalUserName}&aniListToken={user.AniListAccessToken}")
.WithKavitaPlusHeaders(license)
.WithTimeout(
TimeSpan.FromSeconds(120)) // Give extra time as MAL + AniList can result in a lot of data
.GetJsonAsync<List<ExternalSeriesDetailDto>>();
// Match the series (note: There may be duplicates in the final result)
foreach (var unmatchedSeries in wantToReadSeries)
{
var match = await _unitOfWork.SeriesRepository.MatchSeries(unmatchedSeries);
if (match == null)
{
continue;
}
// There is a match, add it
user.WantToRead.Add(new AppUserWantToRead()
{
SeriesId = match.Id,
});
_logger.LogDebug("Added {MatchName} ({Format}) to Want to Read", match.Name, match.Format);
}
// Remove existing Want to Read that are duplicates
user.WantToRead = user.WantToRead.DistinctBy(d => d.SeriesId).ToList();
// TODO: Need to write in the history table the last sync time
// Save the left over entities
_unitOfWork.UserRepository.Update(user);
await _unitOfWork.CommitAsync();
// Trigger CleanupService to cleanup any series in WantToRead that don't belong
RecurringJob.TriggerJob(TaskScheduler.RemoveFromWantToReadTaskId);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when processing want to read series sync for {User}", user.UserName);
}
}
}
// Allow syncing if there are any libraries that have an appropriate Provider, the user has the appropriate token, and the last Sync validates
// private async Task<bool> CanSync(AppUser? user)
// {
//
// if (collection is not {Source: ScrobbleProvider.Mal}) return false;
// if (string.IsNullOrEmpty(collection.SourceUrl)) return false;
// if (collection.LastSyncUtc.Truncate(TimeSpan.TicksPerHour) >= DateTime.UtcNow.AddDays(SyncDelta).Truncate(TimeSpan.TicksPerHour)) return false;
// return true;
// }
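// A minimal sketch (not part of this commit) of the gate described above; the lastSyncUtc
// parameter and SyncDeltaDays constant are hypothetical placeholders for however the last
// sync time ends up being stored.
// private const int SyncDeltaDays = -2;
// private static bool CanSync(AppUser? user, DateTime lastSyncUtc)
// {
//     if (user == null) return false;
//     if (string.IsNullOrEmpty(user.MalUserName) && string.IsNullOrEmpty(user.AniListAccessToken)) return false;
//     if (lastSyncUtc >= DateTime.UtcNow.AddDays(SyncDeltaDays)) return false; // synced too recently
//     return true;
// }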
}

View file

@@ -6,6 +6,7 @@ using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Converters;
using API.Services.Plus;
using API.Services.Tasks;
@@ -60,6 +61,7 @@ public class TaskScheduler : ITaskScheduler
private readonly ILicenseService _licenseService;
private readonly IExternalMetadataService _externalMetadataService;
private readonly ISmartCollectionSyncService _smartCollectionSyncService;
private readonly IWantToReadSyncService _wantToReadSyncService;
private readonly IEventHub _eventHub;
public static BackgroundJobServer Client => new ();
@@ -80,6 +82,7 @@ public class TaskScheduler : ITaskScheduler
public const string LicenseCheckId = "license-check";
public const string KavitaPlusDataRefreshId = "kavita+-data-refresh";
public const string KavitaPlusStackSyncId = "kavita+-stack-sync";
public const string KavitaPlusWantToReadSyncId = "kavita+-want-to-read-sync";
public static readonly ImmutableArray<string> ScanTasks =
["ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"];
@@ -98,7 +101,8 @@ public class TaskScheduler : ITaskScheduler
ICleanupService cleanupService, IStatsService statsService, IVersionUpdaterService versionUpdaterService,
IThemeService themeService, IWordCountAnalyzerService wordCountAnalyzerService, IStatisticService statisticService,
IMediaConversionService mediaConversionService, IScrobblingService scrobblingService, ILicenseService licenseService,
IExternalMetadataService externalMetadataService, ISmartCollectionSyncService smartCollectionSyncService, IEventHub eventHub)
IExternalMetadataService externalMetadataService, ISmartCollectionSyncService smartCollectionSyncService,
IWantToReadSyncService wantToReadSyncService, IEventHub eventHub)
{
_cacheService = cacheService;
_logger = logger;
@@ -117,6 +121,7 @@ public class TaskScheduler : ITaskScheduler
_licenseService = licenseService;
_externalMetadataService = externalMetadataService;
_smartCollectionSyncService = smartCollectionSyncService;
_wantToReadSyncService = wantToReadSyncService;
_eventHub = eventHub;
}
@@ -204,12 +209,15 @@ public class TaskScheduler : ITaskScheduler
RecurringJob.AddOrUpdate(CheckScrobblingTokensId, () => _scrobblingService.CheckExternalAccessTokens(),
Cron.Daily, RecurringJobOptions);
BackgroundJob.Enqueue(() => _scrobblingService.CheckExternalAccessTokens()); // We also kick off an immediate check on startup
RecurringJob.AddOrUpdate(LicenseCheckId, () => _licenseService.HasActiveLicense(true),
// Get the License Info (and cache it) on first load. This will internally cache the GitHub releases for the Version Service
await _licenseService.GetLicenseInfo(true); // Kick this off first to cache it then let it refresh every 9 hours (8 hour cache)
RecurringJob.AddOrUpdate(LicenseCheckId, () => _licenseService.GetLicenseInfo(false),
LicenseService.Cron, RecurringJobOptions);
// KavitaPlus Scrobbling (hourly)
RecurringJob.AddOrUpdate(ProcessScrobblingEventsId, () => _scrobblingService.ProcessUpdatesSinceLastSync(),
"0 */4 * * *", RecurringJobOptions);
"0 */1 * * *", RecurringJobOptions);
RecurringJob.AddOrUpdate(ProcessProcessedScrobblingEventsId, () => _scrobblingService.ClearProcessedEvents(),
Cron.Daily, RecurringJobOptions);
@@ -218,8 +226,13 @@ public class TaskScheduler : ITaskScheduler
() => _externalMetadataService.FetchExternalDataTask(), Cron.Daily(Rnd.Next(1, 4)),
RecurringJobOptions);
// This shouldn't be so close to fetching data due to Rate limit concerns
RecurringJob.AddOrUpdate(KavitaPlusStackSyncId,
() => _smartCollectionSyncService.Sync(), Cron.Daily(Rnd.Next(1, 4)),
() => _smartCollectionSyncService.Sync(), Cron.Daily(Rnd.Next(6, 10)),
RecurringJobOptions);
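// KavitaPlus Want to Read sync (weekly, on a random day of the week)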
RecurringJob.AddOrUpdate(KavitaPlusWantToReadSyncId,
() => _wantToReadSyncService.Sync(), Cron.Weekly(DayOfWeekHelper.Random()),
RecurringJobOptions);
}

View file

@@ -18,6 +18,7 @@ using Microsoft.Extensions.Logging;
using NetVips;
namespace API.Services.Tasks.Metadata;
#nullable enable
public interface ICoverDbService
{
@@ -50,6 +51,10 @@ public class CoverDbService : ICoverDbService
{
["https://app.plex.tv"] = "https://plex.tv"
};
/// <summary>
/// How long the cached publisher/favicon lists on disk are considered valid
/// </summary>
private static readonly TimeSpan CacheDuration = TimeSpan.FromDays(1);
public CoverDbService(ILogger<CoverDbService> logger, IDirectoryService directoryService,
IEasyCachingProviderFactory cacheFactory, IHostEnvironment env)
@@ -229,7 +234,7 @@ public class CoverDbService : ICoverDbService
_logger.LogTrace("Fetching publisher image from {Url}", personImageLink.Sanitize());
// Download the publisher file using Flurl
// Download the file using Flurl
var personStream = await personImageLink
.AllowHttpStatus("2xx,304")
.GetStreamAsync();
@@ -263,7 +268,7 @@ public class CoverDbService : ICoverDbService
private async Task<string> GetCoverPersonImagePath(Person person)
{
var tempFile = Path.Join(_directoryService.TempDirectory, "people.yml");
var tempFile = Path.Join(_directoryService.LongTermCacheDirectory, "people.yml");
// Check if the file already exists and skip download in Development environment
if (File.Exists(tempFile))
@@ -286,7 +291,7 @@ public class CoverDbService : ICoverDbService
if (!File.Exists(tempFile))
{
var masterPeopleFile = await $"{NewHost}people/people.yml"
.DownloadFileAsync(_directoryService.TempDirectory);
.DownloadFileAsync(_directoryService.LongTermCacheDirectory);
if (!File.Exists(tempFile) || string.IsNullOrEmpty(masterPeopleFile))
{
@@ -307,12 +312,16 @@ public class CoverDbService : ICoverDbService
return $"{NewHost}{coverAuthor.ImagePath}";
}
private static async Task<string> FallbackToKavitaReaderFavicon(string baseUrl)
private async Task<string> FallbackToKavitaReaderFavicon(string baseUrl)
{
const string urlsFileName = "urls.txt";
var correctSizeLink = string.Empty;
// TODO: Pull this down and store it in temp/ to save on requests
var allOverrides = await $"{NewHost}favicons/urls.txt"
.GetStringAsync();
var allOverrides = await GetCachedData(urlsFileName);
if (string.IsNullOrEmpty(allOverrides))
{
    allOverrides = await $"{NewHost}favicons/{urlsFileName}".GetStringAsync();
    // Cache immediately after a real fetch so lookups within CacheDuration skip the request
    await CacheDataAsync(urlsFileName, allOverrides);
}
if (!string.IsNullOrEmpty(allOverrides))
{
@@ -335,11 +344,16 @@ public class CoverDbService : ICoverDbService
return correctSizeLink;
}
private static async Task<string> FallbackToKavitaReaderPublisher(string publisherName)
private async Task<string> FallbackToKavitaReaderPublisher(string publisherName)
{
const string publisherFileName = "publishers.txt";
var externalLink = string.Empty;
// TODO: Pull this down and store it in temp/ to save on requests
var allOverrides = await $"{NewHost}publishers/publishers.txt".GetStringAsync();
var allOverrides = await GetCachedData(publisherFileName);
if (string.IsNullOrEmpty(allOverrides))
{
    allOverrides = await $"{NewHost}publishers/{publisherFileName}".GetStringAsync();
    // Cache immediately after a real fetch so lookups within CacheDuration skip the request
    await CacheDataAsync(publisherFileName, allOverrides);
}
if (!string.IsNullOrEmpty(allOverrides))
{
@@ -369,4 +383,35 @@ public class CoverDbService : ICoverDbService
return externalLink;
}
private async Task CacheDataAsync(string fileName, string? content)
{
if (content == null) return;
try
{
var filePath = _directoryService.FileSystem.Path.Join(_directoryService.LongTermCacheDirectory, fileName);
await File.WriteAllTextAsync(filePath, content);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to cache {FileName}", fileName);
}
}
private async Task<string?> GetCachedData(string cacheFile)
{
// Form the full file path:
var filePath = _directoryService.FileSystem.Path.Join(_directoryService.LongTermCacheDirectory, cacheFile);
if (!File.Exists(filePath)) return null;
var fileInfo = new FileInfo(filePath);
if (DateTime.UtcNow - fileInfo.LastWriteTimeUtc <= CacheDuration)
{
return await File.ReadAllTextAsync(filePath);
}
return null;
}
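// Illustrative sketch (not part of this commit): how another remote list could reuse the
// day-long file cache above. The "themes.txt" resource and method name are hypothetical.
private async Task<string?> GetCachedRemoteListExample()
{
    const string fileName = "themes.txt"; // hypothetical resource
    var data = await GetCachedData(fileName);
    if (string.IsNullOrEmpty(data))
    {
        data = await $"{NewHost}themes/{fileName}".GetStringAsync();
        await CacheDataAsync(fileName, data); // refresh the on-disk copy only after a real fetch
    }
    return data;
}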
}

View file

@@ -325,7 +325,7 @@ public class ProcessSeries : IProcessSeries
var personSw = Stopwatch.StartNew();
var chapterPeople = chapters.SelectMany(c => c.People.Where(p => p.Role == PersonRole.Writer)).ToList();
await UpdateSeriesMetadataPeople(series.Metadata, series.Metadata.People, chapterPeople, PersonRole.Writer);
_logger.LogDebug("[TIME] Kavita took {Time} ms to process writer on Series: {File} for {Count} people", personSw.ElapsedMilliseconds, series.Name, chapterPeople.Count);
_logger.LogTrace("[TIME] Kavita took {Time} ms to process writer on Series: {File} for {Count} people", personSw.ElapsedMilliseconds, series.Name, chapterPeople.Count);
}
if (!series.Metadata.ColoristLocked)
@@ -457,7 +457,7 @@ public class ProcessSeries : IProcessSeries
await _unitOfWork.CollectionTagRepository.UpdateCollectionAgeRating(collectionTag);
}
_logger.LogDebug("[TIME] Kavita took {Time} ms to process collections on Series: {Name}", sw.ElapsedMilliseconds, series.Name);
_logger.LogTrace("[TIME] Kavita took {Time} ms to process collections on Series: {Name}", sw.ElapsedMilliseconds, series.Name);
}
@@ -918,7 +918,7 @@ public class ProcessSeries : IProcessSeries
var personSw = Stopwatch.StartNew();
var people = TagHelper.GetTagValues(comicInfo.Writer);
await UpdateChapterPeopleAsync(chapter, people, PersonRole.Writer);
_logger.LogDebug("[TIME] Kavita took {Time} ms to process writer on Chapter: {File} for {Count} people", personSw.ElapsedMilliseconds, chapter.Files.First().FileName, people.Count);
_logger.LogTrace("[TIME] Kavita took {Time} ms to process writer on Chapter: {File} for {Count} people", personSw.ElapsedMilliseconds, chapter.Files.First().FileName, people.Count);
}
if (!chapter.EditorLocked)
@@ -987,7 +987,7 @@ public class ProcessSeries : IProcessSeries
await UpdateChapterTags(chapter, tags);
}
_logger.LogDebug("[TIME] Kavita took {Time} ms to create/update Chapter: {File}", sw.ElapsedMilliseconds, chapter.Files.First().FileName);
_logger.LogTrace("[TIME] Kavita took {Time} ms to create/update Chapter: {File}", sw.ElapsedMilliseconds, chapter.Files.First().FileName);
}
private async Task UpdateChapterGenres(Chapter chapter, IEnumerable<string> genreNames)

View file

@@ -317,7 +317,7 @@ public class ScannerService : IScannerService
// Process Series
var seriesProcessStopWatch = Stopwatch.StartNew();
await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, seriesLeftToProcess, bypassFolderOptimizationChecks);
_logger.LogDebug("[TIME] Kavita took {Time} ms to process {SeriesName}", seriesProcessStopWatch.ElapsedMilliseconds, parsedSeries[pSeries][0].Series);
_logger.LogTrace("[TIME] Kavita took {Time} ms to process {SeriesName}", seriesProcessStopWatch.ElapsedMilliseconds, parsedSeries[pSeries][0].Series);
seriesLeftToProcess--;
}
@@ -644,7 +644,7 @@ public class ScannerService : IScannerService
totalFiles += pSeries.Value.Count;
var seriesProcessStopWatch = Stopwatch.StartNew();
await _processSeries.ProcessSeriesAsync(pSeries.Value, library, seriesLeftToProcess, forceUpdate);
_logger.LogDebug("[TIME] Kavita took {Time} ms to process {SeriesName}", seriesProcessStopWatch.ElapsedMilliseconds, pSeries.Value[0].Series);
_logger.LogTrace("[TIME] Kavita took {Time} ms to process {SeriesName}", seriesProcessStopWatch.ElapsedMilliseconds, pSeries.Value[0].Series);
seriesLeftToProcess--;
}

View file

@@ -60,8 +60,7 @@ public class StatsService : IStatsService
_emailService = emailService;
_cacheService = cacheService;
FlurlHttp.ConfigureClient(ApiUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
FlurlConfiguration.ConfigureClientForUrl(ApiUrl);
}
/// <summary>

View file

@@ -1,8 +1,11 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using API.DTOs.Update;
using API.Extensions;
using API.SignalR;
using Flurl.Http;
using Kavita.Common.EnvironmentInfo;
@@ -30,7 +33,7 @@ internal class GithubReleaseMetadata
/// </summary>
public required string Body { get; init; }
/// <summary>
/// Url of the release on Github
/// Url of the release on GitHub
/// </summary>
// ReSharper disable once InconsistentNaming
public required string Html_Url { get; init; }
@@ -45,11 +48,12 @@ public interface IVersionUpdaterService
{
Task<UpdateNotificationDto?> CheckForUpdate();
Task PushUpdate(UpdateNotificationDto update);
Task<IList<UpdateNotificationDto>> GetAllReleases();
Task<IList<UpdateNotificationDto>> GetAllReleases(int count = 0);
Task<int> GetNumberOfReleasesBehind();
}
public class VersionUpdaterService : IVersionUpdaterService
public partial class VersionUpdaterService : IVersionUpdaterService
{
private readonly ILogger<VersionUpdaterService> _logger;
private readonly IEventHub _eventHub;
@@ -57,37 +61,220 @@ public class VersionUpdaterService : IVersionUpdaterService
#pragma warning disable S1075
private const string GithubLatestReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases/latest";
private const string GithubAllReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases";
private const string GithubPullsUrl = "https://api.github.com/repos/Kareadita/Kavita/pulls/";
private const string GithubBranchCommitsUrl = "https://api.github.com/repos/Kareadita/Kavita/commits?sha=develop";
#pragma warning restore S1075
public VersionUpdaterService(ILogger<VersionUpdaterService> logger, IEventHub eventHub)
[GeneratedRegex(@"^\n*(.*?)\n+#{1,2}\s", RegexOptions.Singleline)]
private static partial Regex BlogPartRegex();
private static string _cacheFilePath;
private static readonly TimeSpan CacheDuration = TimeSpan.FromHours(1);
public VersionUpdaterService(ILogger<VersionUpdaterService> logger, IEventHub eventHub, IDirectoryService directoryService)
{
_logger = logger;
_eventHub = eventHub;
_cacheFilePath = Path.Combine(directoryService.LongTermCacheDirectory, "github_releases_cache.json");
FlurlHttp.ConfigureClient(GithubLatestReleasesUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
FlurlHttp.ConfigureClient(GithubAllReleasesUrl, cli =>
cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
FlurlConfiguration.ConfigureClientForUrl(GithubLatestReleasesUrl);
FlurlConfiguration.ConfigureClientForUrl(GithubAllReleasesUrl);
}
/// <summary>
/// Fetches the latest release from Github
/// Fetches the latest (stable) release from GitHub. Does not do any extra nightly release parsing.
/// </summary>
/// <returns>Latest update</returns>
public async Task<UpdateNotificationDto?> CheckForUpdate()
{
var update = await GetGithubRelease();
return CreateDto(update);
var dto = CreateDto(update);
return dto;
}
public async Task<IList<UpdateNotificationDto>> GetAllReleases()
private async Task EnrichWithNightlyInfo(List<UpdateNotificationDto> dtos)
{
var dto = dtos[0]; // Latest version
try
{
var currentVersion = new Version(dto.CurrentVersion);
var nightlyReleases = await GetNightlyReleases(currentVersion, Version.Parse(dto.UpdateVersion));
if (nightlyReleases.Count == 0) return;
// Create new DTOs for each nightly release and insert them at the beginning of the list
var nightlyDtos = new List<UpdateNotificationDto>();
foreach (var nightly in nightlyReleases)
{
var prInfo = await FetchPullRequestInfo(nightly.PrNumber);
if (prInfo == null) continue;
var sections = ParseReleaseBody(prInfo.Body);
var blogPart = ExtractBlogPart(prInfo.Body);
var nightlyDto = new UpdateNotificationDto
{
UpdateTitle = $"Nightly Release {nightly.Version} - {prInfo.Title}",
UpdateVersion = nightly.Version,
CurrentVersion = dto.CurrentVersion,
UpdateUrl = prInfo.Html_Url,
PublishDate = prInfo.Merged_At,
IsDocker = true, // Nightlies are always Docker Only
IsReleaseEqual = IsVersionEqualToBuildVersion(Version.Parse(nightly.Version)),
IsReleaseNewer = true, // Since we already filtered these in GetNightlyReleases
IsPrerelease = true, // All Nightlies are considered prerelease
Added = sections.TryGetValue("Added", out var added) ? added : [],
Changed = sections.TryGetValue("Changed", out var changed) ? changed : [],
Fixed = sections.TryGetValue("Fixed", out var bugfixes) ? bugfixes : [],
Removed = sections.TryGetValue("Removed", out var removed) ? removed : [],
Theme = sections.TryGetValue("Theme", out var theme) ? theme : [],
Developer = sections.TryGetValue("Developer", out var developer) ? developer : [],
Api = sections.TryGetValue("Api", out var api) ? api : [],
BlogPart = _markdown.Transform(blogPart.Trim()),
UpdateBody = _markdown.Transform(prInfo.Body.Trim())
};
nightlyDtos.Add(nightlyDto);
}
// Insert nightly releases at the beginning of the list
var sortedNightlyDtos = nightlyDtos.OrderByDescending(x => x.PublishDate).ToList();
dtos.InsertRange(0, sortedNightlyDtos);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to enrich nightly release information");
}
}
private async Task<PullRequestInfo?> FetchPullRequestInfo(int prNumber)
{
try
{
return await $"{GithubPullsUrl}{prNumber}"
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.GetJsonAsync<PullRequestInfo>();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to fetch PR information for #{PrNumber}", prNumber);
return null;
}
}
private async Task<List<NightlyInfo>> GetNightlyReleases(Version currentVersion, Version latestStableVersion)
{
try
{
var nightlyReleases = new List<NightlyInfo>();
var commits = await GithubBranchCommitsUrl
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.GetJsonAsync<IList<CommitInfo>>();
var commitList = commits.ToList();
bool foundLastStable = false;
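// Example of the develop-branch history this walks (newest first), assuming the usual CI pattern;
// the messages and PR number below are hypothetical:
//   "Bump versions by dotnet-bump-version."   <- CI version bump; the commit after it is the PR merge
//   "Some feature (#1234)"                    <- PR number extracted from the merge message
//   ...
//   "v0.8.4 - Stable Release"                 <- stop once a stable tag at or below the current version appears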
for (var i = 0; i < commitList.Count - 1; i++)
{
var commit = commitList[i];
var message = commit.Commit.Message.Split('\n')[0]; // Take first line only
// Skip [skip ci] commits
if (message.Contains("[skip ci]")) continue;
// Check if this is a stable release
if (message.StartsWith('v'))
{
var stableMatch = Regex.Match(message, @"v(\d+\.\d+\.\d+\.\d+)");
if (stableMatch.Success)
{
var stableVersion = new Version(stableMatch.Groups[1].Value);
// If we find a stable version lower than current, we've gone too far back
if (stableVersion <= currentVersion)
{
foundLastStable = true;
break;
}
}
continue;
}
// Look for version bumps that follow PRs
if (!foundLastStable && message == "Bump versions by dotnet-bump-version.")
{
// Get the PR commit that triggered this version bump
if (i + 1 < commitList.Count)
{
var prCommit = commitList[i + 1];
var prMessage = prCommit.Commit.Message.Split('\n')[0];
// Extract PR number using improved regex
var prMatch = Regex.Match(prMessage, @"(?:^|\s)\(#(\d+)\)|\s#(\d+)");
if (!prMatch.Success) continue;
var prNumber = int.Parse(prMatch.Groups[1].Value != "" ?
prMatch.Groups[1].Value : prMatch.Groups[2].Value);
// Get the version from AssemblyInfo.cs in this commit
var version = await GetVersionFromCommit(commit.Sha);
if (version == null) continue;
// Parse version and compare with current version
if (Version.TryParse(version, out var parsedVersion) &&
parsedVersion > latestStableVersion)
{
nightlyReleases.Add(new NightlyInfo
{
Version = version,
PrNumber = prNumber,
Date = DateTime.Parse(commit.Commit.Author.Date)
});
}
}
}
}
return nightlyReleases.OrderByDescending(x => x.Date).ToList();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to get nightly releases");
return [];
}
}
public async Task<IList<UpdateNotificationDto>> GetAllReleases(int count = 0)
{
// Attempt to fetch from cache
var cachedReleases = await TryGetCachedReleases();
if (cachedReleases != null)
{
if (count > 0)
{
// NOTE: We may want to allow the admin to clear the GitHub cache
return cachedReleases.Take(count).ToList();
}
return cachedReleases;
}
var updates = await GetGithubReleases();
var updateDtos = updates.Select(CreateDto)
var query = updates.Select(CreateDto)
.Where(d => d != null)
.OrderByDescending(d => d!.PublishDate)
.Select(d => d!)
.ToList();
.Select(d => d!);
var updateDtos = query.ToList();
// If we're on a nightly build, enrich the information
if (updateDtos.Count != 0 && BuildInfo.Version > new Version(updateDtos[0].UpdateVersion))
{
await EnrichWithNightlyInfo(updateDtos);
}
// Find the latest dto
var latestRelease = updateDtos[0]!;
@@ -103,26 +290,56 @@ public class VersionUpdaterService : IVersionUpdaterService
latestRelease.IsOnNightlyInRelease = isNightly;
// Cache the fetched data
if (updateDtos.Count > 0)
{
await CacheReleasesAsync(updateDtos);
}
if (count > 0)
{
return updateDtos.Take(count).ToList();
}
return updateDtos;
}
private static bool IsVersionEqualToBuildVersion(Version updateVersion)
private static async Task<IList<UpdateNotificationDto>?> TryGetCachedReleases()
{
return updateVersion.Revision < 0 && BuildInfo.Version.Revision == 0 &&
CompareWithoutRevision(BuildInfo.Version, updateVersion);
if (!File.Exists(_cacheFilePath)) return null;
var fileInfo = new FileInfo(_cacheFilePath);
if (DateTime.UtcNow - fileInfo.LastWriteTimeUtc <= CacheDuration)
{
var cachedData = await File.ReadAllTextAsync(_cacheFilePath);
return System.Text.Json.JsonSerializer.Deserialize<IList<UpdateNotificationDto>>(cachedData);
}
return null;
}
private static bool CompareWithoutRevision(Version v1, Version v2)
private async Task CacheReleasesAsync(IList<UpdateNotificationDto> updates)
{
if (v1.Major != v2.Major)
return v1.Major == v2.Major;
if (v1.Minor != v2.Minor)
return v1.Minor == v2.Minor;
if (v1.Build != v2.Build)
return v1.Build == v2.Build;
return true;
try
{
var json = System.Text.Json.JsonSerializer.Serialize(updates, new System.Text.Json.JsonSerializerOptions { WriteIndented = true });
await File.WriteAllTextAsync(_cacheFilePath, json);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to cache releases");
}
}
private static bool IsVersionEqualToBuildVersion(Version updateVersion)
{
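// e.g. a GitHub tag "v0.8.4" parses to 0.8.4 (Revision == -1) and should match a local build of 0.8.4.0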
return updateVersion == BuildInfo.Version || (updateVersion.Revision < 0 && BuildInfo.Version.Revision == 0 &&
BuildInfo.Version.CompareWithoutRevision(updateVersion));
}
public async Task<int> GetNumberOfReleasesBehind()
{
var updates = await GetAllReleases();
@@ -135,18 +352,30 @@ public class VersionUpdaterService : IVersionUpdaterService
var updateVersion = new Version(update.Tag_Name.Replace("v", string.Empty));
var currentVersion = BuildInfo.Version.ToString(4);
var bodyHtml = _markdown.Transform(update.Body.Trim());
var parsedSections = ParseReleaseBody(update.Body);
var blogPart = _markdown.Transform(ExtractBlogPart(update.Body).Trim());
return new UpdateNotificationDto()
{
CurrentVersion = currentVersion,
UpdateVersion = updateVersion.ToString(),
UpdateBody = _markdown.Transform(update.Body.Trim()),
UpdateBody = bodyHtml,
UpdateTitle = update.Name,
UpdateUrl = update.Html_Url,
IsDocker = OsInfo.IsDocker,
PublishDate = update.Published_At,
IsReleaseEqual = IsVersionEqualToBuildVersion(updateVersion),
IsReleaseNewer = BuildInfo.Version < updateVersion,
Added = parsedSections.TryGetValue("Added", out var added) ? added : [],
Removed = parsedSections.TryGetValue("Removed", out var removed) ? removed : [],
Changed = parsedSections.TryGetValue("Changed", out var changed) ? changed : [],
Fixed = parsedSections.TryGetValue("Fixed", out var fixes) ? fixes : [],
Theme = parsedSections.TryGetValue("Theme", out var theme) ? theme : [],
Developer = parsedSections.TryGetValue("Developer", out var developer) ? developer : [],
Api = parsedSections.TryGetValue("Api", out var api) ? api : [],
BlogPart = blogPart
};
}
@@ -165,6 +394,26 @@ public class VersionUpdaterService : IVersionUpdaterService
}
}
private async Task<string?> GetVersionFromCommit(string commitSha)
{
try
{
// Use the raw GitHub URL format for the csproj file
var content = await $"https://raw.githubusercontent.com/Kareadita/Kavita/{commitSha}/Kavita.Common/Kavita.Common.csproj"
.WithHeader("User-Agent", "Kavita")
.GetStringAsync();
var versionMatch = Regex.Match(content, @"<AssemblyVersion>([0-9\.]+)</AssemblyVersion>");
return versionMatch.Success ? versionMatch.Groups[1].Value : null;
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to get version from commit {Sha}: {Message}", commitSha, ex.Message);
return null;
}
}
private static async Task<GithubReleaseMetadata> GetGithubRelease()
{
@@ -176,13 +425,103 @@ public class VersionUpdaterService : IVersionUpdaterService
return update;
}
private static async Task<IEnumerable<GithubReleaseMetadata>> GetGithubReleases()
private static async Task<IList<GithubReleaseMetadata>> GetGithubReleases()
{
var update = await GithubAllReleasesUrl
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.GetJsonAsync<IEnumerable<GithubReleaseMetadata>>();
.GetJsonAsync<IList<GithubReleaseMetadata>>();
return update;
}
private static string ExtractBlogPart(string body)
{
if (body.StartsWith('#')) return string.Empty;
var match = BlogPartRegex().Match(body);
return match.Success ? match.Groups[1].Value.Trim() : body.Trim();
}
private static Dictionary<string, List<string>> ParseReleaseBody(string body)
{
var sections = new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase);
var lines = body.Split('\n');
string currentSection = null;
foreach (var line in lines)
{
var trimmedLine = line.Trim();
// Check for section headers (case-insensitive)
if (trimmedLine.StartsWith('#'))
{
currentSection = trimmedLine.TrimStart('#').Trim();
sections[currentSection] = [];
continue;
}
// Parse items under a section
if (currentSection != null &&
trimmedLine.StartsWith("- ") &&
!string.IsNullOrWhiteSpace(trimmedLine))
{
// Remove "Fixed:", "Added:" etc. if present
var cleanedItem = CleanSectionItem(trimmedLine);
// Only add non-empty items
if (!string.IsNullOrWhiteSpace(cleanedItem))
{
sections[currentSection].Add(cleanedItem);
}
}
}
return sections;
}
private static string CleanSectionItem(string item)
{
// Remove everything up to and including the first ":"
var colonIndex = item.IndexOf(':');
if (colonIndex != -1)
{
item = item.Substring(colonIndex + 1).Trim();
}
return item;
}
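// Illustrative sketch (not part of this commit): what the release-body helpers above produce
// for a typical changelog body. The sample text and method name are hypothetical.
private static void ReleaseBodyParsingExample()
{
    const string body = "A short summary paragraph.\n\n# Added\n- Added: Something new\n\n# Fixed\n- Fixed: A crash on startup";

    var blogPart = ExtractBlogPart(body);   // "A short summary paragraph."
    var sections = ParseReleaseBody(body);  // { "Added": ["Something new"], "Fixed": ["A crash on startup"] }
}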
sealed class PullRequestInfo
{
public required string Title { get; init; }
public required string Body { get; init; }
public required string Html_Url { get; init; }
public required string Merged_At { get; init; }
public required int Number { get; init; }
}
sealed class CommitInfo
{
public required string Sha { get; init; }
public required CommitDetail Commit { get; init; }
public required string Html_Url { get; init; }
}
sealed class CommitDetail
{
public required string Message { get; init; }
public required CommitAuthor Author { get; init; }
}
sealed class CommitAuthor
{
public required string Date { get; init; }
}
sealed class NightlyInfo
{
public required string Version { get; init; }
public required int PrNumber { get; init; }
public required DateTime Date { get; init; }
}
}

View file

@@ -8,6 +8,7 @@ using System.Threading.Tasks;
using API.Data;
using API.DTOs.Account;
using API.Entities;
using API.Helpers;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
@@ -143,12 +144,6 @@ public class TokenService : ITokenService
public static bool HasTokenExpired(string? token)
{
if (string.IsNullOrEmpty(token)) return true;
var tokenHandler = new JwtSecurityTokenHandler();
var tokenContent = tokenHandler.ReadJwtToken(token);
var validToUtc = tokenContent.ValidTo.ToUniversalTime();
return validToUtc < DateTime.UtcNow;
return !JwtHelper.IsTokenValid(token);
}
}