Hashing (#634)
* Added the ability to see when a scan started.
* Filename-based hashing now incorporates the file's last write time, so a new copy is sent when the underlying file changes.
* Fixed a bug where the book reader was reset to dark mode whenever the site used dark mode, even if the user had set light mode.
* Added a single "Nothing here" feed entry when a collection, reading list, etc. has no items.
* Allow + in normalization so series that use + to denote a sequel are kept distinct instead of being merged with the base series.
This commit is contained in:
parent 0dbd45a41d
commit c5b62d00d0
10 changed files with 61 additions and 28 deletions
@@ -170,14 +170,14 @@ namespace API.Controllers
     var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
     var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);

-    IEnumerable<CollectionTagDto> tags;
+    IList<CollectionTagDto> tags;
     if (isAdmin)
     {
-        tags = await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync();
+        tags = (await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync()).ToList();
     }
     else
     {
-        tags = await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync();
+        tags = (await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync()).ToList();
     }
@@ -199,6 +199,14 @@ namespace API.Controllers
         });
     }

+    if (tags.Count == 0)
+    {
+        feed.Entries.Add(new FeedEntry()
+        {
+            Title = "Nothing here",
+        });
+    }
+
     return CreateXmlResult(SerializeXml(feed));
 }
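The IEnumerable-to-IList switch above is presumably what makes the new tags.Count check viable: materializing once with ToList() gives a cheap Count property and guarantees the deferred query runs exactly once. A standalone sketch of the pattern (sample data is illustrative):

using System;
using System.Collections.Generic;
using System.Linq;

class MaterializeDemo
{
    static void Main()
    {
        // A deferred query: nothing executes until it is enumerated.
        IEnumerable<int> deferred = Enumerable.Range(1, 10).Where(x => x % 2 == 0);

        // Materialize once; Count is now an O(1) property read and the
        // query will not silently execute a second time.
        IList<int> items = deferred.ToList();

        Console.WriteLine(items.Count == 0 ? "Nothing here" : $"{items.Count} items");
    }
}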
@@ -298,13 +306,13 @@ namespace API.Controllers

     var feed = CreateFeed(readingList.Title + " Reading List", $"{apiKey}/reading-list/{readingListId}", apiKey);

-    var items = await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId);
+    var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId)).ToList();
     foreach (var item in items)
     {
         feed.Entries.Add(new FeedEntry()
         {
             Id = item.ChapterId.ToString(),
-            Title = "Chapter " + item.ChapterNumber,
+            Title = $"{item.SeriesName} Chapter {item.ChapterNumber}",
             Links = new List<FeedLink>()
             {
                 CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{item.SeriesId}/volume/{item.VolumeId}/chapter/{item.ChapterId}"),
@@ -313,6 +321,14 @@ namespace API.Controllers
         });
     }

+    if (items.Count == 0)
+    {
+        feed.Entries.Add(new FeedEntry()
+        {
+            Title = "Nothing here",
+        });
+    }
+
     return CreateXmlResult(SerializeXml(feed));
@@ -371,6 +387,14 @@ namespace API.Controllers
         feed.Entries.Add(CreateSeries(seriesDto, apiKey));
     }

+    if (recentlyAdded.Count == 0)
+    {
+        feed.Entries.Add(new FeedEntry()
+        {
+            Title = "Nothing here",
+        });
+    }
+
     return CreateXmlResult(SerializeXml(feed));
 }
@@ -402,6 +426,14 @@ namespace API.Controllers
         feed.Entries.Add(CreateSeries(seriesDto, apiKey));
     }

+    if (pagedList.Count == 0)
+    {
+        feed.Entries.Add(new FeedEntry()
+        {
+            Title = "Nothing here",
+        });
+    }
+
     return CreateXmlResult(SerializeXml(feed));
 }
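The same eight-line guard now appears in four endpoints; a helper could centralize it. A minimal sketch using the Feed and FeedEntry types visible in the diff (the AddEmptyEntry name is hypothetical, not part of this commit):

// Hypothetical refactor, not in this commit: one place for the
// "Nothing here" placeholder the four endpoints above repeat.
private static void AddEmptyEntry(Feed feed, int itemCount)
{
    if (itemCount != 0) return;
    feed.Entries.Add(new FeedEntry()
    {
        Title = "Nothing here",
    });
}

// Call site, e.g.: AddEmptyEntry(feed, tags.Count); before serializing.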
@@ -1,4 +1,5 @@
+using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
 using API.Helpers;
@@ -41,8 +42,9 @@ namespace API.Extensions
     public static void AddCacheHeader(this HttpResponse response, string filename)
     {
         if (filename == null || filename.Length <= 0) return;
+        var hashContent = filename + File.GetLastWriteTimeUtc(filename);
         using var sha1 = new System.Security.Cryptography.SHA256CryptoServiceProvider();
-        response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(filename)).Select(x => x.ToString("X2"))));
+        response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2"))));
     }
 }
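The change above mixes the file's last write time into the hash input, so an unchanged filename no longer pins a stale ETag (note the variable is still named sha1 although the provider is SHA256). A self-contained sketch of the same scheme, using a temp file for illustration:

using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

class EtagDemo
{
    // Same recipe as AddCacheHeader above: SHA256 over filename + last write time.
    static string ComputeEtag(string filename)
    {
        var hashContent = filename + File.GetLastWriteTimeUtc(filename);
        using var sha = SHA256.Create();
        return string.Concat(sha.ComputeHash(Encoding.UTF8.GetBytes(hashContent))
            .Select(b => b.ToString("X2")));
    }

    static void Main()
    {
        var path = Path.GetTempFileName();
        var before = ComputeEtag(path);

        // Simulate the underlying file changing; bump the write time
        // explicitly so the demo is deterministic on any filesystem.
        File.SetLastWriteTimeUtc(path, DateTime.UtcNow.AddSeconds(5));
        var after = ComputeEtag(path);

        Console.WriteLine(before == after ? "stale ETag" : "new ETag issued");
        File.Delete(path);
    }
}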
@@ -48,7 +48,7 @@ namespace API.Parser
         MatchOptions,
         RegexTimeout);

-    private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9]",
+    private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9\+]",
         MatchOptions,
         RegexTimeout);
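To see the effect of adding \+ to the character class: everything outside it is stripped during normalization, so before this change a trailing + vanished and a sequel series collapsed into the base series. A sketch (only the regex comes from the diff; the Normalize wrapper and sample titles are illustrative):

using System;
using System.Text.RegularExpressions;

class NormalizeDemo
{
    // The updated pattern from the diff: strip everything except
    // alphanumerics and '+'.
    static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9\+]");

    static string Normalize(string name) =>
        NormalizeRegex.Replace(name, string.Empty).ToLower();

    static void Main()
    {
        // Hypothetical titles: a base series and its '+'-denoted sequel.
        Console.WriteLine(Normalize("Some Series"));    // someseries
        Console.WriteLine(Normalize("Some Series +"));  // someseries+  (now distinct)
    }
}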
@@ -139,6 +139,8 @@ namespace API.Services
     {
         var madeUpdate = false;
         if (series == null) return false;
+
+        // NOTE: This will fail if we replace the cover of the first volume on a first scan. Because the series will already have a cover image
         if (ShouldUpdateCoverImage(series.CoverImage, null, forceUpdate, series.CoverImageLocked))
         {
             series.Volumes ??= new List<Volume>();
@@ -1,4 +1,4 @@
-using System.Threading;
+using System;
 using API.DTOs.Update;

 namespace API.SignalR
@ -46,19 +46,6 @@ namespace API.SignalR
|
|||
};
|
||||
}
|
||||
|
||||
public static SignalRMessage ScanLibraryEvent(int libraryId, string stage)
|
||||
{
|
||||
return new SignalRMessage()
|
||||
{
|
||||
Name = SignalREvents.ScanLibrary,
|
||||
Body = new
|
||||
{
|
||||
LibraryId = libraryId,
|
||||
Stage = stage
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static SignalRMessage ScanLibraryProgressEvent(int libraryId, float progress)
|
||||
{
|
||||
return new SignalRMessage()
|
||||
|
|
@@ -68,6 +55,7 @@ namespace API.SignalR
             {
                 LibraryId = libraryId,
                 Progress = progress,
+                EventTime = DateTime.Now
             }
         };
     }
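With ScanLibraryEvent removed, scan activity flows through ScanLibraryProgressEvent, and the message body now carries an EventTime stamp. A usage sketch grounded in the diff (the hub field and the method wrapper are assumed call-site details, not from this commit):

// Hypothetical call site. MessageFactory and SignalRMessage come from
// the diff above; _messageHub is an assumed IHubContext-backed field.
public async Task ReportScanProgress(int libraryId, float progress)
{
    var message = MessageFactory.ScanLibraryProgressEvent(libraryId, progress);
    // Body now includes EventTime = DateTime.Now alongside LibraryId and
    // Progress, so clients can order or discard stale updates.
    await _messageHub.Clients.All.SendAsync(message.Name, message);
}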