Compare commits: develop...feature/cs (2 commits)

Commits: 8f1df0d4cd, cd1d1e15c5
@@ -8,4 +8,10 @@
# You can see what browsers were selected by your queries by running:
# npx browserslist
defaults
last 1 Chrome version
last 1 Firefox version
last 2 Edge major versions
last 2 Safari major versions
last 2 iOS major versions
Firefox ESR
not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
@@ -1,7 +1,6 @@
# Editor configuration, see https://editorconfig.org
root = true
[*]
charset = utf-8
indent_style = space
@@ -23,7 +22,3 @@ indent_size = 2
[*.csproj]
indent_size = 2
[*.cs]
# Disable SonarLint warning S1075 (Don't use hardcoded url)
dotnet_diagnostic.S1075.severity = none
.github/ISSUE_TEMPLATE/bug_report.yml (4 changes)
@@ -25,10 +25,10 @@ body:
- type: dropdown
id: version
attributes:
label: Kavita Version Number - If you don't see your version number listed, please update Kavita and see if your issue still persists.
label: Kavita Version Number - If you don not see your version number listed, please update Kavita and see if your issue still persists.
multiple: false
options:
- 0.8.7 - Stable
- 0.8.1 - Stable
- Nightly Testing Branch
validations:
required: true
.github/workflows/build-and-test.yml (2 changes)
@@ -17,7 +17,7 @@ jobs:
- name: Setup .NET Core
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 8.0.x
- name: Install Swashbuckle CLI
shell: powershell
.github/workflows/canary-workflow.yml (12 changes)
@@ -9,7 +9,7 @@ on:
jobs:
build:
name: Upload Kavita.Common for Version Bump
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
steps:
- name: Checkout Repo
uses: actions/checkout@v4
@@ -24,7 +24,7 @@ jobs:
version:
name: Bump version
needs: [ build ]
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
@@ -33,7 +33,7 @@ jobs:
- name: Setup .NET Core
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 8.0.x
- name: Bump versions
uses: SiqiLu/dotnet-bump-version@2.0.0
@@ -45,7 +45,7 @@ jobs:
canary:
name: Build Canary Docker
needs: [ build, version ]
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
permissions:
packages: write
contents: read
@@ -98,10 +98,10 @@ jobs:
- name: Compile dotnet app
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 8.0.x
- name: Install Swashbuckle CLI
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
- run: ./monorepo-build.sh
.github/workflows/codeql.yml (17 changes)
@@ -13,7 +13,7 @@ name: "CodeQL"
on:
push:
branches: [ "develop"]
branches: [ "develop", "main" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "develop" ]
@@ -38,7 +38,7 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ 'csharp', 'javascript-typescript' ]
language: [ 'csharp', 'javascript-typescript', 'python' ]
# CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
# Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
@@ -48,14 +48,13 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Install Swashbuckle CLI
shell: bash
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -69,7 +68,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
uses: github/codeql-action/autobuild@v2
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -82,6 +81,6 @@ jobs:
dotnet build Kavita.sln
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"
.github/workflows/develop-workflow.yml (32 changes)
@@ -7,7 +7,7 @@ on:
jobs:
debug:
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
steps:
- name: Debug Info
run: |
@@ -17,7 +17,7 @@ jobs:
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
build:
name: Upload Kavita.Common for Version Bump
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/develop'
steps:
- name: Checkout Repo
@@ -33,7 +33,7 @@ jobs:
version:
name: Bump version
needs: [ build ]
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/develop'
steps:
- uses: actions/checkout@v4
@@ -43,7 +43,7 @@ jobs:
- name: Setup .NET Core
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 8.0.x
- name: Bump versions
uses: majora2007/dotnet-bump-version@v0.0.10
@@ -55,7 +55,7 @@ jobs:
develop:
name: Build Nightly Docker
needs: [ build, version ]
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/develop'
permissions:
packages: write
@@ -128,16 +128,15 @@ jobs:
- name: Compile dotnet app
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 8.0.x
- name: Install Swashbuckle CLI
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
- run: ./monorepo-build.sh
- name: Login to Docker Hub
uses: docker/login-action@v3
if: ${{ github.repository_owner == 'Kareadita' }}
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
@@ -156,33 +155,20 @@ jobs:
id: buildx
uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: docker_meta_nightly
uses: docker/metadata-action@v5
with:
tags: |
type=raw,value=nightly
type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
images: |
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
name=ghcr.io/${{ github.repository }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm/v7,linux/arm64
push: true
tags: ${{ steps.docker_meta_nightly.outputs.tags }}
labels: ${{ steps.docker_meta_nightly.outputs.labels }}
tags: jvmilazz0/kavita:nightly, jvmilazz0/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:nightly, ghcr.io/kareadita/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
- name: Notify Discord
uses: rjstone/discord-webhook-notify@v1
if: ${{ github.repository_owner == 'Kareadita' }}
with:
severity: info
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
.github/workflows/openapi-gen.yml (68 changes)
@@ -1,68 +0,0 @@
name: Generate OpenAPI Documentation
on:
push:
branches: [ 'develop', '!release/**' ]
paths:
- '**/*.cs'
- '**/*.csproj'
pull_request:
branches: [ 'develop', '!release/**' ]
workflow_dispatch:
jobs:
generate-openapi:
runs-on: ubuntu-latest
# Only run on direct pushes to develop, not PRs
if: (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && github.repository_owner == 'Kareadita'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Install dependencies
run: dotnet restore
- name: Build project
run: dotnet build API/API.csproj --configuration Debug
- name: Get Swashbuckle version
id: swashbuckle-version
run: |
VERSION=$(grep -o '<PackageReference Include="Swashbuckle.AspNetCore" Version="[^"]*"' API/API.csproj | grep -o 'Version="[^"]*"' | cut -d'"' -f2)
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
echo "Found Swashbuckle.AspNetCore version: $VERSION"
- name: Install matching Swashbuckle CLI tool
run: |
dotnet new tool-manifest --force
dotnet tool install Swashbuckle.AspNetCore.Cli --version ${{ steps.swashbuckle-version.outputs.VERSION }}
- name: Generate OpenAPI file
run: dotnet swagger tofile --output openapi.json API/bin/Debug/net9.0/API.dll v1
- name: Check for changes
id: git-check
run: |
git add openapi.json
git diff --staged --quiet openapi.json || echo "has_changes=true" >> $GITHUB_OUTPUT
- name: Commit and push if changed
if: steps.git-check.outputs.has_changes == 'true'
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git commit -m "Update OpenAPI documentation" openapi.json
# Pull latest changes with rebase to avoid merge commits
git pull --rebase origin develop
git push
env:
GITHUB_TOKEN: ${{ secrets.REPO_GHA_PAT }}
.github/workflows/pr-check.yml (4 changes)
@@ -1,13 +1,15 @@
name: Validate PR Body
on:
push:
branches: '**'
pull_request:
branches: [ main, develop, canary ]
types: [synchronize]
jobs:
check_pr:
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
steps:
- name: Extract branch name
shell: bash
.github/workflows/release-workflow.yml (63 changes)
@@ -10,7 +10,7 @@ on:
jobs:
debug:
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
steps:
- name: Debug Info
run: |
@@ -20,13 +20,13 @@ jobs:
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
if_merged:
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
steps:
- run: |
echo The PR was merged
build:
name: Upload Kavita.Common for Version Bump
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
steps:
- name: Checkout Repo
@@ -43,7 +43,7 @@ jobs:
name: Build Stable and Nightly Docker if Release
needs: [ build ]
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
runs-on: ubuntu-24.04
runs-on: ubuntu-latest
permissions:
packages: write
contents: read
@@ -106,15 +106,14 @@ jobs:
- name: Compile dotnet app
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 8.0.x
- name: Install Swashbuckle CLI
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
- run: ./monorepo-build.sh
- name: Login to Docker Hub
uses: docker/login-action@v3
if: ${{ github.repository_owner == 'Kareadita' }}
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
@@ -133,50 +132,44 @@ jobs:
id: buildx
uses: docker/setup-buildx-action@v3
- name: Extract metadata (tags, labels) for Docker
id: docker_meta_stable
uses: docker/metadata-action@v5
with:
tags: |
type=raw,value=latest
type=raw,value=${{ steps.parse-version.outputs.VERSION }}
images: |
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
name=ghcr.io/${{ github.repository }}
- name: Build and push stable
id: docker_build_stable
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm/v7,linux/arm64
push: true
tags: ${{ steps.docker_meta_stable.outputs.tags }}
labels: ${{ steps.docker_meta_stable.outputs.labels }}
- name: Extract metadata (tags, labels) for Docker
id: docker_meta_nightly
uses: docker/metadata-action@v5
with:
tags: |
type=raw,value=nightly
type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
images: |
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
name=ghcr.io/${{ github.repository }}
tags: jvmilazz0/kavita:latest, jvmilazz0/kavita:${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:latest, ghcr.io/kareadita/kavita:${{ steps.parse-version.outputs.VERSION }}
- name: Build and push nightly
id: docker_build_nightly
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm/v7,linux/arm64
push: true
tags: ${{ steps.docker_meta_nightly.outputs.tags }}
labels: ${{ steps.docker_meta_nightly.outputs.labels }}
tags: jvmilazz0/kavita:nightly, jvmilazz0/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:nightly, ghcr.io/kareadita/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
- name: Image digest
run: echo ${{ steps.docker_build_stable.outputs.digest }}
- name: Image digest
run: echo ${{ steps.docker_build_nightly.outputs.digest }}
- name: Notify Discord
uses: rjstone/discord-webhook-notify@v1
with:
severity: info
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
details: '${{ steps.findPr.outputs.body }}'
text: <@&939225192553644133> A new stable build has been released.
webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
- name: Notify Discord
uses: rjstone/discord-webhook-notify@v1
with:
severity: info
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
details: '${{ steps.findPr.outputs.body }}'
text: <@&939225459156217917> <@&939225350775406643> A new nightly build has been released for docker.
webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
.gitignore (11 changes)
@@ -513,7 +513,6 @@ UI/Web/dist/
/API/config/stats/
/API/config/bookmarks/
/API/config/favicons/
/API/config/cache-long/
/API/config/kavita.db
/API/config/kavita.db-shm
/API/config/kavita.db-wal
@@ -525,7 +524,6 @@ UI/Web/dist/
/API/config/Hangfire.db
/API/config/Hangfire-log.db
API/config/covers/
API/config/images/*
API/config/stats/*
API/config/stats/app_stats.json
API/config/pre-metadata/
@@ -538,9 +536,3 @@ UI/Web/.angular/
BenchmarkDotNet.Artifacts
API.Tests/Services/Test Data/ImageService/**/*_output*
API.Tests/Services/Test Data/ImageService/**/*_baseline*
API.Tests/Services/Test Data/ImageService/**/*.html
API.Tests/Services/Test Data/ScannerService/ScanTests/**/*
API.Tests/Services/Test Data/ImageService/Covers/*_output*
API.Tests/Services/Test Data/ImageService/Covers/*_baseline*
API.Tests/Services/Test Data/ImageService/Covers/index.html
@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
<OutputType>Exe</OutputType>
</PropertyGroup>
@@ -10,9 +10,9 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" Version="0.15.1" />
<PackageReference Include="BenchmarkDotNet.Annotations" Version="0.15.1" />
<PackageReference Include="NSubstitute" Version="5.3.0" />
<PackageReference Include="BenchmarkDotNet" Version="0.13.12" />
<PackageReference Include="BenchmarkDotNet.Annotations" Version="0.13.12" />
<PackageReference Include="NSubstitute" Version="5.1.0" />
</ItemGroup>
<ItemGroup>
@@ -26,10 +26,5 @@
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<None Update="Data\AesopsFables.epub">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>
@@ -32,7 +32,7 @@ public class ArchiveServiceBenchmark
public ArchiveServiceBenchmark()
{
_directoryService = new DirectoryService(null, new FileSystem());
_imageService = new ImageService(null, _directoryService);
_imageService = new ImageService(null, _directoryService, Substitute.For<IEasyCachingProviderFactory>());
_archiveService = new ArchiveService(new NullLogger<ArchiveService>(), _directoryService, _imageService, Substitute.For<IMediaErrorService>());
}
@@ -1,41 +0,0 @@
using API.Helpers.Builders;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Order;
using System;
using API.Entities.Enums;
namespace API.Benchmark
{
[StopOnFirstError]
[MemoryDiagnoser]
[RankColumn]
[Orderer(SummaryOrderPolicy.FastestToSlowest)]
[SimpleJob(launchCount: 1, warmupCount: 5, invocationCount: 20)]
public class KoreaderHashBenchmark
{
private const string sourceEpub = "./Data/AesopsFables.epub";
[Benchmark(Baseline = true)]
public void TestBuildManga_baseline()
{
var file = new MangaFileBuilder(sourceEpub, MangaFormat.Epub)
.Build();
if (file == null)
{
throw new Exception("Failed to build manga file");
}
}
[Benchmark]
public void TestBuildManga_withHash()
{
var file = new MangaFileBuilder(sourceEpub, MangaFormat.Epub)
.WithHash()
.Build();
if (file == null)
{
throw new Exception("Failed to build manga file");
}
}
}
}
@@ -1,22 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.6" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="NSubstitute" Version="5.3.0" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="22.0.14" />
<PackageReference Include="TestableIO.System.IO.Abstractions.Wrappers" Version="22.0.14" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.1">
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="8.0.6" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
<PackageReference Include="NSubstitute" Version="5.1.0" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="21.0.22" />
<PackageReference Include="TestableIO.System.IO.Abstractions.Wrappers" Version="21.0.22" />
<PackageReference Include="xunit" Version="2.8.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.1">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<PackageReference Include="coverlet.collector" Version="6.0.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
@@ -28,18 +28,12 @@
<ItemGroup>
<Folder Include="Services\Test Data\ArchiveService\ComicInfos" />
<Folder Include="Services\Test Data\CoverDbService\" />
<Folder Include="Services\Test Data\ImageService\Covers\" />
<Folder Include="Services\Test Data\ScannerService\Manga" />
</ItemGroup>
<ItemGroup>
<None Remove="Extensions\Test Data\modified on run.txt" />
</ItemGroup>
<ItemGroup>
<None Update="Data\AesopsFables.epub">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
@@ -9,7 +10,7 @@ using API.Helpers;
using API.Helpers.Builders;
using API.Services;
using AutoMapper;
using Hangfire;
using Microsoft.AspNetCore.Identity;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
@@ -18,34 +19,37 @@ using NSubstitute;
namespace API.Tests;
public abstract class AbstractDbTest : AbstractFsTest , IDisposable
public abstract class AbstractDbTest
{
protected readonly DataContext Context;
protected readonly IUnitOfWork UnitOfWork;
protected readonly IMapper Mapper;
private readonly DbConnection _connection;
private bool _disposed;
protected readonly DbConnection _connection;
protected readonly DataContext _context;
protected readonly IUnitOfWork _unitOfWork;
protected const string CacheDirectory = "C:/kavita/config/cache/";
protected const string CoverImageDirectory = "C:/kavita/config/covers/";
protected const string BackupDirectory = "C:/kavita/config/backups/";
protected const string LogDirectory = "C:/kavita/config/logs/";
protected const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
protected const string SiteThemeDirectory = "C:/kavita/config/themes/";
protected const string TempDirectory = "C:/kavita/config/temp/";
protected const string DataDirectory = "C:/data/";
protected AbstractDbTest()
{
var contextOptions = new DbContextOptionsBuilder<DataContext>()
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.EnableSensitiveDataLogging()
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
Context = new DataContext(contextOptions);
Context.Database.EnsureCreated(); // Ensure DB schema is created
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
Mapper = config.CreateMapper();
var mapper = config.CreateMapper();
GlobalConfiguration.Configuration.UseInMemoryStorage();
UnitOfWork = new UnitOfWork(Context, Mapper, null);
_unitOfWork = new UnitOfWork(_context, mapper, null);
}
private static DbConnection CreateInMemoryDatabase()
@@ -58,79 +62,47 @@ public abstract class AbstractDbTest : AbstractFsTest , IDisposable
private async Task<bool> SeedDb()
{
try
{
await Context.Database.EnsureCreatedAsync();
var filesystem = CreateFileSystem();
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(Context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
setting.Value = BookmarkDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
setting.Value = BookmarkDirectory;
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
setting.Value = "10";
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
setting.Value = "10";
Context.ServerSetting.Update(setting);
_context.ServerSetting.Update(setting);
Context.Library.Add(new LibraryBuilder("Manga")
.WithAllowMetadataMatching(true)
.WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
.Build());
await Context.SaveChangesAsync();
await Seed.SeedMetadataSettings(Context);
return true;
}
catch (Exception ex)
{
Console.WriteLine($"[SeedDb] Error: {ex.Message}");
return false;
}
_context.Library.Add(new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
.Build());
return await _context.SaveChangesAsync() > 0;
}
protected abstract Task ResetDb();
public void Dispose()
protected static MockFileSystem CreateFileSystem()
{
Dispose(true);
GC.SuppressFinalize(this);
}
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
fileSystem.AddDirectory("C:/kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory(BookmarkDirectory);
fileSystem.AddDirectory(SiteThemeDirectory);
fileSystem.AddDirectory(LogDirectory);
fileSystem.AddDirectory(TempDirectory);
fileSystem.AddDirectory(DataDirectory);
protected virtual void Dispose(bool disposing)
{
if (_disposed) return;
if (disposing)
{
Context?.Dispose();
_connection?.Dispose();
}
_disposed = true;
}
/// <summary>
/// Add a role to an existing User. Commits.
/// </summary>
/// <param name="userId"></param>
/// <param name="roleName"></param>
protected async Task AddUserWithRole(int userId, string roleName)
{
var role = new AppRole { Id = userId, Name = roleName, NormalizedName = roleName.ToUpper() };
await Context.Roles.AddAsync(role);
await Context.UserRoles.AddAsync(new AppUserRole { UserId = userId, RoleId = userId });
await Context.SaveChangesAsync();
return fileSystem;
}
}
@@ -1,44 +0,0 @@
using System.IO;
using System.IO.Abstractions;
using System.IO.Abstractions.TestingHelpers;
using API.Services.Tasks.Scanner.Parser;
namespace API.Tests;
public abstract class AbstractFsTest
{
protected static readonly string Root = Parser.NormalizePath(Path.GetPathRoot(Directory.GetCurrentDirectory()));
protected static readonly string ConfigDirectory = Root + "kavita/config/";
protected static readonly string CacheDirectory = ConfigDirectory + "cache/";
protected static readonly string CacheLongDirectory = ConfigDirectory + "cache-long/";
protected static readonly string CoverImageDirectory = ConfigDirectory + "covers/";
protected static readonly string BackupDirectory = ConfigDirectory + "backups/";
protected static readonly string LogDirectory = ConfigDirectory + "logs/";
protected static readonly string BookmarkDirectory = ConfigDirectory + "bookmarks/";
protected static readonly string SiteThemeDirectory = ConfigDirectory + "themes/";
protected static readonly string TempDirectory = ConfigDirectory + "temp/";
protected static readonly string ThemesDirectory = ConfigDirectory + "theme";
protected static readonly string DataDirectory = Root + "data/";
protected static MockFileSystem CreateFileSystem()
{
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory(Root + "kavita/");
fileSystem.AddDirectory(Root + "kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CacheLongDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory(BookmarkDirectory);
fileSystem.AddDirectory(SiteThemeDirectory);
fileSystem.AddDirectory(LogDirectory);
fileSystem.AddDirectory(TempDirectory);
fileSystem.AddDirectory(DataDirectory);
fileSystem.AddDirectory(ThemesDirectory);
return fileSystem;
}
}
@@ -1,4 +1,5 @@
using API.Helpers.Converters;
using Hangfire;
using Xunit;
namespace API.Tests.Converters;
@@ -142,7 +142,7 @@ public class ChapterListExtensionsTests
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
};
Assert.Equal(chapterList[0], chapterList.GetFirstChapterWithFiles());
Assert.Equal(chapterList.First(), chapterList.GetFirstChapterWithFiles());
}
[Fact]
@@ -150,13 +150,13 @@ public class ChapterListExtensionsTests
{
var chapterList = new List<Chapter>()
{
CreateChapter("darker than black", Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
};
chapterList[0].Files = new List<MangaFile>();
chapterList.First().Files = new List<MangaFile>();
Assert.Equal(chapterList[^1], chapterList.GetFirstChapterWithFiles());
Assert.Equal(chapterList.Last(), chapterList.GetFirstChapterWithFiles());
}
@@ -181,7 +181,7 @@ public class ChapterListExtensionsTests
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
};
chapterList[0].ReleaseDate = new DateTime(10, 1, 1, 0, 0, 0, DateTimeKind.Utc);
chapterList[0].ReleaseDate = new DateTime(10, 1, 1);
chapterList[1].ReleaseDate = DateTime.MinValue;
Assert.Equal(0, chapterList.MinimumReleaseYear());
@@ -196,8 +196,8 @@ public class ChapterListExtensionsTests
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
};
chapterList[0].ReleaseDate = new DateTime(2002, 1, 1, 0, 0, 0, DateTimeKind.Utc);
chapterList[1].ReleaseDate = new DateTime(2012, 2, 1, 0, 0, 0, DateTimeKind.Utc);
chapterList[0].ReleaseDate = new DateTime(2002, 1, 1);
chapterList[1].ReleaseDate = new DateTime(2012, 2, 1);
Assert.Equal(2002, chapterList.MinimumReleaseYear());
}
@@ -1,31 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using API.Entities.Enums;
using API.Extensions;
using Xunit;
namespace API.Tests.Extensions;
public class EncodeFormatExtensionsTests
{
[Fact]
public void GetExtension_ShouldReturnCorrectExtensionForAllValues()
{
// Arrange
var expectedExtensions = new Dictionary<EncodeFormat, string>
{
{ EncodeFormat.PNG, ".png" },
{ EncodeFormat.WEBP, ".webp" },
{ EncodeFormat.AVIF, ".avif" }
};
// Act & Assert
foreach (var format in Enum.GetValues(typeof(EncodeFormat)).Cast<EncodeFormat>())
{
var extension = format.GetExtension();
Assert.Equal(expectedExtensions[format], extension);
}
}
}
@@ -7,6 +7,7 @@ using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
using API.Tests.Helpers;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
@@ -1,9 +1,11 @@
using System.Collections.Generic;
using System.Linq;
using API.Data;
using API.Data.Misc;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Person;
using API.Entities.Metadata;
using API.Extensions;
using API.Extensions.QueryExtensions;
using API.Helpers.Builders;
using Xunit;
@@ -67,7 +69,7 @@ public class QueryableExtensionsTests
[Theory]
[InlineData(true, 2)]
[InlineData(false, 2)]
[InlineData(false, 1)]
public void RestrictAgainstAgeRestriction_Genre_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
var items = new List<Genre>()
@@ -94,7 +96,7 @@ public class QueryableExtensionsTests
[Theory]
[InlineData(true, 2)]
[InlineData(false, 2)]
[InlineData(false, 1)]
public void RestrictAgainstAgeRestriction_Tag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
var items = new List<Tag>()
@@ -121,46 +123,29 @@ public class QueryableExtensionsTests
[Theory]
[InlineData(true, 2)]
[InlineData(false, 2)]
public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedPeopleCount)
[InlineData(false, 1)]
public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
// Arrange
var items = new List<Person>
var items = new List<Person>()
{
CreatePersonWithSeriesMetadata("Test1", AgeRating.Teen),
CreatePersonWithSeriesMetadata("Test2", AgeRating.Unknown, AgeRating.Teen), // 2 series on this person, restrict will still allow access
CreatePersonWithSeriesMetadata("Test3", AgeRating.X18Plus)
new PersonBuilder("Test", PersonRole.Character)
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
.Build(),
new PersonBuilder("Test", PersonRole.Character)
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
.Build(),
new PersonBuilder("Test", PersonRole.Character)
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
.Build(),
};
var ageRestriction = new AgeRestriction
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
{
AgeRating = AgeRating.Teen,
IncludeUnknowns = includeUnknowns
};
// Act
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(ageRestriction);
// Assert
Assert.Equal(expectedPeopleCount, filtered.Count());
}
private static Person CreatePersonWithSeriesMetadata(string name, params AgeRating[] ageRatings)
{
var person = new PersonBuilder(name).Build();
foreach (var ageRating in ageRatings)
{
var seriesMetadata = new SeriesMetadataBuilder().WithAgeRating(ageRating).Build();
person.SeriesMetadataPeople.Add(new SeriesMetadataPeople
{
SeriesMetadata = seriesMetadata,
Person = person,
Role = PersonRole.Character // Role is now part of the relationship
});
}
return person;
});
Assert.Equal(expectedCount, filtered.Count());
}
[Theory]
@@ -185,35 +185,6 @@ public class SeriesExtensionsTests
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
}
[Fact]
public void GetCoverImage_JustVolumes_ButVolume0()
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
.WithVolume(new VolumeBuilder("0")
.WithName("Volume 0")
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Volume 0")
.Build())
.Build())
.WithVolume(new VolumeBuilder("1")
.WithName("Volume 1")
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Volume 1")
.Build())
.Build())
.Build();
foreach (var vol in series.Volumes)
{
vol.CoverImage = vol.Chapters.MinBy(x => x.SortOrder, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1", series.GetCoverImage());
}
[Fact]
public void GetCoverImage_JustSpecials_WithDecimal()
{
@@ -1,81 +0,0 @@
using System;
using API.Extensions;
using Xunit;
namespace API.Tests.Extensions;
public class VersionHelperTests
{
[Fact]
public void CompareWithoutRevision_ShouldReturnTrue_WhenMajorMinorBuildMatch()
{
// Arrange
var v1 = new Version(1, 2, 3, 4);
var v2 = new Version(1, 2, 3, 5);
// Act
var result = v1.CompareWithoutRevision(v2);
// Assert
Assert.True(result);
}
[Fact]
public void CompareWithoutRevision_ShouldHandleBuildlessVersions()
{
// Arrange
var v1 = new Version(1, 2);
var v2 = new Version(1, 2);
// Act
var result = v1.CompareWithoutRevision(v2);
// Assert
Assert.True(result);
}
[Theory]
[InlineData(1, 2, 3, 1, 2, 4)]
[InlineData(1, 2, 3, 1, 2, 0)]
public void CompareWithoutRevision_ShouldReturnFalse_WhenBuildDiffers(
int major1, int minor1, int build1,
int major2, int minor2, int build2)
{
var v1 = new Version(major1, minor1, build1);
var v2 = new Version(major2, minor2, build2);
var result = v1.CompareWithoutRevision(v2);
Assert.False(result);
}
[Theory]
[InlineData(1, 2, 3, 1, 3, 3)]
[InlineData(1, 2, 3, 1, 0, 3)]
public void CompareWithoutRevision_ShouldReturnFalse_WhenMinorDiffers(
int major1, int minor1, int build1,
int major2, int minor2, int build2)
{
var v1 = new Version(major1, minor1, build1);
var v2 = new Version(major2, minor2, build2);
var result = v1.CompareWithoutRevision(v2);
Assert.False(result);
}
[Theory]
[InlineData(1, 2, 3, 2, 2, 3)]
[InlineData(1, 2, 3, 0, 2, 3)]
public void CompareWithoutRevision_ShouldReturnFalse_WhenMajorDiffers(
int major1, int minor1, int build1,
int major2, int minor2, int build2)
{
var v1 = new Version(major1, minor1, build1);
var v2 = new Version(major2, minor2, build2);
var result = v1.CompareWithoutRevision(v2);
Assert.False(result);
}
}
@@ -3,6 +3,7 @@ using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Builders;
using API.Tests.Helpers;
using Xunit;
namespace API.Tests.Extensions;
@@ -1,178 +0,0 @@
using API.Helpers;
using Xunit;
namespace API.Tests.Helpers;
public class BookSortTitlePrefixHelperTests
{
[Theory]
[InlineData("The Avengers", "Avengers")]
[InlineData("A Game of Thrones", "Game of Thrones")]
[InlineData("An American Tragedy", "American Tragedy")]
public void TestEnglishPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("El Quijote", "Quijote")]
[InlineData("La Casa de Papel", "Casa de Papel")]
[InlineData("Los Miserables", "Miserables")]
[InlineData("Las Vegas", "Vegas")]
[InlineData("Un Mundo Feliz", "Mundo Feliz")]
[InlineData("Una Historia", "Historia")]
public void TestSpanishPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("Le Petit Prince", "Petit Prince")]
[InlineData("La Belle et la Bête", "Belle et la Bête")]
[InlineData("Les Misérables", "Misérables")]
[InlineData("Un Amour de Swann", "Amour de Swann")]
[InlineData("Une Vie", "Vie")]
[InlineData("Des Souris et des Hommes", "Souris et des Hommes")]
public void TestFrenchPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("Der Herr der Ringe", "Herr der Ringe")]
[InlineData("Die Verwandlung", "Verwandlung")]
[InlineData("Das Kapital", "Kapital")]
[InlineData("Ein Sommernachtstraum", "Sommernachtstraum")]
[InlineData("Eine Geschichte", "Geschichte")]
public void TestGermanPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("Il Nome della Rosa", "Nome della Rosa")]
[InlineData("La Divina Commedia", "Divina Commedia")]
[InlineData("Lo Hobbit", "Hobbit")]
[InlineData("Gli Ultimi", "Ultimi")]
[InlineData("Le Città Invisibili", "Città Invisibili")]
[InlineData("Un Giorno", "Giorno")]
[InlineData("Una Notte", "Notte")]
public void TestItalianPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("O Alquimista", "Alquimista")]
[InlineData("A Moreninha", "Moreninha")]
[InlineData("Os Lusíadas", "Lusíadas")]
[InlineData("As Meninas", "Meninas")]
[InlineData("Um Defeito de Cor", "Defeito de Cor")]
[InlineData("Uma História", "História")]
public void TestPortuguesePrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("", "")] // Empty string returns empty
[InlineData("Book", "Book")] // Single word, no change
[InlineData("Avengers", "Avengers")] // No prefix, no change
public void TestNoPrefixCases(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("The", "The")] // Just a prefix word alone
[InlineData("A", "A")] // Just single letter prefix alone
[InlineData("Le", "Le")] // French prefix alone
public void TestPrefixWordAlone(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("THE AVENGERS", "AVENGERS")] // All caps
[InlineData("the avengers", "avengers")] // All lowercase
[InlineData("The AVENGERS", "AVENGERS")] // Mixed case
[InlineData("tHe AvEnGeRs", "AvEnGeRs")] // Random case
public void TestCaseInsensitivity(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("Then Came You", "Then Came You")] // "The" + "n" = not a prefix
[InlineData("And Then There Were None", "And Then There Were None")] // "An" + "d" = not a prefix
[InlineData("Elsewhere", "Elsewhere")] // "El" + "sewhere" = not a prefix (no space)
[InlineData("Lesson Plans", "Lesson Plans")] // "Les" + "son" = not a prefix (no space)
[InlineData("Theory of Everything", "Theory of Everything")] // "The" + "ory" = not a prefix
public void TestFalsePositivePrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("The ", "The ")] // Prefix with only space after - returns original
[InlineData("La ", "La ")] // Same for other languages
[InlineData("El ", "El ")] // Same for Spanish
public void TestPrefixWithOnlySpaceAfter(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("The Multiple Spaces", " Multiple Spaces")] // Doesn't trim extra spaces from remainder
[InlineData("Le Petit Prince", " Petit Prince")] // Leading space preserved in remainder
public void TestSpaceHandling(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("The The Matrix", "The Matrix")] // Removes first "The", leaves second
[InlineData("A A Clockwork Orange", "A Clockwork Orange")] // Removes first "A", leaves second
[InlineData("El El Cid", "El Cid")] // Spanish version
public void TestRepeatedPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("L'Étranger", "L'Étranger")] // French contraction - no space, no change
[InlineData("D'Artagnan", "D'Artagnan")] // Contraction - no space, no change
[InlineData("The-Matrix", "The-Matrix")] // Hyphen instead of space - no change
[InlineData("The.Avengers", "The.Avengers")] // Period instead of space - no change
public void TestNonSpaceSeparators(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("三国演义", "三国演义")] // Chinese - no processing due to CJK detection
[InlineData("한국어", "한국어")] // Korean - not in CJK range, would be processed normally
public void TestCjkLanguages(string inputString, string expected)
{
// NOTE: These don't do anything, I am waiting for user input on if these are needed
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("नमस्ते दुनिया", "नमस्ते दुनिया")] // Hindi - not CJK, processed normally
[InlineData("مرحبا بالعالم", "مرحبا بالعالم")] // Arabic - not CJK, processed normally
[InlineData("שלום עולם", "שלום עולם")] // Hebrew - not CJK, processed normally
public void TestNonLatinNonCjkScripts(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("в мире", "мире")] // Russian "в" (in) - should be removed
[InlineData("на столе", "столе")] // Russian "на" (on) - should be removed
[InlineData("с друзьями", "друзьями")] // Russian "с" (with) - should be removed
public void TestRussianPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
}
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using API.Entities;
using API.Entities.Enums;
using API.Helpers;
using API.Helpers.Builders;
@@ -10,9 +11,9 @@ using Xunit;
namespace API.Tests.Helpers;
public class CacheHelperTests: AbstractFsTest
public class CacheHelperTests
{
private static readonly string TestCoverImageDirectory = Root;
private const string TestCoverImageDirectory = @"c:\";
private const string TestCoverImageFile = "thumbnail.jpg";
private readonly string _testCoverPath = Path.Join(TestCoverImageDirectory, TestCoverImageFile);
private const string TestCoverArchive = @"file in folder.zip";
@@ -36,29 +37,24 @@ public class CacheHelperTests: AbstractFsTest
[Theory]
[InlineData("", false)]
[InlineData("C:/", false)]
[InlineData(null, false)]
public void CoverImageExists_DoesFileExist(string coverImage, bool exists)
{
Assert.Equal(exists, _cacheHelper.CoverImageExists(coverImage));
}
[Fact]
public void CoverImageExists_DoesFileExistRoot()
{
Assert.False(_cacheHelper.CoverImageExists(Root));
}
[Fact]
public void CoverImageExists_FileExists()
{
Assert.True(_cacheHelper.CoverImageExists(Path.Join(TestCoverImageDirectory, TestCoverArchive)));
Assert.True(_cacheHelper.CoverImageExists(TestCoverArchive));
}
[Fact]
public void ShouldUpdateCoverImage_OnFirstRun()
{
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.True(_cacheHelper.ShouldUpdateCoverImage(null, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
@@ -69,7 +65,7 @@ public class CacheHelperTests: AbstractFsTest
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked()
{
// Represents first run
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
@@ -80,7 +76,7 @@ public class CacheHelperTests: AbstractFsTest
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked_2()
{
// Represents first run
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now,
@@ -91,7 +87,7 @@ public class CacheHelperTests: AbstractFsTest
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked()
{
// Represents first run
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
@@ -102,7 +98,7 @@ public class CacheHelperTests: AbstractFsTest
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked_Modified()
{
// Represents first run
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
@@ -126,7 +122,7 @@ public class CacheHelperTests: AbstractFsTest
var cacheHelper = new CacheHelper(fileService);
var created = DateTime.Now.Subtract(TimeSpan.FromHours(1));
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(DateTime.Now.Subtract(TimeSpan.FromMinutes(1)))
.Build();
@@ -137,10 +133,9 @@ public class CacheHelperTests: AbstractFsTest
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceCreated()
{
var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
LastWriteTime =now,
LastWriteTime = DateTimeOffset.Now
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
@@ -152,12 +147,12 @@ public class CacheHelperTests: AbstractFsTest
var cacheHelper = new CacheHelper(fileService);
var chapter = new ChapterBuilder("1")
.WithLastModified(now.DateTime)
.WithCreated(now.DateTime)
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
.WithCreated(filesystemFile.LastWriteTime.DateTime)
.Build();
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(now.DateTime)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
.Build();
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
}
@@ -165,10 +160,9 @@ public class CacheHelperTests: AbstractFsTest
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified()
{
var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
LastWriteTime = now,
LastWriteTime = DateTimeOffset.Now
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
@@ -180,12 +174,12 @@ public class CacheHelperTests: AbstractFsTest
var cacheHelper = new CacheHelper(fileService);
var chapter = new ChapterBuilder("1")
.WithLastModified(now.DateTime)
.WithCreated(now.DateTime)
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
.WithCreated(filesystemFile.LastWriteTime.DateTime)
.Build();
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(now.DateTime)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
.Build();
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
@@ -194,10 +188,9 @@ public class CacheHelperTests: AbstractFsTest
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified_ForceUpdate()
{
var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
LastWriteTime = now.DateTime,
LastWriteTime = DateTimeOffset.Now
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
@@ -209,12 +202,12 @@ public class CacheHelperTests: AbstractFsTest
var cacheHelper = new CacheHelper(fileService);
var chapter = new ChapterBuilder("1")
.WithLastModified(now.DateTime)
.WithCreated(now.DateTime)
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
.WithCreated(filesystemFile.LastWriteTime.DateTime)
.Build();
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(now.DateTime)
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
.Build();
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, true, file));
}
@@ -222,11 +215,10 @@ public class CacheHelperTests: AbstractFsTest
|
|||
[Fact]
|
||||
public void IsFileUnmodifiedSinceCreationOrLastScan_ModifiedSinceLastScan()
|
||||
{
|
||||
var now = DateTimeOffset.Now;
|
||||
var filesystemFile = new MockFileData("")
|
||||
{
|
||||
LastWriteTime = now.DateTime,
|
||||
CreationTime = now.DateTime
|
||||
LastWriteTime = DateTimeOffset.Now,
|
||||
CreationTime = DateTimeOffset.Now
|
||||
};
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
|
@ -242,8 +234,8 @@ public class CacheHelperTests: AbstractFsTest
|
|||
.WithCreated(DateTime.Now.Subtract(TimeSpan.FromMinutes(10)))
|
||||
.Build();
|
||||
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(now.DateTime)
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
.Build();
|
||||
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||
}
|
||||
|
@ -251,10 +243,9 @@ public class CacheHelperTests: AbstractFsTest
|
|||
[Fact]
|
||||
public void HasFileNotChangedSinceCreationOrLastScan_ModifiedSinceLastScan_ButLastModifiedSame()
|
||||
{
|
||||
var now = DateTimeOffset.Now;
|
||||
var filesystemFile = new MockFileData("")
|
||||
{
|
||||
LastWriteTime =now.DateTime
|
||||
LastWriteTime = DateTimeOffset.Now
|
||||
};
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
|
@ -271,7 +262,7 @@ public class CacheHelperTests: AbstractFsTest
|
|||
.Build();
|
||||
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(now.DateTime)
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
.Build();
|
||||
|
||||
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||
|
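For orientation, the CacheHelper tests above pin down one decision: IsFileUnmodifiedSinceCreationOrLastScan(chapter, forceUpdate, file) compares the chapter's recorded Created/LastModified timestamps with the file on disk, and forceUpdate overrides the check. A minimal caller sketch follows; only the ICacheHelper method and argument order are taken from this diff, the wrapper itself and the Chapter/MangaFile type names (inferred from the builders) are illustrative assumptions, not Kavita's actual scanner code.

using API.Entities;
using API.Helpers;

public static class ScanSkipSketch
{
    // Returns true when a chapter's backing file should be re-processed during a scan.
    public static bool NeedsRefresh(ICacheHelper cacheHelper, Chapter chapter, MangaFile file, bool forceUpdate)
    {
        // forceUpdate = true always reports the file as changed (see the _ForceUpdate test above);
        // otherwise the chapter's Created/LastModified timestamps are compared to the file on disk.
        return !cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, forceUpdate, file);
    }
}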
128
API.Tests/Helpers/GenreHelperTests.cs
Normal file

@ -0,0 +1,128 @@
using System.Collections.Generic;
using API.Data;
using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using Xunit;

namespace API.Tests.Helpers;

public class GenreHelperTests
{
[Fact]
public void UpdateGenre_ShouldAddNewGenre()
{
var allGenres = new Dictionary<string, Genre>
{
{"Action".ToNormalized(), new GenreBuilder("Action").Build()},
{"Sci-fi".ToNormalized(), new GenreBuilder("Sci-fi").Build()}
};
var genreAdded = new List<Genre>();
var addedCount = 0;

GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Adventure"}, (genre, isNew) =>
{
if (isNew)
{
addedCount++;
}
genreAdded.Add(genre);
});

Assert.Equal(2, genreAdded.Count);
Assert.Equal(1, addedCount);
Assert.Equal(3, allGenres.Count);
}

[Fact]
public void UpdateGenre_ShouldNotAddDuplicateGenre()
{
var allGenres = new Dictionary<string, Genre>
{
{"Action".ToNormalized(), new GenreBuilder("Action").Build()},
{"Sci-fi".ToNormalized(), new GenreBuilder("Sci-fi").Build()}
};
var genreAdded = new List<Genre>();
var addedCount = 0;

GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Scifi"}, (genre, isNew) =>
{
if (isNew)
{
addedCount++;
}
genreAdded.Add(genre);
});

Assert.Equal(0, addedCount);
Assert.Equal(2, genreAdded.Count);
Assert.Equal(2, allGenres.Count);
}

[Fact]
public void AddGenre_ShouldAddOnlyNonExistingGenre()
{
var existingGenres = new List<Genre>
{
new GenreBuilder("Action").Build(),
new GenreBuilder("action").Build(),
new GenreBuilder("Sci-fi").Build(),
};


GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Action").Build());
Assert.Equal(3, existingGenres.Count);

GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("action").Build());
Assert.Equal(3, existingGenres.Count);

GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Shonen").Build());
Assert.Equal(4, existingGenres.Count);
}

[Fact]
public void KeepOnlySamePeopleBetweenLists()
{
var existingGenres = new List<Genre>
{
new GenreBuilder("Action").Build(),
new GenreBuilder("Sci-fi").Build(),
};

var peopleFromChapters = new List<Genre>
{
new GenreBuilder("Action").Build(),
};

var genreRemoved = new List<Genre>();
GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
peopleFromChapters, genre =>
{
genreRemoved.Add(genre);
});

Assert.Single(genreRemoved);
}

[Fact]
public void RemoveEveryoneIfNothingInRemoveAllExcept()
{
var existingGenres = new List<Genre>
{
new GenreBuilder("Action").Build(),
new GenreBuilder("Sci-fi").Build(),
};

var peopleFromChapters = new List<Genre>();

var genreRemoved = new List<Genre>();
GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
peopleFromChapters, genre =>
{
genreRemoved.Add(genre);
});

Assert.Equal(2, genreRemoved.Count);
}
}
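The tests in this new file show GenreHelper.UpdateGenre resolving incoming names against a normalized-name dictionary and reporting isNew only for genres it just created (the first test asserts allGenres grows from 2 to 3). A short hedged sketch of how a caller might chain that callback into a per-series genre list; the ApplyGenres wrapper and the seriesGenres parameter are hypothetical, only the helper calls and their argument shapes come from the tests above.

using System.Collections.Generic;
using API.Entities;
using API.Helpers;

public static class GenreApplySketch
{
    public static void ApplyGenres(
        Dictionary<string, Genre> allGenres,   // keyed by normalized genre name, as in the tests
        string[] namesFromComicInfo,
        List<Genre> seriesGenres)              // hypothetical target collection for one series
    {
        GenreHelper.UpdateGenre(allGenres, namesFromComicInfo, (genre, isNew) =>
        {
            // isNew is true only when the helper just created the Genre; either way the
            // instance is already tracked in allGenres, so attach it with the duplicate-safe add.
            GenreHelper.AddGenreIfNotExists(seriesGenres, genre);
        });
    }
}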
@ -1,60 +0,0 @@
using API.DTOs.Koreader;
using API.DTOs.Progress;
using API.Helpers;
using System.Runtime.CompilerServices;
using Xunit;

namespace API.Tests.Helpers;


public class KoreaderHelperTests
{

[Theory]
[InlineData("/body/DocFragment[11]/body/div/a", 10, null)]
[InlineData("/body/DocFragment[1]/body/div/p[40]", 0, 40)]
[InlineData("/body/DocFragment[8]/body/div/p[28]/text().264", 7, 28)]
public void GetEpubPositionDto(string koreaderPosition, int page, int? pNumber)
{
var expected = EmptyProgressDto();
expected.BookScrollId = pNumber.HasValue ? $"//html[1]/BODY/APP-ROOT[1]/DIV[1]/DIV[1]/DIV[1]/APP-BOOK-READER[1]/DIV[1]/DIV[2]/DIV[1]/DIV[1]/DIV[1]/P[{pNumber}]" : null;
expected.PageNum = page;
var actual = EmptyProgressDto();

KoreaderHelper.UpdateProgressDto(actual, koreaderPosition);
Assert.Equal(expected.BookScrollId, actual.BookScrollId);
Assert.Equal(expected.PageNum, actual.PageNum);
}


[Theory]
[InlineData("//html[1]/BODY/APP-ROOT[1]/DIV[1]/DIV[1]/DIV[1]/APP-BOOK-READER[1]/DIV[1]/DIV[2]/DIV[1]/DIV[1]/DIV[1]/P[20]", 5, "/body/DocFragment[6]/body/div/p[20]")]
[InlineData(null, 10, "/body/DocFragment[11]/body/div/a")]
public void GetKoreaderPosition(string scrollId, int page, string koreaderPosition)
{
var given = EmptyProgressDto();
given.BookScrollId = scrollId;
given.PageNum = page;

Assert.Equal(koreaderPosition, KoreaderHelper.GetKoreaderPosition(given));
}

[Theory]
[InlineData("./Data/AesopsFables.epub", "8795ACA4BF264B57C1EEDF06A0CEE688")]
public void GetKoreaderHash(string filePath, string hash)
{
Assert.Equal(KoreaderHelper.HashContents(filePath), hash);
}

private ProgressDto EmptyProgressDto()
{
return new ProgressDto
{
ChapterId = 0,
PageNum = 0,
VolumeId = 0,
SeriesId = 0,
LibraryId = 0
};
}
}
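Reading the InlineData rows of this removed file, the mapping the tests encoded is: KOReader's 1-based DocFragment[n] corresponds to Kavita's 0-based page n-1, and a trailing p[k] selector becomes the P[k] element in Kavita's book-reader scroll id. A rough sketch of just that page-number arithmetic, assuming a regex over the position string; KoreaderHelper's real parsing is not shown in this compare.

using System.Text.RegularExpressions;

public static class KoreaderPositionSketch
{
    // Reproduces the PageNum arithmetic implied by the test data
    // ("/body/DocFragment[11]/..." maps to page 10, "/body/DocFragment[8]/..." to page 7).
    public static int? PageFromKoreaderPosition(string koreaderPosition)
    {
        var match = Regex.Match(koreaderPosition ?? string.Empty, @"DocFragment\[(\d+)\]");
        if (!match.Success) return null;

        // DocFragment indexes are 1-based in KOReader, page numbers are 0-based in Kavita
        return int.Parse(match.Groups[1].Value) - 1;
    }
}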
@ -1,5 +1,4 @@
using System;
using System.Collections.Generic;
using System.Collections.Generic;
using System.Linq;
using API.Entities;
using API.Helpers;

@ -50,14 +49,17 @@ public class OrderableHelperTests
|
|||
[Fact]
|
||||
public void ReorderItems_InvalidPosition_NoChange()
|
||||
{
|
||||
// Arrange
|
||||
var items = new List<AppUserSideNavStream>
|
||||
{
|
||||
new AppUserSideNavStream { Id = 1, Order = 0, Name = "A" },
|
||||
new AppUserSideNavStream { Id = 2, Order = 1, Name = "A" },
|
||||
};
|
||||
|
||||
// Act
|
||||
OrderableHelper.ReorderItems(items, 2, 3); // Position 3 is out of range
|
||||
|
||||
// Assert
|
||||
Assert.Equal(1, items[0].Id); // Item 1 should remain at position 0
|
||||
Assert.Equal(2, items[1].Id); // Item 2 should remain at position 1
|
||||
}
|
||||
|
@ -78,6 +80,7 @@ public class OrderableHelperTests
|
|||
[Fact]
|
||||
public void ReorderItems_DoubleMove()
|
||||
{
|
||||
// Arrange
|
||||
var items = new List<AppUserSideNavStream>
|
||||
{
|
||||
new AppUserSideNavStream { Id = 1, Order = 0, Name = "0" },
|
||||
|
@ -91,6 +94,7 @@ public class OrderableHelperTests
|
|||
// Move 4 -> 1
|
||||
OrderableHelper.ReorderItems(items, 5, 1);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(1, items[0].Id);
|
||||
Assert.Equal(0, items[0].Order);
|
||||
Assert.Equal(5, items[1].Id);
|
||||
|
@ -105,98 +109,4 @@ public class OrderableHelperTests
|
|||
|
||||
Assert.Equal("034125", string.Join("", items.Select(s => s.Name)));
|
||||
}
|
||||
|
||||
private static List<ReadingListItem> CreateTestReadingListItems(int count = 4)
|
||||
{
|
||||
var items = new List<ReadingListItem>();
|
||||
|
||||
for (var i = 0; i < count; i++)
|
||||
{
|
||||
items.Add(new ReadingListItem() { Id = i + 1, Order = count, ReadingListId = i + 1});
|
||||
}
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_MoveItemToBeginning_CorrectOrder()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
OrderableHelper.ReorderItems(items, 3, 0);
|
||||
|
||||
Assert.Equal(3, items[0].Id);
|
||||
Assert.Equal(1, items[1].Id);
|
||||
Assert.Equal(2, items[2].Id);
|
||||
Assert.Equal(4, items[3].Id);
|
||||
|
||||
for (var i = 0; i < items.Count; i++)
|
||||
{
|
||||
Assert.Equal(i, items[i].Order);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_MoveItemToEnd_CorrectOrder()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
OrderableHelper.ReorderItems(items, 1, 3);
|
||||
|
||||
Assert.Equal(2, items[0].Id);
|
||||
Assert.Equal(3, items[1].Id);
|
||||
Assert.Equal(4, items[2].Id);
|
||||
Assert.Equal(1, items[3].Id);
|
||||
|
||||
for (var i = 0; i < items.Count; i++)
|
||||
{
|
||||
Assert.Equal(i, items[i].Order);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_MoveItemToMiddle_CorrectOrder()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
OrderableHelper.ReorderItems(items, 4, 2);
|
||||
|
||||
Assert.Equal(1, items[0].Id);
|
||||
Assert.Equal(2, items[1].Id);
|
||||
Assert.Equal(4, items[2].Id);
|
||||
Assert.Equal(3, items[3].Id);
|
||||
|
||||
for (var i = 0; i < items.Count; i++)
|
||||
{
|
||||
Assert.Equal(i, items[i].Order);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_MoveItemToOutOfBoundsPosition_MovesToEnd()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
OrderableHelper.ReorderItems(items, 2, 10);
|
||||
|
||||
Assert.Equal(1, items[0].Id);
|
||||
Assert.Equal(3, items[1].Id);
|
||||
Assert.Equal(4, items[2].Id);
|
||||
Assert.Equal(2, items[3].Id);
|
||||
|
||||
for (var i = 0; i < items.Count; i++)
|
||||
{
|
||||
Assert.Equal(i, items[i].Order);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_NegativePosition_ThrowsArgumentException()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
Assert.Throws<ArgumentException>(() =>
|
||||
OrderableHelper.ReorderItems(items, 2, -1)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
using System.Collections.Generic;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services.Tasks.Scanner;
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
using System.Collections.Generic;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.DTOs;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
|
@ -8,219 +11,405 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class PersonHelperTests : AbstractDbTest
|
||||
public class PersonHelperTests
|
||||
{
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
Context.Series.RemoveRange(Context.Series.ToList());
|
||||
Context.Person.RemoveRange(Context.Person.ToList());
|
||||
Context.Library.RemoveRange(Context.Library.ToList());
|
||||
Context.Series.RemoveRange(Context.Series.ToList());
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
// 1. Test adding new people and keeping existing ones
|
||||
#region UpdatePeople
|
||||
[Fact]
|
||||
public async Task UpdateChapterPeopleAsync_AddNewPeople_ExistingPersonRetained()
|
||||
public void UpdatePeople_ShouldAddNewPeople()
|
||||
{
|
||||
await ResetDb();
|
||||
var allPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
};
|
||||
var peopleAdded = new List<Person>();
|
||||
|
||||
var library = new LibraryBuilder("My Library")
|
||||
.Build();
|
||||
PersonHelper.UpdatePeople(allPeople, new[] {"Joseph Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
||||
{
|
||||
peopleAdded.Add(person);
|
||||
});
|
||||
|
||||
UnitOfWork.LibraryRepository.Add(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var existingPerson = new PersonBuilder("Joe Shmo").Build();
|
||||
var chapter = new ChapterBuilder("1").Build();
|
||||
|
||||
// Create an existing person and assign them to the series with a role
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithPerson(existingPerson, PersonRole.Editor)
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1").WithChapter(chapter).Build())
|
||||
.Build();
|
||||
|
||||
UnitOfWork.SeriesRepository.Add(series);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Call UpdateChapterPeopleAsync with one existing and one new person
|
||||
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo", "New Person" }, PersonRole.Editor, UnitOfWork);
|
||||
|
||||
// Assert existing person retained and new person added
|
||||
var people = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.Contains(people, p => p.Name == "Joe Shmo");
|
||||
Assert.Contains(people, p => p.Name == "New Person");
|
||||
|
||||
var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||
Assert.Contains("Joe Shmo", chapterPeople);
|
||||
Assert.Contains("New Person", chapterPeople);
|
||||
}
|
||||
|
||||
// 2. Test removing a person no longer in the list
|
||||
[Fact]
|
||||
public async Task UpdateChapterPeopleAsync_RemovePeople()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
var library = new LibraryBuilder("My Library")
|
||||
.Build();
|
||||
|
||||
UnitOfWork.LibraryRepository.Add(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var existingPerson1 = new PersonBuilder("Joe Shmo").Build();
|
||||
var existingPerson2 = new PersonBuilder("Jane Doe").Build();
|
||||
var chapter = new ChapterBuilder("1")
|
||||
.WithPerson(existingPerson1, PersonRole.Editor)
|
||||
.WithPerson(existingPerson2, PersonRole.Editor)
|
||||
.Build();
|
||||
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(chapter)
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
UnitOfWork.SeriesRepository.Add(series);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Call UpdateChapterPeopleAsync with only one person
|
||||
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
|
||||
|
||||
// PersonHelper does not remove the Person from the global DbSet itself
|
||||
await UnitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated();
|
||||
|
||||
var people = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.DoesNotContain(people, p => p.Name == "Jane Doe");
|
||||
|
||||
var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||
Assert.Contains("Joe Shmo", chapterPeople);
|
||||
Assert.DoesNotContain("Jane Doe", chapterPeople);
|
||||
}
|
||||
|
||||
// 3. Test no changes when the list of people is the same
|
||||
[Fact]
|
||||
public async Task UpdateChapterPeopleAsync_NoChanges()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
var library = new LibraryBuilder("My Library")
|
||||
.Build();
|
||||
|
||||
UnitOfWork.LibraryRepository.Add(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var existingPerson = new PersonBuilder("Joe Shmo").Build();
|
||||
var chapter = new ChapterBuilder("1").WithPerson(existingPerson, PersonRole.Editor).Build();
|
||||
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(chapter)
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
UnitOfWork.SeriesRepository.Add(series);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Call UpdateChapterPeopleAsync with the same list
|
||||
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
|
||||
|
||||
var people = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.Contains(people, p => p.Name == "Joe Shmo");
|
||||
|
||||
var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||
Assert.Contains("Joe Shmo", chapterPeople);
|
||||
Assert.Single(chapter.People); // No duplicate entries
|
||||
}
|
||||
|
||||
// 4. Test multiple roles for a person
|
||||
[Fact]
|
||||
public async Task UpdateChapterPeopleAsync_MultipleRoles()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
var library = new LibraryBuilder("My Library")
|
||||
.Build();
|
||||
|
||||
UnitOfWork.LibraryRepository.Add(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var person = new PersonBuilder("Joe Shmo").Build();
|
||||
var chapter = new ChapterBuilder("1").WithPerson(person, PersonRole.Writer).Build();
|
||||
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(chapter)
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
UnitOfWork.SeriesRepository.Add(series);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Add same person as Editor
|
||||
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
|
||||
|
||||
// Ensure that the same person is assigned with two roles
|
||||
var chapterPeople = chapter
|
||||
.People
|
||||
.Where(cp =>
|
||||
cp.Person.Name == "Joe Shmo")
|
||||
.ToList();
|
||||
Assert.Equal(2, chapterPeople.Count); // One for each role
|
||||
Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Writer);
|
||||
Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Editor);
|
||||
Assert.Equal(2, peopleAdded.Count);
|
||||
Assert.Equal(4, allPeople.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateChapterPeopleAsync_MatchOnAlias_NoChanges()
|
||||
public void UpdatePeople_ShouldNotAddDuplicatePeople()
|
||||
{
|
||||
await ResetDb();
|
||||
var allPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
new PersonBuilder("Sally Ann", PersonRole.CoverArtist).Build(),
|
||||
|
||||
var library = new LibraryBuilder("My Library")
|
||||
.Build();
|
||||
};
|
||||
var peopleAdded = new List<Person>();
|
||||
|
||||
UnitOfWork.LibraryRepository.Add(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
PersonHelper.UpdatePeople(allPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.CoverArtist, person =>
|
||||
{
|
||||
peopleAdded.Add(person);
|
||||
});
|
||||
|
||||
var person = new PersonBuilder("Joe Doe")
|
||||
.WithAlias("Jonny Doe")
|
||||
.Build();
|
||||
Assert.Equal(3, allPeople.Count);
|
||||
}
|
||||
#endregion
|
||||
|
||||
var chapter = new ChapterBuilder("1")
|
||||
.WithPerson(person, PersonRole.Editor)
|
||||
.Build();
|
||||
#region UpdatePeopleList
|
||||
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(chapter)
|
||||
.Build())
|
||||
.Build();
|
||||
[Fact]
|
||||
public void UpdatePeopleList_NullTags_NoChanges()
|
||||
{
|
||||
// Arrange
|
||||
ICollection<PersonDto> tags = null;
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var allTags = new List<Person>();
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
UnitOfWork.SeriesRepository.Add(series);
|
||||
await UnitOfWork.CommitAsync();
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(PersonRole.Writer, tags, series, allTags, p => handleAddCalled = true, () => onModifiedCalled = true);
|
||||
|
||||
// Add on Name
|
||||
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Doe" }, PersonRole.Editor, UnitOfWork);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.Single(allPeople);
|
||||
|
||||
// Add on alias
|
||||
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Jonny Doe" }, PersonRole.Editor, UnitOfWork);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.Single(allPeople);
|
||||
// Assert
|
||||
Assert.False(handleAddCalled);
|
||||
Assert.False(onModifiedCalled);
|
||||
}
|
||||
|
||||
// TODO: Unit tests for series
|
||||
[Fact]
|
||||
public void UpdatePeopleList_AddNewTag_TagAddedAndOnModifiedCalled()
|
||||
{
|
||||
// Arrange
|
||||
const PersonRole role = PersonRole.Writer;
|
||||
var tags = new List<PersonDto>
|
||||
{
|
||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
||||
};
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var allTags = new List<Person>();
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
||||
{
|
||||
handleAddCalled = true;
|
||||
series.Metadata.People.Add(p);
|
||||
}, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.True(handleAddCalled);
|
||||
Assert.True(onModifiedCalled);
|
||||
Assert.Single(series.Metadata.People);
|
||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeopleList_RemoveExistingTag_TagRemovedAndOnModifiedCalled()
|
||||
{
|
||||
// Arrange
|
||||
const PersonRole role = PersonRole.Writer;
|
||||
var tags = new List<PersonDto>();
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var person = new PersonBuilder("John Doe", role).Build();
|
||||
person.Id = 1;
|
||||
series.Metadata.People.Add(person);
|
||||
var allTags = new List<Person>
|
||||
{
|
||||
person
|
||||
};
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
||||
{
|
||||
handleAddCalled = true;
|
||||
series.Metadata.People.Add(p);
|
||||
}, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.False(handleAddCalled);
|
||||
Assert.True(onModifiedCalled);
|
||||
Assert.Empty(series.Metadata.People);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeopleList_UpdateExistingTag_OnModifiedCalled()
|
||||
{
|
||||
// Arrange
|
||||
const PersonRole role = PersonRole.Writer;
|
||||
var tags = new List<PersonDto>
|
||||
{
|
||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
||||
};
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var person = new PersonBuilder("John Doe", role).Build();
|
||||
person.Id = 1;
|
||||
series.Metadata.People.Add(person);
|
||||
var allTags = new List<Person>
|
||||
{
|
||||
person
|
||||
};
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
||||
{
|
||||
handleAddCalled = true;
|
||||
series.Metadata.People.Add(p);
|
||||
}, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.False(handleAddCalled);
|
||||
Assert.False(onModifiedCalled);
|
||||
Assert.Single(series.Metadata.People);
|
||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeopleList_NoChanges_HandleAddAndOnModifiedNotCalled()
|
||||
{
|
||||
// Arrange
|
||||
const PersonRole role = PersonRole.Writer;
|
||||
var tags = new List<PersonDto>
|
||||
{
|
||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
||||
};
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var person = new PersonBuilder("John Doe", role).Build();
|
||||
person.Id = 1;
|
||||
series.Metadata.People.Add(person);
|
||||
var allTags = new List<Person>
|
||||
{
|
||||
new PersonBuilder("John Doe", role).Build()
|
||||
};
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
||||
{
|
||||
handleAddCalled = true;
|
||||
series.Metadata.People.Add(p);
|
||||
}, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.False(handleAddCalled);
|
||||
Assert.False(onModifiedCalled);
|
||||
Assert.Single(series.Metadata.People);
|
||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
||||
}
|
||||
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region RemovePeople
|
||||
[Fact]
|
||||
public void RemovePeople_ShouldRemovePeopleOfSameRole()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
};
|
||||
var peopleRemoved = new List<Person>();
|
||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
||||
Assert.Single(peopleRemoved);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RemovePeople_ShouldRemovePeopleFromBothRoles()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
};
|
||||
var peopleRemoved = new List<Person>();
|
||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
||||
Assert.Single(peopleRemoved);
|
||||
|
||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo"}, PersonRole.CoverArtist, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.Empty(existingPeople);
|
||||
Assert.Equal(2, peopleRemoved.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RemovePeople_ShouldRemovePeopleOfSameRole_WhenNothingPassed()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
};
|
||||
var peopleRemoved = new List<Person>();
|
||||
PersonHelper.RemovePeople(existingPeople, new List<string>(), PersonRole.Writer, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
||||
Assert.Equal(2, peopleRemoved.Count);
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region KeepOnlySamePeopleBetweenLists
|
||||
[Fact]
|
||||
public void KeepOnlySamePeopleBetweenLists()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
new PersonBuilder("Sally", PersonRole.Writer).Build(),
|
||||
};
|
||||
|
||||
var peopleFromChapters = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
};
|
||||
|
||||
var peopleRemoved = new List<Person>();
|
||||
PersonHelper.KeepOnlySamePeopleBetweenLists(existingPeople,
|
||||
peopleFromChapters, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.Equal(2, peopleRemoved.Count);
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region AddPeople
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonDoesNotExist()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>();
|
||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
||||
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
||||
|
||||
// Assert
|
||||
Assert.Single(metadataPeople);
|
||||
Assert.Contains(person, metadataPeople);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonAlreadyExists()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("John Smith", PersonRole.Character)
|
||||
.WithId(1)
|
||||
.Build()
|
||||
};
|
||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
||||
|
||||
// Assert
|
||||
Assert.Single(metadataPeople);
|
||||
Assert.NotNull(metadataPeople.SingleOrDefault(p =>
|
||||
p.Name.Equals(person.Name) && p.Role == person.Role && p.NormalizedName == person.NormalizedName));
|
||||
Assert.Equal(1, metadataPeople.First().Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonNameIsNullOrEmpty()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>();
|
||||
var person2 = new PersonBuilder(string.Empty, PersonRole.Character).Build();
|
||||
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person2);
|
||||
|
||||
// Assert
|
||||
Assert.Empty(metadataPeople);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsDifferentButRoleIsSame()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("John Smith", PersonRole.Character).Build()
|
||||
};
|
||||
var person = new PersonBuilder("John Doe", PersonRole.Character).Build();
|
||||
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(2, metadataPeople.Count);
|
||||
Assert.Contains(person, metadataPeople);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsSameButRoleIsDifferent()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("John Doe", PersonRole.Writer).Build()
|
||||
};
|
||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
||||
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(2, metadataPeople.Count);
|
||||
Assert.Contains(person, metadataPeople);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
[Fact]
|
||||
public void AddPeople_ShouldAddOnlyNonExistingPeople()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
new PersonBuilder("Sally", PersonRole.Writer).Build(),
|
||||
};
|
||||
|
||||
|
||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build());
|
||||
Assert.Equal(3, existingPeople.Count);
|
||||
|
||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.Writer).Build());
|
||||
Assert.Equal(3, existingPeople.Count);
|
||||
|
||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo Two", PersonRole.CoverArtist).Build());
|
||||
Assert.Equal(4, existingPeople.Count);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
||||
|
|
|
@ -1,124 +0,0 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class RandfHelper
|
||||
{
|
||||
private static readonly Random Random = new ();
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if all simple fields are equal
|
||||
/// </summary>
|
||||
/// <param name="obj1"></param>
|
||||
/// <param name="obj2"></param>
|
||||
/// <param name="ignoreFields">fields to ignore, note that the names are very weird sometimes</param>
|
||||
/// <returns></returns>
|
||||
/// <exception cref="ArgumentNullException"></exception>
|
||||
/// <exception cref="ArgumentException"></exception>
|
||||
public static bool AreSimpleFieldsEqual(object obj1, object obj2, IList<string> ignoreFields)
|
||||
{
|
||||
if (obj1 == null || obj2 == null)
|
||||
throw new ArgumentNullException("Neither object can be null.");
|
||||
|
||||
Type type1 = obj1.GetType();
|
||||
Type type2 = obj2.GetType();
|
||||
|
||||
if (type1 != type2)
|
||||
throw new ArgumentException("Objects must be of the same type.");
|
||||
|
||||
FieldInfo[] fields = type1.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.NonPublic);
|
||||
|
||||
foreach (var field in fields)
|
||||
{
|
||||
if (field.IsInitOnly) continue;
|
||||
if (ignoreFields.Contains(field.Name)) continue;
|
||||
|
||||
Type fieldType = field.FieldType;
|
||||
|
||||
if (IsRelevantType(fieldType))
|
||||
{
|
||||
object value1 = field.GetValue(obj1);
|
||||
object value2 = field.GetValue(obj2);
|
||||
|
||||
if (!Equals(value1, value2))
|
||||
{
|
||||
throw new ArgumentException("Fields must be of the same type: " + field.Name + " was " + value1 + " and " + value2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static bool IsRelevantType(Type type)
|
||||
{
|
||||
return type.IsPrimitive
|
||||
|| type == typeof(string)
|
||||
|| type.IsEnum;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Sets all simple fields of the given object to a random value
|
||||
/// </summary>
|
||||
/// <param name="obj"></param>
|
||||
/// <remarks>Simple is, primitive, string, or enum</remarks>
|
||||
/// <exception cref="ArgumentNullException"></exception>
|
||||
public static void SetRandomValues(object obj)
|
||||
{
|
||||
if (obj == null) throw new ArgumentNullException(nameof(obj));
|
||||
|
||||
Type type = obj.GetType();
|
||||
FieldInfo[] fields = type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic);
|
||||
|
||||
foreach (var field in fields)
|
||||
{
|
||||
if (field.IsInitOnly) continue; // Skip readonly fields
|
||||
|
||||
object value = GenerateRandomValue(field.FieldType);
|
||||
if (value != null)
|
||||
{
|
||||
field.SetValue(obj, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static object GenerateRandomValue(Type type)
|
||||
{
|
||||
if (type == typeof(int))
|
||||
return Random.Next();
|
||||
if (type == typeof(float))
|
||||
return (float)Random.NextDouble() * 100;
|
||||
if (type == typeof(double))
|
||||
return Random.NextDouble() * 100;
|
||||
if (type == typeof(bool))
|
||||
return Random.Next(2) == 1;
|
||||
if (type == typeof(char))
|
||||
return (char)Random.Next('A', 'Z' + 1);
|
||||
if (type == typeof(byte))
|
||||
return (byte)Random.Next(0, 256);
|
||||
if (type == typeof(short))
|
||||
return (short)Random.Next(short.MinValue, short.MaxValue);
|
||||
if (type == typeof(long))
|
||||
return (long)(Random.NextDouble() * long.MaxValue);
|
||||
if (type == typeof(string))
|
||||
return GenerateRandomString(10);
|
||||
if (type.IsEnum)
|
||||
{
|
||||
var values = Enum.GetValues(type);
|
||||
return values.GetValue(Random.Next(values.Length));
|
||||
}
|
||||
|
||||
// Unsupported type
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string GenerateRandomString(int length)
|
||||
{
|
||||
const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
||||
return new string(Enumerable.Repeat(chars, length)
|
||||
.Select(s => s[Random.Next(s.Length)]).ToArray());
|
||||
}
|
||||
}
|
|
@ -1,5 +1,4 @@
using System;
using System.Threading.Tasks;
using API.Helpers;
using Xunit;

@ -34,7 +33,7 @@ public class RateLimiterTests
}

[Fact]
public async Task AcquireTokens_Refill()
public void AcquireTokens_Refill()
{
// Arrange
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));

@ -44,14 +43,14 @@ public class RateLimiterTests
limiter.TryAcquire("test_key");

// Wait for refill
await Task.Delay(1100);
System.Threading.Thread.Sleep(1100);

// Assert
Assert.True(limiter.TryAcquire("test_key"));
}

[Fact]
public async Task AcquireTokens_Refill_WithOff()
public void AcquireTokens_Refill_WithOff()
{
// Arrange
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);

@ -61,7 +60,7 @@ public class RateLimiterTests
limiter.TryAcquire("test_key");

// Wait for refill
await Task.Delay(2100);
System.Threading.Thread.Sleep(2100);

// Assert
Assert.False(limiter.TryAcquire("test_key"));
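The RateLimiter tests above describe a small token bucket: a fixed number of tokens per key, an optional auto-refill interval, and TryAcquire(key) returning false once the bucket is empty. A minimal sketch of guarding an outbound call with it; the constructor arguments and TryAcquire signature come from the tests, while the MetadataClient wrapper and its method are hypothetical.

using System;
using API.Helpers;

public class MetadataClient
{
    // Allow 2 calls per key per 10 seconds; refill is enabled by default per the two-argument
    // constructor used in the tests (the third argument seen above turns refill off).
    private readonly RateLimiter _limiter = new RateLimiter(2, TimeSpan.FromSeconds(10));

    public bool TryFetch(string userKey)
    {
        if (!_limiter.TryAcquire(userKey))
        {
            // Out of tokens for this key; the caller should back off and retry later.
            return false;
        }

        // ... perform the rate-limited work here ...
        return true;
    }
}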
@ -1,258 +0,0 @@
|
|||
using API.Helpers;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using Xunit;
|
||||
using API.DTOs.SeriesDetail;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class ReviewHelperTests
|
||||
{
|
||||
#region SelectSpectrumOfReviews Tests
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WhenLessThan10Reviews_ReturnsAllReviews()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = CreateReviewList(8);
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(8, result.Count);
|
||||
Assert.Equal(reviews, result.OrderByDescending(r => r.Score));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WhenMoreThan10Reviews_Returns10Reviews()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = CreateReviewList(20);
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(10, result.Count);
|
||||
Assert.Equal(reviews[0], result.First());
|
||||
Assert.Equal(reviews[19], result.Last());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WithExactly10Reviews_ReturnsAllReviews()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = CreateReviewList(10);
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(10, result.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WithLargeNumberOfReviews_ReturnsCorrectSpectrum()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = CreateReviewList(100);
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(10, result.Count);
|
||||
Assert.Contains(reviews[0], result);
|
||||
Assert.Contains(reviews[1], result);
|
||||
Assert.Contains(reviews[98], result);
|
||||
Assert.Contains(reviews[99], result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WithEmptyList_ReturnsEmptyList()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = new List<UserReviewDto>();
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Empty(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_ResultsOrderedByScoreDescending()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = new List<UserReviewDto>
|
||||
{
|
||||
new UserReviewDto { Tagline = "1", Score = 3 },
|
||||
new UserReviewDto { Tagline = "2", Score = 5 },
|
||||
new UserReviewDto { Tagline = "3", Score = 1 },
|
||||
new UserReviewDto { Tagline = "4", Score = 4 },
|
||||
new UserReviewDto { Tagline = "5", Score = 2 }
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(5, result.Count);
|
||||
Assert.Equal(5, result[0].Score);
|
||||
Assert.Equal(4, result[1].Score);
|
||||
Assert.Equal(3, result[2].Score);
|
||||
Assert.Equal(2, result[3].Score);
|
||||
Assert.Equal(1, result[4].Score);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetCharacters Tests
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithNullBody_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
string body = null;
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithEmptyBody_ReturnsEmptyString()
|
||||
{
|
||||
// Arrange
|
||||
var body = string.Empty;
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithNoTextNodes_ReturnsEmptyString()
|
||||
{
|
||||
// Arrange
|
||||
const string body = "<div></div>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithLessCharactersThanLimit_ReturnsFullText()
|
||||
{
|
||||
// Arrange
|
||||
var body = "<p>This is a short review.</p>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("This is a short review.…", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithMoreCharactersThanLimit_TruncatesText()
|
||||
{
|
||||
// Arrange
|
||||
var body = "<p>" + new string('a', 200) + "</p>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(new string('a', 175) + "…", result);
|
||||
Assert.Equal(176, result.Length); // 175 characters + ellipsis
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_IgnoresScriptTags()
|
||||
{
|
||||
// Arrange
|
||||
const string body = "<p>Visible text</p><script>console.log('hidden');</script>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("Visible text…", result);
|
||||
Assert.DoesNotContain("hidden", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_RemovesMarkdownSymbols()
|
||||
{
|
||||
// Arrange
|
||||
const string body = "<p>This is **bold** and _italic_ text with [link](url).</p>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("This is bold and italic text with link.…", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_HandlesComplexMarkdownAndHtml()
|
||||
{
|
||||
// Arrange
|
||||
const string body = """
|
||||
|
||||
<div>
|
||||
<h1># Header</h1>
|
||||
<p>This is ~~strikethrough~~ and __underlined__ text</p>
|
||||
<p>~~~code block~~~</p>
|
||||
<p>+++highlighted+++</p>
|
||||
<p>img123(image.jpg)</p>
|
||||
</div>
|
||||
""";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.DoesNotContain("~~", result);
|
||||
Assert.DoesNotContain("__", result);
|
||||
Assert.DoesNotContain("~~~", result);
|
||||
Assert.DoesNotContain("+++", result);
|
||||
Assert.DoesNotContain("img123(", result);
|
||||
Assert.Contains("Header", result);
|
||||
Assert.Contains("strikethrough", result);
|
||||
Assert.Contains("underlined", result);
|
||||
Assert.Contains("code block", result);
|
||||
Assert.Contains("highlighted", result);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static List<UserReviewDto> CreateReviewList(int count)
|
||||
{
|
||||
var reviews = new List<UserReviewDto>();
|
||||
for (var i = 0; i < count; i++)
|
||||
{
|
||||
reviews.Add(new UserReviewDto
|
||||
{
|
||||
Tagline = $"{i + 1}",
|
||||
Score = count - i // This makes them ordered by score descending initially
|
||||
});
|
||||
}
|
||||
return reviews;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
|
@ -1,208 +0,0 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading.Tasks;
|
||||
using System.Xml;
|
||||
using System.Xml.Serialization;
|
||||
using API.Data;
|
||||
using API.Data.Metadata;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Plus;
|
||||
using API.Services.Tasks;
|
||||
using API.Services.Tasks.Metadata;
|
||||
using API.Services.Tasks.Scanner;
|
||||
using API.SignalR;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
#nullable enable
|
||||
|
||||
public class ScannerHelper
|
||||
{
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
private readonly ITestOutputHelper _testOutputHelper;
|
||||
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests");
|
||||
private readonly string _testcasesDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/TestCases");
|
||||
private readonly string _imagePath = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/1x1.png");
|
||||
private static readonly string[] ComicInfoExtensions = new[] { ".cbz", ".cbr", ".zip", ".rar" };
|
||||
|
||||
public ScannerHelper(IUnitOfWork unitOfWork, ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
_unitOfWork = unitOfWork;
|
||||
_testOutputHelper = testOutputHelper;
|
||||
}
|
||||
|
||||
public async Task<Library> GenerateScannerData(string testcase, Dictionary<string, ComicInfo> comicInfos = null)
|
||||
{
|
||||
var testDirectoryPath = await GenerateTestDirectory(Path.Join(_testcasesDirectory, testcase), comicInfos);
|
||||
|
||||
var (publisher, type) = SplitPublisherAndLibraryType(Path.GetFileNameWithoutExtension(testcase));
|
||||
|
||||
var library = new LibraryBuilder(publisher, type)
|
||||
.WithFolders([new FolderPath() {Path = testDirectoryPath}])
|
||||
.Build();
|
||||
|
||||
var admin = new AppUserBuilder("admin", "admin@kavita.com", Seed.DefaultThemes[0])
|
||||
.WithLibrary(library)
|
||||
.Build();
|
||||
|
||||
_unitOfWork.UserRepository.Add(admin); // Admin is needed for generating collections/reading lists
|
||||
_unitOfWork.LibraryRepository.Add(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
return library;
|
||||
}
|
||||
|
||||
public ScannerService CreateServices(DirectoryService ds = null, IFileSystem fs = null)
|
||||
{
|
||||
fs ??= new FileSystem();
|
||||
ds ??= new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var archiveService = new ArchiveService(Substitute.For<ILogger<ArchiveService>>(), ds,
|
||||
Substitute.For<IImageService>(), Substitute.For<IMediaErrorService>());
|
||||
var readingItemService = new ReadingItemService(archiveService, Substitute.For<IBookService>(),
|
||||
Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>());
|
||||
|
||||
|
||||
var processSeries = new ProcessSeries(_unitOfWork, Substitute.For<ILogger<ProcessSeries>>(),
|
||||
Substitute.For<IEventHub>(),
|
||||
ds, Substitute.For<ICacheHelper>(), readingItemService, new FileService(fs),
|
||||
Substitute.For<IMetadataService>(),
|
||||
Substitute.For<IWordCountAnalyzerService>(),
|
||||
Substitute.For<IReadingListService>(),
|
||||
Substitute.For<IExternalMetadataService>());
|
||||
|
||||
var scanner = new ScannerService(_unitOfWork, Substitute.For<ILogger<ScannerService>>(),
|
||||
Substitute.For<IMetadataService>(),
|
||||
Substitute.For<ICacheService>(), Substitute.For<IEventHub>(), ds,
|
||||
readingItemService, processSeries, Substitute.For<IWordCountAnalyzerService>());
|
||||
return scanner;
|
||||
}
|
||||
|
||||
private static (string Publisher, LibraryType Type) SplitPublisherAndLibraryType(string input)
|
||||
{
|
||||
// Split the input string based on " - "
|
||||
var parts = input.Split(" - ", StringSplitOptions.RemoveEmptyEntries);
|
||||
|
||||
if (parts.Length != 2)
|
||||
{
|
||||
throw new ArgumentException("Input must be in the format 'Publisher - LibraryType'");
|
||||
}
|
||||
|
||||
var publisher = parts[0].Trim();
|
||||
var libraryTypeString = parts[1].Trim();
|
||||
|
||||
// Try to parse the right-hand side as a LibraryType enum
|
||||
if (!Enum.TryParse<LibraryType>(libraryTypeString, out var libraryType))
|
||||
{
|
||||
throw new ArgumentException($"'{libraryTypeString}' is not a valid LibraryType");
|
||||
}
|
||||
|
||||
return (publisher, libraryType);
|
||||
}
|
||||
|
||||
|
||||
|
||||
private async Task<string> GenerateTestDirectory(string mapPath, Dictionary<string, ComicInfo> comicInfos = null)
|
||||
{
|
||||
// Read the map file
|
||||
var mapContent = await File.ReadAllTextAsync(mapPath);
|
||||
|
||||
// Deserialize the JSON content into a list of strings using System.Text.Json
|
||||
var filePaths = JsonSerializer.Deserialize<List<string>>(mapContent);
|
||||
|
||||
// Create a test directory
|
||||
var testDirectory = Path.Combine(_testDirectory, Path.GetFileNameWithoutExtension(mapPath));
|
||||
if (Directory.Exists(testDirectory))
|
||||
{
|
||||
Directory.Delete(testDirectory, true);
|
||||
}
|
||||
Directory.CreateDirectory(testDirectory);
|
||||
|
||||
// Generate the files and folders
|
||||
await Scaffold(testDirectory, filePaths, comicInfos);
|
||||
|
||||
_testOutputHelper.WriteLine($"Test Directory Path: {testDirectory}");
|
||||
|
||||
return Path.GetFullPath(testDirectory);
|
||||
}
|
||||
|
||||
|
||||
public async Task Scaffold(string testDirectory, List<string> filePaths, Dictionary<string, ComicInfo> comicInfos = null)
|
||||
{
|
||||
foreach (var relativePath in filePaths)
|
||||
{
|
||||
var fullPath = Path.Combine(testDirectory, relativePath);
|
||||
var fileDir = Path.GetDirectoryName(fullPath);
|
||||
|
||||
// Create the directory if it doesn't exist
|
||||
if (!Directory.Exists(fileDir))
|
||||
{
|
||||
Directory.CreateDirectory(fileDir);
|
||||
Console.WriteLine($"Created directory: {fileDir}");
|
||||
}
|
||||
|
||||
var ext = Path.GetExtension(fullPath).ToLower();
|
||||
if (ComicInfoExtensions.Contains(ext) && comicInfos != null && comicInfos.TryGetValue(Path.GetFileName(relativePath), out var info))
|
||||
{
|
||||
CreateMinimalCbz(fullPath, info);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Create an empty file
|
||||
await File.Create(fullPath).DisposeAsync();
|
||||
Console.WriteLine($"Created empty file: {fullPath}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    private void CreateMinimalCbz(string filePath, ComicInfo? comicInfo = null)
    {
        using (var archive = ZipFile.Open(filePath, ZipArchiveMode.Create))
        {
            // Add the 1x1 image to the archive
            archive.CreateEntryFromFile(_imagePath, "1x1.png");

            if (comicInfo != null)
            {
                // Serialize ComicInfo object to XML
                var comicInfoXml = SerializeComicInfoToXml(comicInfo);

                // Create an entry for ComicInfo.xml in the archive
                var entry = archive.CreateEntry("ComicInfo.xml");
                using var entryStream = entry.Open();
                using var writer = new StreamWriter(entryStream, Encoding.UTF8);

                // Write the XML to the archive
                writer.Write(comicInfoXml);
            }
        }
        Console.WriteLine($"Created minimal CBZ archive: {filePath} with{(comicInfo != null ? "" : "out")} metadata.");
    }
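When one of the scaffolded paths needs metadata, the helper above produces a genuine, if tiny, .cbz rather than an empty file. A hypothetical call (the series and volume values are borrowed from the ComicVine tests further down in this comparison):

    CreateMinimalCbz(Path.Combine(testDirectory, "Birds of Prey (2002)/Birds of Prey 001 (2002).cbz"),
        new ComicInfo { Series = "Birds of Prey", Volume = "2002" });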
    private static string SerializeComicInfoToXml(ComicInfo comicInfo)
    {
        var xmlSerializer = new XmlSerializer(typeof(ComicInfo));
        using var stringWriter = new StringWriter();
        using (var xmlWriter = XmlWriter.Create(stringWriter, new XmlWriterSettings { Indent = true, Encoding = new UTF8Encoding(false), OmitXmlDeclaration = false }))
        {
            xmlSerializer.Serialize(xmlWriter, comicInfo);
        }

        // For the love of god, I spent 2 hours trying to get utf-8 with no BOM
        return stringWriter.ToString().Replace("""<?xml version="1.0" encoding="utf-16"?>""",
            @"<?xml version='1.0' encoding='utf-8'?>");
    }
}
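The final Replace is needed because XmlSerializer always reports utf-16 when it writes into a StringWriter, whatever the XmlWriterSettings say. Serializing into a MemoryStream with a BOM-less UTF8Encoding avoids the string surgery altogether; a sketch of that alternative (not what the tests above currently do):

    private static string SerializeComicInfoToXmlUtf8(ComicInfo comicInfo)
    {
        var serializer = new XmlSerializer(typeof(ComicInfo));
        var settings = new XmlWriterSettings
        {
            Indent = true,
            Encoding = new UTF8Encoding(false) // utf-8 declaration, no BOM bytes
        };

        using var ms = new MemoryStream();
        using (var xmlWriter = XmlWriter.Create(ms, settings))
        {
            serializer.Serialize(xmlWriter, comicInfo);
        }

        // The declaration now reads encoding="utf-8" and the payload carries no BOM.
        return Encoding.UTF8.GetString(ms.ToArray());
    }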
@ -1,5 +1,6 @@
|
|||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using API.Data;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
@ -1,46 +0,0 @@
|
|||
using API.Helpers;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class StringHelperTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData(
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> <br><br><br /> Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?</p>",
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?</p>"
|
||||
)]
|
||||
[InlineData(
|
||||
"<p><a href=\"https://blog.goo.ne.jp/tamakiya_web\">Blog</a> | <a href=\"https://twitter.com/tamakinozomu\">Twitter</a> | <a href=\"https://www.pixiv.net/member.php?id=68961\">Pixiv</a> | <a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>",
|
||||
"<p><a href=\"https://blog.goo.ne.jp/tamakiya_web\">Blog</a> | <a href=\"https://twitter.com/tamakinozomu\">Twitter</a> | <a href=\"https://www.pixiv.net/member.php?id=68961\">Pixiv</a> | <a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>"
|
||||
)]
|
||||
public void TestSquashBreaklines(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, StringHelper.SquashBreaklines(input));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> (Source: Anime News Network)</p>",
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>"
|
||||
)]
|
||||
[InlineData(
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>(Source: Anime News Network)",
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>"
|
||||
)]
|
||||
public void TestRemoveSourceInDescription(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, StringHelper.RemoveSourceInDescription(input));
|
||||
}
|
||||
|
||||
|
||||
[Theory]
|
||||
[InlineData(
|
||||
"""<a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>""",
|
||||
"""<a href=\"https://pawoo.net/@tamakiya\">Pawoo</a></p>"""
|
||||
)]
|
||||
public void TestCorrectUrls(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, StringHelper.CorrectUrls(input));
|
||||
}
|
||||
}
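The SquashBreaklines cases above pin down the behaviour: runs of two or more <br> tags collapse to a single <br />, while a lone <br /> and input without such runs (the anchor-only description) pass through untouched. One regex that satisfies those cases, as a sketch only and not necessarily the removed helper's real implementation:

    using System.Text.RegularExpressions;

    public static class StringHelperSketch
    {
        // Collapse two or more consecutive <br>/<br /> tags (and the whitespace between them) into one.
        public static string SquashBreaklines(string value)
        {
            return Regex.Replace(value, @"(<br\s*/?>\s*){2,}", "<br /> ", RegexOptions.IgnoreCase);
        }
    }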
API.Tests/Helpers/TagHelperTests.cs (new file, 128 lines)
@ -0,0 +1,128 @@
|
|||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using API.Data;
|
||||
using API.Entities;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class TagHelperTests
|
||||
{
|
||||
[Fact]
|
||||
public void UpdateTag_ShouldAddNewTag()
|
||||
{
|
||||
var allTags = new Dictionary<string, Tag>
|
||||
{
|
||||
{"Action".ToNormalized(), new TagBuilder("Action").Build()},
|
||||
{"Sci-fi".ToNormalized(), new TagBuilder("Sci-fi").Build()}
|
||||
};
|
||||
var tagCalled = new List<Tag>();
|
||||
var addedCount = 0;
|
||||
|
||||
TagHelper.UpdateTag(allTags, new[] {"Action", "Adventure"}, (tag, added) =>
|
||||
{
|
||||
if (added)
|
||||
{
|
||||
addedCount++;
|
||||
}
|
||||
tagCalled.Add(tag);
|
||||
});
|
||||
|
||||
Assert.Equal(1, addedCount);
|
||||
Assert.Equal(2, tagCalled.Count());
|
||||
Assert.Equal(3, allTags.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdateTag_ShouldNotAddDuplicateTag()
|
||||
{
|
||||
var allTags = new Dictionary<string, Tag>
|
||||
{
|
||||
{"Action".ToNormalized(), new TagBuilder("Action").Build()},
|
||||
{"Sci-fi".ToNormalized(), new TagBuilder("Sci-fi").Build()}
|
||||
};
|
||||
var tagCalled = new List<Tag>();
|
||||
var addedCount = 0;
|
||||
|
||||
TagHelper.UpdateTag(allTags, new[] {"Action", "Scifi"}, (tag, added) =>
|
||||
{
|
||||
if (added)
|
||||
{
|
||||
addedCount++;
|
||||
}
|
||||
tagCalled.Add(tag);
|
||||
});
|
||||
|
||||
Assert.Equal(2, allTags.Count);
|
||||
Assert.Equal(0, addedCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddTag_ShouldAddOnlyNonExistingTag()
|
||||
{
|
||||
var existingTags = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
new TagBuilder("action").Build(),
|
||||
new TagBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
|
||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Action").Build());
|
||||
Assert.Equal(3, existingTags.Count);
|
||||
|
||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("action").Build());
|
||||
Assert.Equal(3, existingTags.Count);
|
||||
|
||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Shonen").Build());
|
||||
Assert.Equal(4, existingTags.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void KeepOnlySamePeopleBetweenLists()
|
||||
{
|
||||
var existingTags = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
new TagBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
var peopleFromChapters = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
};
|
||||
|
||||
var tagRemoved = new List<Tag>();
|
||||
TagHelper.KeepOnlySameTagBetweenLists(existingTags,
|
||||
peopleFromChapters, tag =>
|
||||
{
|
||||
tagRemoved.Add(tag);
|
||||
});
|
||||
|
||||
Assert.Single(tagRemoved);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RemoveEveryoneIfNothingInRemoveAllExcept()
|
||||
{
|
||||
var existingTags = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
new TagBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
var peopleFromChapters = new List<Tag>();
|
||||
|
||||
var tagRemoved = new List<Tag>();
|
||||
TagHelper.KeepOnlySameTagBetweenLists(existingTags,
|
||||
peopleFromChapters, tag =>
|
||||
{
|
||||
tagRemoved.Add(tag);
|
||||
});
|
||||
|
||||
Assert.Equal(2, tagRemoved.Count);
|
||||
}
|
||||
}
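Read together, these tests fix the UpdateTag contract: the dictionary is keyed by normalized names, an already-known name fires the callback with added == false, and a genuinely new name is built, stored, and fires it with added == true (which is why "Scifi" collapses onto the existing "Sci-fi"). A minimal sketch of a helper with that contract, assuming ToNormalized lower-cases and strips punctuation; the real implementation in API.Helpers may differ:

    public static void UpdateTag(Dictionary<string, Tag> allTags, IEnumerable<string> names, Action<Tag, bool> action)
    {
        foreach (var name in names)
        {
            var normalized = name.ToNormalized(); // assumed: lower-case, punctuation removed
            if (allTags.TryGetValue(normalized, out var existing))
            {
                action(existing, false);
                continue;
            }

            var tag = new TagBuilder(name).Build();
            allTags.Add(normalized, tag);
            action(tag, true);
        }
    }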
|
|
@ -1,5 +1,4 @@
|
|||
using System.IO;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
|
@ -9,54 +8,59 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Parsers;
|
||||
|
||||
public class BasicParserTests : AbstractFsTest
|
||||
public class BasicParserTests
|
||||
{
|
||||
private readonly BasicParser _parser;
|
||||
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||
private readonly string _rootDirectory;
|
||||
private const string RootDirectory = "C:/Books/";
|
||||
|
||||
public BasicParserTests()
|
||||
{
|
||||
var fileSystem = CreateFileSystem();
|
||||
_rootDirectory = Path.Join(DataDirectory, "Books/");
|
||||
fileSystem.AddDirectory(_rootDirectory);
|
||||
fileSystem.AddFile($"{_rootDirectory}Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory("C:/Books/");
|
||||
fileSystem.AddFile("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
|
||||
|
||||
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1 Chapter 2.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Chapter 3.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("$\"{RootDirectory}Accel World/Accel World Gaiden SP01.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Books/Accel World/Accel World - Volume 1.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Books/Accel World/Accel World - Volume 1 Chapter 2.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Books/Accel World/Accel World - Chapter 3.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Books/Accel World/Accel World Gaiden SP01.cbz", new MockFileData(""));
|
||||
|
||||
|
||||
fileSystem.AddFile($"{_rootDirectory}Accel World/cover.png", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Books/Accel World/cover.png", new MockFileData(""));
|
||||
|
||||
fileSystem.AddFile($"{_rootDirectory}Batman/Batman #1.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Books/Batman/Batman #1.cbz", new MockFileData(""));
|
||||
|
||||
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||
_parser = new BasicParser(ds, new ImageParser(ds));
|
||||
}
|
||||
|
||||
#region Parse_Books
|
||||
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region Parse_Manga
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is a loose-leaf cover in the manga library, that it is ignored
|
||||
/// Tests that when there is a loose leaf cover in the manga library, that it is ignored
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_JustCover_ShouldReturnNull()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Accel World/cover.png", $"{_rootDirectory}Accel World/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse(@"C:/Books/Accel World/cover.png", "C:/Books/Accel World/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is a loose-leaf cover in the manga library, that it is ignored
|
||||
/// Tests that when there is a loose leaf cover in the manga library, that it is ignored
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_OtherImage_ShouldReturnNull()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Accel World/page 01.png", $"{_rootDirectory}Accel World/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse(@"C:/Books/Accel World/page 01.png", "C:/Books/Accel World/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
}
|
||||
|
||||
|
@ -66,8 +70,8 @@ public class BasicParserTests : AbstractFsTest
|
|||
[Fact]
|
||||
public void Parse_MangaLibrary_VolumeAndChapterInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz", $"{_rootDirectory}Mujaki no Rakuen/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse("C:/Books/Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz", "C:/Books/Mujaki no Rakuen/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Mujaki no Rakuen", actual.Series);
|
||||
|
@ -82,9 +86,9 @@ public class BasicParserTests : AbstractFsTest
|
|||
[Fact]
|
||||
public void Parse_MangaLibrary_JustVolumeInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz",
|
||||
$"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse("C:/Books/Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz",
|
||||
"C:/Books/Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", actual.Series);
|
||||
|
@ -99,9 +103,9 @@ public class BasicParserTests : AbstractFsTest
|
|||
[Fact]
|
||||
public void Parse_MangaLibrary_JustChapterInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Beelzebub/Beelzebub_01_[Noodles].zip",
|
||||
$"{_rootDirectory}Beelzebub/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse("C:/Books/Beelzebub/Beelzebub_01_[Noodles].zip",
|
||||
"C:/Books/Beelzebub/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Beelzebub", actual.Series);
|
||||
|
@ -116,9 +120,9 @@ public class BasicParserTests : AbstractFsTest
|
|||
[Fact]
|
||||
public void Parse_MangaLibrary_SpecialMarkerInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr",
|
||||
$"{_rootDirectory}Summer Time Rendering/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse("C:/Books/Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr",
|
||||
"C:/Books/Summer Time Rendering/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Summer Time Rendering", actual.Series);
|
||||
|
@ -129,54 +133,18 @@ public class BasicParserTests : AbstractFsTest
|
|||
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when the filename parses as a special, it appropriately parses
|
||||
/// Tests that when the filename parses as a special, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_SpecialInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Volume SP01.cbr",
|
||||
$"{_rootDirectory}Summer Time Rendering/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse("C:/Books/Summer Time Rendering/Specials/Volume Omake.cbr",
|
||||
"C:/Books/Summer Time Rendering/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Summer Time Rendering", actual.Series);
|
||||
Assert.Equal("Volume", actual.Title);
|
||||
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when the filename parses as a special, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_SpecialInFilename2()
|
||||
{
|
||||
var actual = _parser.Parse("M:/Kimi wa Midara na Boku no Joou/Specials/[Renzokusei] Special 1 SP02.zip",
|
||||
"M:/Kimi wa Midara na Boku no Joou/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Kimi wa Midara na Boku no Joou", actual.Series);
|
||||
Assert.Equal("[Renzokusei] Special 1", actual.Title);
|
||||
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when the filename parses as a special, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_SpecialInFilename_StrangeNaming()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}My Dress-Up Darling/SP01 1. Special Name.cbz",
|
||||
_rootDirectory,
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("My Dress-Up Darling", actual.Series);
|
||||
Assert.Equal("1. Special Name", actual.Title);
|
||||
Assert.Equal("Volume Omake", actual.Title);
|
||||
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
|
@ -188,9 +156,9 @@ public class BasicParserTests : AbstractFsTest
|
|||
[Fact]
|
||||
public void Parse_MangaLibrary_EditionInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz",
|
||||
$"{_rootDirectory}Air Gear/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse("C:/Books/Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz",
|
||||
"C:/Books/Air Gear/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Air Gear", actual.Series);
|
||||
|
@ -209,9 +177,9 @@ public class BasicParserTests : AbstractFsTest
|
|||
[Fact]
|
||||
public void Parse_MangaBooks_JustVolumeInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Epubs/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub",
|
||||
$"{_rootDirectory}Epubs/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
var actual = _parser.Parse("C:/Books/Epubs/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub",
|
||||
"C:/Books/Epubs/",
|
||||
RootDirectory, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Harrison, Kim - The Good, The Bad, and the Undead - Hollows", actual.Series);
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Data.Metadata;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
|
|
|
@ -36,7 +36,7 @@ public class ComicVineParserTests
|
|||
public void Parse_SeriesWithComicInfo()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
|
||||
RootDirectory, LibraryType.ComicVine, true, new ComicInfo()
|
||||
RootDirectory, LibraryType.ComicVine, new ComicInfo()
|
||||
{
|
||||
Series = "Birds of Prey",
|
||||
Volume = "2002"
|
||||
|
@ -54,7 +54,7 @@ public class ComicVineParserTests
|
|||
public void Parse_SeriesWithDirectoryNameAsSeriesYear()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
|
||||
RootDirectory, LibraryType.ComicVine, true, null);
|
||||
RootDirectory, LibraryType.ComicVine, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey (2002)", actual.Series);
|
||||
|
@ -69,7 +69,7 @@ public class ComicVineParserTests
|
|||
public void Parse_SeriesWithADirectoryNameAsSeriesYear()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", "C:/Comics/DC Comics/",
|
||||
RootDirectory, LibraryType.ComicVine, true, null);
|
||||
RootDirectory, LibraryType.ComicVine, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey (1999)", actual.Series);
|
||||
|
@ -84,7 +84,7 @@ public class ComicVineParserTests
|
|||
public void Parse_FallbackToDirectoryNameOnly()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", "C:/Comics/DC Comics/",
|
||||
RootDirectory, LibraryType.ComicVine, true, null);
|
||||
RootDirectory, LibraryType.ComicVine, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Blood Syndicate", actual.Series);
|
||||
|
|
|
@ -33,7 +33,7 @@ public class DefaultParserTests
|
|||
[InlineData("C:/", "C:/Something Random/Mujaki no Rakuen SP01.cbz", "Something Random")]
|
||||
public void ParseFromFallbackFolders_FallbackShouldParseSeries(string rootDir, string inputPath, string expectedSeries)
|
||||
{
|
||||
var actual = _defaultParser.Parse(inputPath, rootDir, rootDir, LibraryType.Manga, true, null);
|
||||
var actual = _defaultParser.Parse(inputPath, rootDir, rootDir, LibraryType.Manga, null);
|
||||
if (actual == null)
|
||||
{
|
||||
Assert.NotNull(actual);
|
||||
|
@ -74,7 +74,7 @@ public class DefaultParserTests
|
|||
fs.AddFile(inputFile, new MockFileData(""));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var parser = new BasicParser(ds, new ImageParser(ds));
|
||||
var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, true, null);
|
||||
var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, null);
|
||||
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
||||
Assert.Equal(expectedParseInfo, actual.Series);
|
||||
}
|
||||
|
@ -90,7 +90,7 @@ public class DefaultParserTests
|
|||
fs.AddFile(inputFile, new MockFileData(""));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var parser = new BasicParser(ds, new ImageParser(ds));
|
||||
var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, true, null);
|
||||
var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, null);
|
||||
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
||||
Assert.Equal(expectedParseInfo, actual.Series);
|
||||
}
|
||||
|
@ -251,7 +251,7 @@ public class DefaultParserTests
|
|||
foreach (var file in expected.Keys)
|
||||
{
|
||||
var expectedInfo = expected[file];
|
||||
var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Manga, true, null);
|
||||
var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Manga, null);
|
||||
if (expectedInfo == null)
|
||||
{
|
||||
Assert.Null(actual);
|
||||
|
@ -289,7 +289,7 @@ public class DefaultParserTests
|
|||
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
var actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Monster #8", "E:/Manga", LibraryType.Manga, true, null);
|
||||
var actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Monster #8", "E:/Manga", LibraryType.Manga, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
@ -315,7 +315,7 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
|
||||
actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga",LibraryType.Manga, true, null);
|
||||
actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga",LibraryType.Manga, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
@ -341,7 +341,7 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
|
||||
actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, true, null);
|
||||
actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
@ -383,7 +383,7 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath
|
||||
};
|
||||
|
||||
var actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, true, null);
|
||||
var actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
|
@ -408,11 +408,11 @@ public class DefaultParserTests
|
|||
expected = new ParserInfo
|
||||
{
|
||||
Series = "Foo 50", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, IsSpecial = true,
|
||||
Chapters = Parser.DefaultChapter, Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
|
||||
Chapters = "50", Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
};
|
||||
|
||||
actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, true, null);
|
||||
actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expected.Format, actual.Format);
|
||||
|
@ -475,7 +475,7 @@ public class DefaultParserTests
|
|||
foreach (var file in expected.Keys)
|
||||
{
|
||||
var expectedInfo = expected[file];
|
||||
var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Comic, true, null);
|
||||
var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Comic, null);
|
||||
if (expectedInfo == null)
|
||||
{
|
||||
Assert.Null(actual);
|
||||
|
|
|
@ -34,7 +34,7 @@ public class ImageParserTests
|
|||
public void Parse_SeriesWithDirectoryName()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01/01.jpg", "C:/Comics/Birds of Prey/",
|
||||
RootDirectory, LibraryType.Image, true, null);
|
||||
RootDirectory, LibraryType.Image, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey", actual.Series);
|
||||
|
@ -48,7 +48,7 @@ public class ImageParserTests
|
|||
public void Parse_SeriesWithNoNestedChapter()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01 page 01.jpg", "C:/Comics/",
|
||||
RootDirectory, LibraryType.Image, true, null);
|
||||
RootDirectory, LibraryType.Image, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey", actual.Series);
|
||||
|
@ -62,7 +62,7 @@ public class ImageParserTests
|
|||
public void Parse_SeriesWithLooseImages()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey/page 01.jpg", "C:/Comics/",
|
||||
RootDirectory, LibraryType.Image, true, null);
|
||||
RootDirectory, LibraryType.Image, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey", actual.Series);
|
||||
|
|
|
@ -35,7 +35,7 @@ public class PdfParserTests
|
|||
{
|
||||
var actual = _parser.Parse("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf",
|
||||
"C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/",
|
||||
RootDirectory, LibraryType.Book, true, null);
|
||||
RootDirectory, LibraryType.Book, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("A Dictionary of Japanese Food - Ingredients and Culture", actual.Series);
|
||||
|
|
|
@ -21,4 +21,24 @@ public class BookParsingTests
|
|||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Book));
|
||||
}
|
||||
|
||||
// [Theory]
|
||||
// [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA", "@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA")]
|
||||
// [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url('fonts/font.css')", "@font-face{font-family:'syyskuu_repaleinen';src:url('TEST/fonts/font.css')")]
|
||||
// public void ReplaceFontSrcUrl(string input, string expected)
|
||||
// {
|
||||
// var apiBase = "TEST/";
|
||||
// var actual = API.Parser.Parser.FontSrcUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
|
||||
// Assert.Equal(expected, actual);
|
||||
// }
|
||||
//
|
||||
// [Theory]
|
||||
// [InlineData("@import url('font.css');", "@import url('TEST/font.css');")]
|
||||
// public void ReplaceImportSrcUrl(string input, string expected)
|
||||
// {
|
||||
// var apiBase = "TEST/";
|
||||
// var actual = API.Parser.Parser.CssImportUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
|
||||
// Assert.Equal(expected, actual);
|
||||
// }
|
||||
|
||||
}
|
||||
@ -1,6 +1,11 @@
|
|||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Parsing;
|
||||
|
||||
|
@ -51,15 +56,15 @@ public class ComicParsingTests
|
|||
[InlineData("Demon 012 (Sep 1973) c2c", "Demon")]
|
||||
[InlineData("Dragon Age - Until We Sleep 01 (of 03)", "Dragon Age - Until We Sleep")]
|
||||
[InlineData("Green Lantern v2 017 - The Spy-Eye that doomed Green Lantern v2", "Green Lantern")]
|
||||
[InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire Special - Adam Strange")]
|
||||
[InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis Extra - Rags Morales Sketches")]
|
||||
[InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire - Adam Strange")]
|
||||
[InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis - Rags Morales Sketches")]
|
||||
[InlineData("Daredevil - t6 - 10 - (2019)", "Daredevil")]
|
||||
[InlineData("Batgirl T2000 #57", "Batgirl")]
|
||||
[InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")]
|
||||
[InlineData("Conquistador_-Tome_2", "Conquistador")]
|
||||
[InlineData("Max_l_explorateur-_Tome_0", "Max l explorateur")]
|
||||
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")]
|
||||
[InlineData("Bd Fr-Aldebaran-Antares-t6", "Bd Fr-Aldebaran-Antares")]
|
||||
[InlineData("Bd Fr-Aldebaran-Antares-t6", "Aldebaran-Antares")]
|
||||
[InlineData("Tintin - T22 Vol 714 pour Sydney", "Tintin")]
|
||||
[InlineData("Fables 2010 Vol. 1 Legends in Exile", "Fables 2010")]
|
||||
[InlineData("Kebab Том 1 Глава 1", "Kebab")]
|
||||
|
@ -68,41 +73,41 @@ public class ComicParsingTests
|
|||
[InlineData("SKY WORLD สกายเวิลด์ เล่มที่ 1", "SKY WORLD สกายเวิลด์")]
|
||||
public void ParseComicSeriesTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, Parser.ParseComicSeries(filename));
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(filename));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("01 Spider-Man & Wolverine 01.cbr", Parser.LooseLeafVolume)]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Catwoman - Trail of the Gun 01", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Daredevil - King of New York", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Robin the Teen Wonder #0", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Wildcat (1 of 3)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman And Superman World's Finest #01", Parser.LooseLeafVolume)]
|
||||
[InlineData("Babe 01", Parser.LooseLeafVolume)]
|
||||
[InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", Parser.LooseLeafVolume)]
|
||||
[InlineData("01 Spider-Man & Wolverine 01.cbr", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Catwoman - Trail of the Gun 01", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Daredevil - King of New York", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Robin the Teen Wonder #0", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Wildcat (1 of 3)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman And Superman World's Finest #01", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Babe 01", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
|
||||
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Superman v1 024 (09-10 1943)", "1")]
|
||||
[InlineData("Superman v1.5 024 (09-10 1943)", "1.5")]
|
||||
[InlineData("Amazing Man Comics chapter 25", Parser.LooseLeafVolume)]
|
||||
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", Parser.LooseLeafVolume)]
|
||||
[InlineData("spawn-123", Parser.LooseLeafVolume)]
|
||||
[InlineData("spawn-chapter-123", Parser.LooseLeafVolume)]
|
||||
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 04 (of 6) (1999)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 001 (2012)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 2.0 001 (2013)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Amazing Man Comics chapter 25", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("spawn-123", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("spawn-chapter-123", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 04 (of 6) (1999)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 001 (2012)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 2.0 001 (2013)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")]
|
||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.LooseLeafVolume)]
|
||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")]
|
||||
[InlineData("Batgirl V2000 #57", "2000")]
|
||||
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", Parser.LooseLeafVolume)]
|
||||
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Daredevil - v6 - 10 - (2019)", "6")]
|
||||
[InlineData("Daredevil - v6.5", "6.5")]
|
||||
// Tome Tests
|
||||
|
@ -112,25 +117,25 @@ public class ComicParsingTests
|
|||
[InlineData("Conquistador_Tome_2", "2")]
|
||||
[InlineData("Max_l_explorateur-_Tome_0", "0")]
|
||||
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")]
|
||||
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")]
|
||||
// Russian Tests
|
||||
[InlineData("Kebab Том 1 Глава 3", "1")]
|
||||
[InlineData("Манга Глава 2", Parser.LooseLeafVolume)]
|
||||
[InlineData("Манга Глава 2", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("ย้อนเวลากลับมาร้าย เล่ม 1", "1")]
|
||||
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "1")]
|
||||
[InlineData("วิวาห์รัก เดิมพันชีวิต ตอนที่ 2", Parser.LooseLeafVolume)]
|
||||
[InlineData("วิวาห์รัก เดิมพันชีวิต ตอนที่ 2", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
public void ParseComicVolumeTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, Parser.ParseComicVolume(filename));
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(filename));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("01 Spider-Man & Wolverine 01.cbr", "1")]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.DefaultChapter)]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.DefaultChapter)]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Batman & Catwoman - Trail of the Gun 01", "1")]
|
||||
[InlineData("Batman & Daredevil - King of New York", Parser.DefaultChapter)]
|
||||
[InlineData("Batman & Daredevil - King of New York", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")]
|
||||
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
|
||||
[InlineData("Batman & Wildcat (1 of 3)", "1")]
|
||||
|
@ -154,8 +159,8 @@ public class ComicParsingTests
|
|||
[InlineData("Batman Beyond 001 (2012)", "1")]
|
||||
[InlineData("Batman Beyond 2.0 001 (2013)", "1")]
|
||||
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")]
|
||||
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", Parser.DefaultChapter)]
|
||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.DefaultChapter)]
|
||||
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")]
|
||||
[InlineData("Batgirl V2000 #57", "57")]
|
||||
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")]
|
||||
|
@ -164,7 +169,7 @@ public class ComicParsingTests
|
|||
[InlineData("Daredevil - v6 - 10 - (2019)", "10")]
|
||||
[InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")]
|
||||
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")]
|
||||
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", Parser.DefaultChapter)]
|
||||
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Kebab Том 1 Глава 3", "3")]
|
||||
[InlineData("Манга Глава 2", "2")]
|
||||
[InlineData("Манга 2 Глава", "2")]
|
||||
|
@ -174,35 +179,35 @@ public class ComicParsingTests
|
|||
[InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
|
||||
public void ParseComicChapterTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, Parser.ParseChapter(filename, LibraryType.Comic));
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename, LibraryType.Comic));
|
||||
}
|
||||
|
||||
|
||||
[Theory]
|
||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", false)]
|
||||
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", false)]
|
||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)]
|
||||
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)]
|
||||
[InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)]
|
||||
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", false)]
|
||||
[InlineData("Boule et Bill - THS -Bill à disparu", false)]
|
||||
[InlineData("Asterix - HS - Les 12 travaux d'Astérix", false)]
|
||||
[InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", false)]
|
||||
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)]
|
||||
[InlineData("Boule et Bill - THS -Bill à disparu", true)]
|
||||
[InlineData("Asterix - HS - Les 12 travaux d'Astérix", true)]
|
||||
[InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", true)]
|
||||
[InlineData("laughs", false)]
|
||||
[InlineData("Annual Days of Summer", false)]
|
||||
[InlineData("Adventure Time 2013 Annual #001 (2013)", false)]
|
||||
[InlineData("Adventure Time 2013_Annual_#001 (2013)", false)]
|
||||
[InlineData("Adventure Time 2013_-_Annual #001 (2013)", false)]
|
||||
[InlineData("Annual Days of Summer", true)]
|
||||
[InlineData("Adventure Time 2013 Annual #001 (2013)", true)]
|
||||
[InlineData("Adventure Time 2013_Annual_#001 (2013)", true)]
|
||||
[InlineData("Adventure Time 2013_-_Annual #001 (2013)", true)]
|
||||
[InlineData("G.I. Joe - A Real American Hero Yearbook 004 Reprint (2021)", false)]
|
||||
[InlineData("Mazebook 001", false)]
|
||||
[InlineData("X-23 One Shot (2010)", false)]
|
||||
[InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", false)]
|
||||
[InlineData("Batman Beyond Annual", false)]
|
||||
[InlineData("Batman Beyond Bonus", false)]
|
||||
[InlineData("Batman Beyond OneShot", false)]
|
||||
[InlineData("Batman Beyond Specials", false)]
|
||||
[InlineData("Batman Beyond Omnibus (1999)", false)]
|
||||
[InlineData("Batman Beyond Omnibus", false)]
|
||||
[InlineData("01 Annual Batman Beyond", false)]
|
||||
[InlineData("Blood Syndicate Annual #001", false)]
|
||||
[InlineData("X-23 One Shot (2010)", true)]
|
||||
[InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", true)]
|
||||
[InlineData("Batman Beyond Annual", true)]
|
||||
[InlineData("Batman Beyond Bonus", true)]
|
||||
[InlineData("Batman Beyond OneShot", true)]
|
||||
[InlineData("Batman Beyond Specials", true)]
|
||||
[InlineData("Batman Beyond Omnibus (1999)", true)]
|
||||
[InlineData("Batman Beyond Omnibus", true)]
|
||||
[InlineData("01 Annual Batman Beyond", true)]
|
||||
[InlineData("Blood Syndicate Annual #001", true)]
|
||||
public void IsComicSpecialTest(string input, bool expected)
|
||||
{
|
||||
Assert.Equal(expected, Parser.IsSpecial(input, LibraryType.Comic));
|
||||
|
|
|
@ -34,7 +34,7 @@ public class ImageParsingTests
|
|||
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
var actual2 = _parser.Parse(filepath, @"E:\Manga\Monster #8", "E:/Manga", LibraryType.Image, true, null);
|
||||
var actual2 = _parser.Parse(filepath, @"E:\Manga\Monster #8", "E:/Manga", LibraryType.Image, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
@ -60,7 +60,7 @@ public class ImageParsingTests
|
|||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
|
||||
actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, true, null);
|
||||
actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
@ -86,7 +86,7 @@ public class ImageParsingTests
|
|||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
|
||||
actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, true, null);
|
||||
actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
|
|
@ -1,10 +1,18 @@
|
|||
using API.Entities.Enums;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Parsing;
|
||||
|
||||
public class MangaParsingTests
|
||||
{
|
||||
private readonly ITestOutputHelper _testOutputHelper;
|
||||
|
||||
public MangaParsingTests(ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
_testOutputHelper = testOutputHelper;
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
|
||||
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "1")]
|
||||
|
@ -68,6 +76,7 @@ public class MangaParsingTests
|
|||
[InlineData("Манга Тома 1-4", "1-4")]
|
||||
[InlineData("Манга Том 1-4", "1-4")]
|
||||
[InlineData("조선왕조실톡 106화", "106")]
|
||||
[InlineData("죽음 13회", "13")]
|
||||
[InlineData("동의보감 13장", "13")]
|
||||
[InlineData("몰?루 아카이브 7.5권", "7.5")]
|
||||
[InlineData("63권#200", "63")]
|
||||
|
@ -75,7 +84,6 @@ public class MangaParsingTests
|
|||
[InlineData("Accel World Chapter 001 Volume 002", "2")]
|
||||
[InlineData("Accel World Volume 2", "2")]
|
||||
[InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.31 Omake", "30")]
|
||||
[InlineData("Zom 100 - Bucket List of the Dead v01", "1")]
|
||||
public void ParseVolumeTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Manga));
|
||||
|
@ -131,6 +139,7 @@ public class MangaParsingTests
|
|||
[InlineData("Vagabond_v03", "Vagabond")]
|
||||
[InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")]
|
||||
[InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")]
|
||||
[InlineData("[BAA]_Darker_than_Black_Omake-1.zip", "Darker than Black")]
|
||||
[InlineData("Baketeriya ch01-05.zip", "Baketeriya")]
|
||||
[InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")]
|
||||
[InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")]
|
||||
|
@ -203,9 +212,6 @@ public class MangaParsingTests
|
|||
[InlineData("หนึ่งความคิด นิจนิรันดร์ เล่ม 2", "หนึ่งความคิด นิจนิรันดร์")]
|
||||
[InlineData("不安の種\uff0b - 01", "不安の種\uff0b")]
|
||||
[InlineData("Giant Ojou-sama - Ch. 33.5 - Volume 04 Bonus Chapter", "Giant Ojou-sama")]
|
||||
[InlineData("[218565]-(C92) [BRIO (Puyocha)] Mika-nee no Tanryoku Shidou - Mika s Guide to Self-Confidence (THE IDOLM@STE", "")]
|
||||
[InlineData("Monster #8 Ch. 001", "Monster #8")]
|
||||
[InlineData("Zom 100 - Bucket List of the Dead v01", "Zom 100 - Bucket List of the Dead")]
|
||||
public void ParseSeriesTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Manga));
|
||||
|
@ -298,7 +304,6 @@ public class MangaParsingTests
|
|||
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
|
||||
[InlineData("Max Level Returner ตอนที่ 5", "5")]
|
||||
[InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
|
||||
[InlineData("Monster #8 Ch. 001", "1")]
|
||||
public void ParseChaptersTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename, LibraryType.Manga));
|
||||
|
@ -321,18 +326,18 @@ public class MangaParsingTests
|
|||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input));
|
||||
}
|
||||
[Theory]
|
||||
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", false)]
|
||||
[InlineData("Beelzebub_Omake_June_2012_RHS", false)]
|
||||
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)]
|
||||
[InlineData("Beelzebub_Omake_June_2012_RHS", true)]
|
||||
[InlineData("Beelzebub_Side_Story_02_RHS.zip", false)]
|
||||
[InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", false)]
|
||||
[InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", false)]
|
||||
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", false)]
|
||||
[InlineData("Ani-Hina Art Collection.cbz", false)]
|
||||
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", false)]
|
||||
[InlineData("A Town Where You Live - Bonus Chapter.zip", false)]
|
||||
[InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)]
|
||||
[InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)]
|
||||
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)]
|
||||
[InlineData("Ani-Hina Art Collection.cbz", true)]
|
||||
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)]
|
||||
[InlineData("A Town Where You Live - Bonus Chapter.zip", true)]
|
||||
[InlineData("Yuki Merry - 4-Komga Anthology", false)]
|
||||
[InlineData("Beastars - SP01", true)]
|
||||
[InlineData("Beastars SP01", true)]
|
||||
[InlineData("Beastars - SP01", false)]
|
||||
[InlineData("Beastars SP01", false)]
|
||||
[InlineData("The League of Extraordinary Gentlemen", false)]
|
||||
[InlineData("The League of Extra-ordinary Gentlemen", false)]
|
||||
[InlineData("Dr. Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire)", false)]
|
||||
|
|
|
@ -11,14 +11,14 @@ public class ParserInfoTests
|
|||
{
|
||||
var p1 = new ParserInfo()
|
||||
{
|
||||
Chapters = Parser.DefaultChapter,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
|
||||
Edition = "",
|
||||
Format = MangaFormat.Archive,
|
||||
FullFilePath = "/manga/darker than black.cbz",
|
||||
IsSpecial = false,
|
||||
Series = "darker than black",
|
||||
Title = "darker than black",
|
||||
Volumes = Parser.LooseLeafVolume
|
||||
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var p2 = new ParserInfo()
|
||||
|
@ -30,7 +30,7 @@ public class ParserInfoTests
|
|||
IsSpecial = false,
|
||||
Series = "darker than black",
|
||||
Title = "Darker Than Black",
|
||||
Volumes = Parser.LooseLeafVolume
|
||||
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var expected = new ParserInfo()
|
||||
|
@ -42,7 +42,7 @@ public class ParserInfoTests
|
|||
IsSpecial = false,
|
||||
Series = "darker than black",
|
||||
Title = "darker than black",
|
||||
Volumes = Parser.LooseLeafVolume
|
||||
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||
};
|
||||
p1.Merge(p2);
|
||||
|
||||
|
@ -62,12 +62,12 @@ public class ParserInfoTests
|
|||
IsSpecial = true,
|
||||
Series = "darker than black",
|
||||
Title = "darker than black",
|
||||
Volumes = Parser.LooseLeafVolume
|
||||
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var p2 = new ParserInfo()
|
||||
{
|
||||
Chapters = Parser.DefaultChapter,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
|
||||
Edition = "",
|
||||
Format = MangaFormat.Archive,
|
||||
FullFilePath = "/manga/darker than black.cbz",
|
||||
|
|
|
@ -10,25 +10,11 @@ public class ParsingTests
|
|||
[Fact]
|
||||
public void ShouldWork()
|
||||
{
|
||||
var s = 6.5f.ToString(CultureInfo.InvariantCulture);
|
||||
var s = 6.5f + "";
|
||||
var a = float.Parse(s, CultureInfo.InvariantCulture);
|
||||
Assert.Equal(6.5f, a);
|
||||
|
||||
s = 6.5f + "";
|
||||
a = float.Parse(s, CultureInfo.CurrentCulture);
|
||||
Assert.Equal(6.5f, a);
|
||||
}
|
||||
|
||||
// [Theory]
|
||||
// [InlineData("de-DE")]
|
||||
// [InlineData("en-US")]
|
||||
// public void ShouldParse(string culture)
|
||||
// {
|
||||
// var s = 6.5f + "";
|
||||
// var a = float.Parse(s, CultureInfo.CreateSpecificCulture(culture));
|
||||
// Assert.Equal(6.5f, a);
|
||||
// }
|
||||
|
||||
[Theory]
|
||||
[InlineData("Joe Shmo, Green Blue", "Joe Shmo, Green Blue")]
|
||||
[InlineData("Shmo, Joe", "Shmo, Joe")]
|
||||
|
@ -43,7 +29,6 @@ public class ParsingTests
|
|||
[InlineData("DEAD Tube Prologue", "DEAD Tube Prologue")]
|
||||
[InlineData("DEAD Tube Prologue SP01", "DEAD Tube Prologue")]
|
||||
[InlineData("DEAD_Tube_Prologue SP01", "DEAD Tube Prologue")]
|
||||
[InlineData("SP01 1. DEAD Tube Prologue", "1. DEAD Tube Prologue")]
|
||||
public void CleanSpecialTitleTest(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, CleanSpecialTitle(input));
|
||||
|
@ -98,8 +83,7 @@ public class ParsingTests
|
|||
[InlineData("-The Title", false, "The Title")]
|
||||
[InlineData("- The Title", false, "The Title")]
|
||||
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")]
|
||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)",
|
||||
true, "Batman - Detective Comics - Rebirth Deluxe Edition Book 04")]
|
||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)", true, "Batman - Detective Comics - Rebirth Deluxe Edition")]
|
||||
[InlineData("Something - Full Color Edition", false, "Something - Full Color Edition")]
|
||||
[InlineData("Witchblade 089 (2005) (Bittertek-DCP) (Top Cow (Image Comics))", true, "Witchblade 089")]
|
||||
[InlineData("(C99) Kami-sama Hiroimashita. (SSSS.GRIDMAN)", false, "Kami-sama Hiroimashita.")]
|
||||
|
@ -251,7 +235,6 @@ public class ParsingTests
|
|||
[InlineData("ch1/backcover.png", false)]
|
||||
[InlineData("backcover.png", false)]
|
||||
[InlineData("back_cover.png", false)]
|
||||
[InlineData("LD Blacklands #1 35 (back cover).png", false)]
|
||||
public void IsCoverImageTest(string inputPath, bool expected)
|
||||
{
|
||||
Assert.Equal(expected, IsCoverImage(inputPath));
|
||||
|
@ -267,7 +250,6 @@ public class ParsingTests
|
|||
[InlineData("@recycle/Love Hina/", true)]
|
||||
[InlineData("E:/Test/__MACOSX/Love Hina/", true)]
|
||||
[InlineData("E:/Test/.caltrash/Love Hina/", true)]
|
||||
[InlineData("E:/Test/.yacreaderlibrary/Love Hina/", true)]
|
||||
public void HasBlacklistedFolderInPathTest(string inputPath, bool expected)
|
||||
{
|
||||
Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
|
||||
|
|
|
@ -15,6 +15,7 @@ using Microsoft.EntityFrameworkCore;
|
|||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Repository;
|
||||
|
||||
|
@ -1,280 +0,0 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.DTOs.Metadata.Browse;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Repository;
|
||||
|
||||
public class GenreRepositoryTests : AbstractDbTest
|
||||
{
|
||||
private AppUser _fullAccess;
|
||||
private AppUser _restrictedAccess;
|
||||
private AppUser _restrictedAgeAccess;
|
||||
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
Context.Genre.RemoveRange(Context.Genre);
|
||||
Context.Library.RemoveRange(Context.Library);
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private TestGenreSet CreateTestGenres()
|
||||
{
|
||||
return new TestGenreSet
|
||||
{
|
||||
SharedSeriesChaptersGenre = new GenreBuilder("Shared Series Chapter Genre").Build(),
|
||||
SharedSeriesGenre = new GenreBuilder("Shared Series Genre").Build(),
|
||||
SharedChaptersGenre = new GenreBuilder("Shared Chapters Genre").Build(),
|
||||
Lib0SeriesChaptersGenre = new GenreBuilder("Lib0 Series Chapter Genre").Build(),
|
||||
Lib0SeriesGenre = new GenreBuilder("Lib0 Series Genre").Build(),
|
||||
Lib0ChaptersGenre = new GenreBuilder("Lib0 Chapters Genre").Build(),
|
||||
Lib1SeriesChaptersGenre = new GenreBuilder("Lib1 Series Chapter Genre").Build(),
|
||||
Lib1SeriesGenre = new GenreBuilder("Lib1 Series Genre").Build(),
|
||||
Lib1ChaptersGenre = new GenreBuilder("Lib1 Chapters Genre").Build(),
|
||||
Lib1ChapterAgeGenre = new GenreBuilder("Lib1 Chapter Age Genre").Build()
|
||||
};
|
||||
}
|
||||
|
||||
private async Task SeedDbWithGenres(TestGenreSet genres)
|
||||
{
|
||||
await CreateTestUsers();
|
||||
await AddGenresToContext(genres);
|
||||
await CreateLibrariesWithGenres(genres);
|
||||
await AssignLibrariesToUsers();
|
||||
}
|
||||
|
||||
private async Task CreateTestUsers()
|
||||
{
|
||||
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
|
||||
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
|
||||
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
|
||||
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
|
||||
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
|
||||
|
||||
Context.Users.Add(_fullAccess);
|
||||
Context.Users.Add(_restrictedAccess);
|
||||
Context.Users.Add(_restrictedAgeAccess);
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private async Task AddGenresToContext(TestGenreSet genres)
|
||||
{
|
||||
var allGenres = genres.GetAllGenres();
|
||||
Context.Genre.AddRange(allGenres);
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private async Task CreateLibrariesWithGenres(TestGenreSet genres)
|
||||
{
|
||||
var lib0 = new LibraryBuilder("lib0")
|
||||
.WithSeries(new SeriesBuilder("lib0-s0")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib0SeriesChaptersGenre, genres.Lib0SeriesGenre])
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib0SeriesChaptersGenre, genres.Lib0ChaptersGenre])
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
var lib1 = new LibraryBuilder("lib1")
|
||||
.WithSeries(new SeriesBuilder("lib1-s0")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1SeriesGenre])
|
||||
.WithAgeRating(AgeRating.Mature17Plus)
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre, genres.Lib1ChapterAgeGenre])
|
||||
.WithAgeRating(AgeRating.Mature17Plus)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.WithSeries(new SeriesBuilder("lib1-s1")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1SeriesGenre])
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
Context.Library.Add(lib0);
|
||||
Context.Library.Add(lib1);
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private async Task AssignLibrariesToUsers()
|
||||
{
|
||||
var lib0 = Context.Library.First(l => l.Name == "lib0");
|
||||
var lib1 = Context.Library.First(l => l.Name == "lib1");
|
||||
|
||||
_fullAccess.Libraries.Add(lib0);
|
||||
_fullAccess.Libraries.Add(lib1);
|
||||
_restrictedAccess.Libraries.Add(lib1);
|
||||
_restrictedAgeAccess.Libraries.Add(lib1);
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static Predicate<BrowseGenreDto> ContainsGenreCheck(Genre genre)
|
||||
{
|
||||
return g => g.Id == genre.Id;
|
||||
}
|
||||
|
||||
private static void AssertGenrePresent(IEnumerable<BrowseGenreDto> genres, Genre expectedGenre)
|
||||
{
|
||||
Assert.Contains(genres, ContainsGenreCheck(expectedGenre));
|
||||
}
|
||||
|
||||
private static void AssertGenreNotPresent(IEnumerable<BrowseGenreDto> genres, Genre expectedGenre)
|
||||
{
|
||||
Assert.DoesNotContain(genres, ContainsGenreCheck(expectedGenre));
|
||||
}
|
||||
|
||||
private static BrowseGenreDto GetGenreDto(IEnumerable<BrowseGenreDto> genres, Genre genre)
|
||||
{
|
||||
return genres.First(dto => dto.Id == genre.Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetBrowseableGenre_FullAccess_ReturnsAllGenresWithCorrectCounts()
|
||||
{
|
||||
// Arrange
|
||||
await ResetDb();
|
||||
var genres = CreateTestGenres();
|
||||
await SeedDbWithGenres(genres);
|
||||
|
||||
// Act
|
||||
var fullAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_fullAccess.Id, new UserParams());
|
||||
|
||||
// Assert
|
||||
Assert.Equal(genres.GetAllGenres().Count, fullAccessGenres.TotalCount);
|
||||
|
||||
foreach (var genre in genres.GetAllGenres())
|
||||
{
|
||||
AssertGenrePresent(fullAccessGenres, genre);
|
||||
}
|
||||
|
||||
// Verify counts - 1 lib0 series, 2 lib1 series = 3 total series
|
||||
Assert.Equal(3, GetGenreDto(fullAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
|
||||
Assert.Equal(6, GetGenreDto(fullAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
|
||||
Assert.Equal(1, GetGenreDto(fullAccessGenres, genres.Lib0SeriesGenre).SeriesCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetBrowseableGenre_RestrictedAccess_ReturnsOnlyAccessibleGenres()
|
||||
{
|
||||
// Arrange
|
||||
await ResetDb();
|
||||
var genres = CreateTestGenres();
|
||||
await SeedDbWithGenres(genres);
|
||||
|
||||
// Act
|
||||
var restrictedAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_restrictedAccess.Id, new UserParams());
|
||||
|
||||
// Assert - Should see: 3 shared + 4 library 1 specific = 7 genres
|
||||
Assert.Equal(7, restrictedAccessGenres.TotalCount);
|
||||
|
||||
// Verify shared and Library 1 genres are present
|
||||
AssertGenrePresent(restrictedAccessGenres, genres.SharedSeriesChaptersGenre);
|
||||
AssertGenrePresent(restrictedAccessGenres, genres.SharedSeriesGenre);
|
||||
AssertGenrePresent(restrictedAccessGenres, genres.SharedChaptersGenre);
|
||||
AssertGenrePresent(restrictedAccessGenres, genres.Lib1SeriesChaptersGenre);
|
||||
AssertGenrePresent(restrictedAccessGenres, genres.Lib1SeriesGenre);
|
||||
AssertGenrePresent(restrictedAccessGenres, genres.Lib1ChaptersGenre);
|
||||
AssertGenrePresent(restrictedAccessGenres, genres.Lib1ChapterAgeGenre);
|
||||
|
||||
// Verify Library 0 specific genres are not present
|
||||
AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0SeriesChaptersGenre);
|
||||
AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0SeriesGenre);
|
||||
AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0ChaptersGenre);
|
||||
|
||||
// Verify counts - 2 lib1 series
|
||||
Assert.Equal(2, GetGenreDto(restrictedAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
|
||||
Assert.Equal(4, GetGenreDto(restrictedAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
|
||||
Assert.Equal(2, GetGenreDto(restrictedAccessGenres, genres.Lib1SeriesGenre).SeriesCount);
|
||||
Assert.Equal(4, GetGenreDto(restrictedAccessGenres, genres.Lib1ChaptersGenre).ChapterCount);
|
||||
Assert.Equal(1, GetGenreDto(restrictedAccessGenres, genres.Lib1ChapterAgeGenre).ChapterCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetBrowseableGenre_RestrictedAgeAccess_FiltersAgeRestrictedContent()
|
||||
{
|
||||
// Arrange
|
||||
await ResetDb();
|
||||
var genres = CreateTestGenres();
|
||||
await SeedDbWithGenres(genres);
|
||||
|
||||
// Act
|
||||
var restrictedAgeAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_restrictedAgeAccess.Id, new UserParams());
|
||||
|
||||
// Assert - Should see: 3 shared + 3 lib1 specific = 6 genres (age-restricted genre filtered out)
|
||||
Assert.Equal(6, restrictedAgeAccessGenres.TotalCount);
|
||||
|
||||
// Verify accessible genres are present
|
||||
AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre);
|
||||
AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedSeriesGenre);
|
||||
AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedChaptersGenre);
|
||||
AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1SeriesChaptersGenre);
|
||||
AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1SeriesGenre);
|
||||
AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1ChaptersGenre);
|
||||
|
||||
// Verify age-restricted genre is filtered out
|
||||
AssertGenreNotPresent(restrictedAgeAccessGenres, genres.Lib1ChapterAgeGenre);
|
||||
|
||||
// Verify counts - 1 series in lib1 (age-restricted series filtered out)
|
||||
Assert.Equal(1, GetGenreDto(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
|
||||
Assert.Equal(1, GetGenreDto(restrictedAgeAccessGenres, genres.Lib1SeriesGenre).SeriesCount);
|
||||
|
||||
// These values represent a bug - chapters are not properly filtered when their series is age-restricted
|
||||
// Should be 2, but currently returns 3 due to the filtering issue
|
||||
Assert.Equal(3, GetGenreDto(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
|
||||
Assert.Equal(3, GetGenreDto(restrictedAgeAccessGenres, genres.Lib1ChaptersGenre).ChapterCount);
|
||||
}
|
||||
|
||||
private class TestGenreSet
|
||||
{
|
||||
public Genre SharedSeriesChaptersGenre { get; set; }
|
||||
public Genre SharedSeriesGenre { get; set; }
|
||||
public Genre SharedChaptersGenre { get; set; }
|
||||
public Genre Lib0SeriesChaptersGenre { get; set; }
|
||||
public Genre Lib0SeriesGenre { get; set; }
|
||||
public Genre Lib0ChaptersGenre { get; set; }
|
||||
public Genre Lib1SeriesChaptersGenre { get; set; }
|
||||
public Genre Lib1SeriesGenre { get; set; }
|
||||
public Genre Lib1ChaptersGenre { get; set; }
|
||||
public Genre Lib1ChapterAgeGenre { get; set; }
|
||||
|
||||
public List<Genre> GetAllGenres()
|
||||
{
|
||||
return
|
||||
[
|
||||
SharedSeriesChaptersGenre, SharedSeriesGenre, SharedChaptersGenre,
|
||||
Lib0SeriesChaptersGenre, Lib0SeriesGenre, Lib0ChaptersGenre,
|
||||
Lib1SeriesChaptersGenre, Lib1SeriesGenre, Lib1ChaptersGenre, Lib1ChapterAgeGenre
|
||||
];
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,342 +0,0 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.DTOs.Metadata.Browse;
|
||||
using API.DTOs.Metadata.Browse.Requests;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Person;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Repository;
|
||||
|
||||
public class PersonRepositoryTests : AbstractDbTest
|
||||
{
|
||||
private AppUser _fullAccess;
|
||||
private AppUser _restrictedAccess;
|
||||
private AppUser _restrictedAgeAccess;
|
||||
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
Context.Person.RemoveRange(Context.Person.ToList());
|
||||
Context.Library.RemoveRange(Context.Library.ToList());
|
||||
Context.AppUser.RemoveRange(Context.AppUser.ToList());
|
||||
await UnitOfWork.CommitAsync();
|
||||
}
|
||||
|
||||
private async Task SeedDb()
|
||||
{
|
||||
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
|
||||
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
|
||||
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
|
||||
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
|
||||
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
|
||||
|
||||
Context.AppUser.Add(_fullAccess);
|
||||
Context.AppUser.Add(_restrictedAccess);
|
||||
Context.AppUser.Add(_restrictedAgeAccess);
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
var people = CreateTestPeople();
|
||||
Context.Person.AddRange(people);
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
var libraries = CreateTestLibraries(people);
|
||||
Context.Library.AddRange(libraries);
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
_fullAccess.Libraries.Add(libraries[0]); // lib0
|
||||
_fullAccess.Libraries.Add(libraries[1]); // lib1
|
||||
_restrictedAccess.Libraries.Add(libraries[1]); // lib1 only
|
||||
_restrictedAgeAccess.Libraries.Add(libraries[1]); // lib1 only
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static List<Person> CreateTestPeople()
|
||||
{
|
||||
return new List<Person>
|
||||
{
|
||||
new PersonBuilder("Shared Series Chapter Person").Build(),
|
||||
new PersonBuilder("Shared Series Person").Build(),
|
||||
new PersonBuilder("Shared Chapters Person").Build(),
|
||||
new PersonBuilder("Lib0 Series Chapter Person").Build(),
|
||||
new PersonBuilder("Lib0 Series Person").Build(),
|
||||
new PersonBuilder("Lib0 Chapters Person").Build(),
|
||||
new PersonBuilder("Lib1 Series Chapter Person").Build(),
|
||||
new PersonBuilder("Lib1 Series Person").Build(),
|
||||
new PersonBuilder("Lib1 Chapters Person").Build(),
|
||||
new PersonBuilder("Lib1 Chapter Age Person").Build()
|
||||
};
|
||||
}
|
||||
|
||||
private static List<Library> CreateTestLibraries(List<Person> people)
|
||||
{
|
||||
var lib0 = new LibraryBuilder("lib0")
|
||||
.WithSeries(new SeriesBuilder("lib0-s0")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Writer)
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Writer)
|
||||
.WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Writer)
|
||||
.WithPerson(GetPersonByName(people, "Lib0 Series Person"), PersonRole.Writer)
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Colorist)
|
||||
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Colorist)
|
||||
.WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Colorist)
|
||||
.WithPerson(GetPersonByName(people, "Lib0 Chapters Person"), PersonRole.Colorist)
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Editor)
|
||||
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Editor)
|
||||
.WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Editor)
|
||||
.WithPerson(GetPersonByName(people, "Lib0 Chapters Person"), PersonRole.Editor)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
var lib1 = new LibraryBuilder("lib1")
|
||||
.WithSeries(new SeriesBuilder("lib1-s0")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Letterer)
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Letterer)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Letterer)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Series Person"), PersonRole.Letterer)
|
||||
.WithAgeRating(AgeRating.Mature17Plus)
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Imprint)
|
||||
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Imprint)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Imprint)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Imprint)
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.CoverArtist)
|
||||
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.CoverArtist)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.CoverArtist)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.CoverArtist)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Chapter Age Person"), PersonRole.CoverArtist)
|
||||
.WithAgeRating(AgeRating.Mature17Plus)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.WithSeries(new SeriesBuilder("lib1-s1")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Inker)
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Inker)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Inker)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Series Person"), PersonRole.Inker)
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Team)
|
||||
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Team)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Team)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Team)
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Translator)
|
||||
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Translator)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Translator)
|
||||
.WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Translator)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
return new List<Library> { lib0, lib1 };
|
||||
}
|
||||
|
||||
private static Person GetPersonByName(List<Person> people, string name)
|
||||
{
|
||||
return people.First(p => p.Name == name);
|
||||
}
|
||||
|
||||
private Person GetPersonByName(string name)
|
||||
{
|
||||
return Context.Person.First(p => p.Name == name);
|
||||
}
|
||||
|
||||
private static Predicate<BrowsePersonDto> ContainsPersonCheck(Person person)
|
||||
{
|
||||
return p => p.Id == person.Id;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetBrowsePersonDtos()
|
||||
{
|
||||
await ResetDb();
|
||||
await SeedDb();
|
||||
|
||||
// Get people from database for assertions
|
||||
var sharedSeriesChaptersPerson = GetPersonByName("Shared Series Chapter Person");
|
||||
var lib0SeriesPerson = GetPersonByName("Lib0 Series Person");
|
||||
var lib1SeriesPerson = GetPersonByName("Lib1 Series Person");
|
||||
var lib1ChapterAgePerson = GetPersonByName("Lib1 Chapter Age Person");
|
||||
var allPeople = Context.Person.ToList();
|
||||
|
||||
var fullAccessPeople =
|
||||
await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_fullAccess.Id, new BrowsePersonFilterDto(),
|
||||
new UserParams());
|
||||
Assert.Equal(allPeople.Count, fullAccessPeople.TotalCount);
|
||||
|
||||
foreach (var person in allPeople)
|
||||
Assert.Contains(fullAccessPeople, ContainsPersonCheck(person));
|
||||
|
||||
// 1 series in lib0, 2 series in lib1
|
||||
Assert.Equal(3, fullAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).SeriesCount);
|
||||
// 3 series, each with 2 chapters
|
||||
Assert.Equal(6, fullAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).ChapterCount);
|
||||
// 1 series in lib0
|
||||
Assert.Equal(1, fullAccessPeople.First(dto => dto.Id == lib0SeriesPerson.Id).SeriesCount);
|
||||
// 2 series in lib1
|
||||
Assert.Equal(2, fullAccessPeople.First(dto => dto.Id == lib1SeriesPerson.Id).SeriesCount);
|
||||
|
||||
var restrictedAccessPeople =
|
||||
await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_restrictedAccess.Id, new BrowsePersonFilterDto(),
|
||||
new UserParams());
|
||||
|
||||
Assert.Equal(7, restrictedAccessPeople.TotalCount);
|
||||
|
||||
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Series Chapter Person")));
|
||||
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Series Person")));
|
||||
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Chapters Person")));
|
||||
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Series Chapter Person")));
|
||||
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Series Person")));
|
||||
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Chapters Person")));
|
||||
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Chapter Age Person")));
|
||||
|
||||
// 2 series in lib1, no series in lib0
|
||||
Assert.Equal(2, restrictedAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).SeriesCount);
|
||||
// 2 series, each with 2 chapters
|
||||
Assert.Equal(4, restrictedAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).ChapterCount);
|
||||
// 2 series in lib1
|
||||
Assert.Equal(2, restrictedAccessPeople.First(dto => dto.Id == lib1SeriesPerson.Id).SeriesCount);
|
||||
|
||||
var restrictedAgeAccessPeople = await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_restrictedAgeAccess.Id,
|
||||
new BrowsePersonFilterDto(), new UserParams());
|
||||
|
||||
// Note: There is a potential bug here where a person who appears only in a different chapter of an age-restricted series will still show up
|
||||
Assert.Equal(6, restrictedAgeAccessPeople.TotalCount);
|
||||
|
||||
// No access to the age restricted chapter
|
||||
Assert.DoesNotContain(restrictedAgeAccessPeople, ContainsPersonCheck(lib1ChapterAgePerson));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetRolesForPersonByName()
|
||||
{
|
||||
await ResetDb();
|
||||
await SeedDb();
|
||||
|
||||
var sharedSeriesPerson = GetPersonByName("Shared Series Person");
|
||||
var sharedChaptersPerson = GetPersonByName("Shared Chapters Person");
|
||||
var lib1ChapterAgePerson = GetPersonByName("Lib1 Chapter Age Person");
|
||||
|
||||
var sharedSeriesRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _fullAccess.Id);
|
||||
var chapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _fullAccess.Id);
|
||||
var ageChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _fullAccess.Id);
|
||||
Assert.Equal(3, sharedSeriesRoles.Count());
|
||||
Assert.Equal(6, chapterRoles.Count());
|
||||
Assert.Single(ageChapterRoles);
|
||||
|
||||
var restrictedRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _restrictedAccess.Id);
|
||||
var restrictedChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _restrictedAccess.Id);
|
||||
var restrictedAgePersonChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _restrictedAccess.Id);
|
||||
Assert.Equal(2, restrictedRoles.Count());
|
||||
Assert.Equal(4, restrictedChapterRoles.Count());
|
||||
Assert.Single(restrictedAgePersonChapterRoles);
|
||||
|
||||
var restrictedAgeRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _restrictedAgeAccess.Id);
|
||||
var restrictedAgeChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _restrictedAgeAccess.Id);
|
||||
var restrictedAgeAgePersonChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _restrictedAgeAccess.Id);
|
||||
Assert.Single(restrictedAgeRoles);
|
||||
Assert.Equal(2, restrictedAgeChapterRoles.Count());
|
||||
// Note: There is a potential bug here where a person who appears only in a different chapter of an age-restricted series will still show up
|
||||
Assert.Empty(restrictedAgeAgePersonChapterRoles);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPersonDtoByName()
|
||||
{
|
||||
await ResetDb();
|
||||
await SeedDb();
|
||||
|
||||
var allPeople = Context.Person.ToList();
|
||||
|
||||
foreach (var person in allPeople)
|
||||
{
|
||||
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName(person.Name, _fullAccess.Id));
|
||||
}
|
||||
|
||||
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", _restrictedAccess.Id));
|
||||
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Shared Series Person", _restrictedAccess.Id));
|
||||
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", _restrictedAccess.Id));
|
||||
|
||||
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", _restrictedAgeAccess.Id));
|
||||
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", _restrictedAgeAccess.Id));
|
||||
// Note: There is a potential bug here where a person who appears only in a different chapter of an age-restricted series will still show up
|
||||
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Chapter Age Person", _restrictedAgeAccess.Id));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetSeriesKnownFor()
|
||||
{
|
||||
await ResetDb();
|
||||
await SeedDb();
|
||||
|
||||
var sharedSeriesPerson = GetPersonByName("Shared Series Person");
|
||||
var lib1SeriesPerson = GetPersonByName("Lib1 Series Person");
|
||||
|
||||
var series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _fullAccess.Id);
|
||||
Assert.Equal(3, series.Count());
|
||||
|
||||
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _restrictedAccess.Id);
|
||||
Assert.Equal(2, series.Count());
|
||||
|
||||
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _restrictedAgeAccess.Id);
|
||||
Assert.Single(series);
|
||||
|
||||
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(lib1SeriesPerson.Id, _restrictedAgeAccess.Id);
|
||||
Assert.Single(series);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetChaptersForPersonByRole()
|
||||
{
|
||||
await ResetDb();
|
||||
await SeedDb();
|
||||
|
||||
var sharedChaptersPerson = GetPersonByName("Shared Chapters Person");
|
||||
|
||||
// Lib0
|
||||
var chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Colorist);
|
||||
var restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Colorist);
|
||||
var restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Colorist);
|
||||
Assert.Single(chapters);
|
||||
Assert.Empty(restrictedChapters);
|
||||
Assert.Empty(restrictedAgeChapters);
|
||||
|
||||
// Lib1 - age restricted series
|
||||
chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Imprint);
|
||||
restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Imprint);
|
||||
restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Imprint);
|
||||
Assert.Single(chapters);
|
||||
Assert.Single(restrictedChapters);
|
||||
Assert.Empty(restrictedAgeChapters);
|
||||
|
||||
// Lib1 - not age restricted series
|
||||
chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Team);
|
||||
restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Team);
|
||||
restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Team);
|
||||
Assert.Single(chapters);
|
||||
Assert.Single(restrictedChapters);
|
||||
Assert.Single(restrictedAgeChapters);
|
||||
}
|
||||
}
@ -6,6 +6,7 @@ using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using API.Services;

@ -158,6 +159,4 @@ public class SeriesRepositoryTests
}
}

// TODO: GetSeriesDtoForLibraryIdV2Async Tests (On Deck)

}
@ -1,278 +0,0 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.DTOs.Metadata.Browse;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Repository;
|
||||
|
||||
public class TagRepositoryTests : AbstractDbTest
|
||||
{
|
||||
private AppUser _fullAccess;
|
||||
private AppUser _restrictedAccess;
|
||||
private AppUser _restrictedAgeAccess;
|
||||
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
Context.Tag.RemoveRange(Context.Tag);
|
||||
Context.Library.RemoveRange(Context.Library);
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private TestTagSet CreateTestTags()
|
||||
{
|
||||
return new TestTagSet
|
||||
{
|
||||
SharedSeriesChaptersTag = new TagBuilder("Shared Series Chapter Tag").Build(),
|
||||
SharedSeriesTag = new TagBuilder("Shared Series Tag").Build(),
|
||||
SharedChaptersTag = new TagBuilder("Shared Chapters Tag").Build(),
|
||||
Lib0SeriesChaptersTag = new TagBuilder("Lib0 Series Chapter Tag").Build(),
|
||||
Lib0SeriesTag = new TagBuilder("Lib0 Series Tag").Build(),
|
||||
Lib0ChaptersTag = new TagBuilder("Lib0 Chapters Tag").Build(),
|
||||
Lib1SeriesChaptersTag = new TagBuilder("Lib1 Series Chapter Tag").Build(),
|
||||
Lib1SeriesTag = new TagBuilder("Lib1 Series Tag").Build(),
|
||||
Lib1ChaptersTag = new TagBuilder("Lib1 Chapters Tag").Build(),
|
||||
Lib1ChapterAgeTag = new TagBuilder("Lib1 Chapter Age Tag").Build()
|
||||
};
|
||||
}
|
||||
|
||||
private async Task SeedDbWithTags(TestTagSet tags)
|
||||
{
|
||||
await CreateTestUsers();
|
||||
await AddTagsToContext(tags);
|
||||
await CreateLibrariesWithTags(tags);
|
||||
await AssignLibrariesToUsers();
|
||||
}
|
||||
|
||||
private async Task CreateTestUsers()
|
||||
{
|
||||
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
|
||||
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
|
||||
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
|
||||
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
|
||||
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
|
||||
|
||||
Context.Users.Add(_fullAccess);
|
||||
Context.Users.Add(_restrictedAccess);
|
||||
Context.Users.Add(_restrictedAgeAccess);
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private async Task AddTagsToContext(TestTagSet tags)
|
||||
{
|
||||
var allTags = tags.GetAllTags();
|
||||
Context.Tag.AddRange(allTags);
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private async Task CreateLibrariesWithTags(TestTagSet tags)
|
||||
{
|
||||
var lib0 = new LibraryBuilder("lib0")
|
||||
.WithSeries(new SeriesBuilder("lib0-s0")
|
||||
.WithMetadata(new SeriesMetadata
|
||||
{
|
||||
Tags = [tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib0SeriesChaptersTag, tags.Lib0SeriesTag]
|
||||
})
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib0SeriesChaptersTag, tags.Lib0ChaptersTag])
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
var lib1 = new LibraryBuilder("lib1")
|
||||
.WithSeries(new SeriesBuilder("lib1-s0")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib1SeriesChaptersTag, tags.Lib1SeriesTag])
|
||||
.WithAgeRating(AgeRating.Mature17Plus)
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag, tags.Lib1ChapterAgeTag])
|
||||
.WithAgeRating(AgeRating.Mature17Plus)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.WithSeries(new SeriesBuilder("lib1-s1")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib1SeriesChaptersTag, tags.Lib1SeriesTag])
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
|
||||
.WithAgeRating(AgeRating.Mature17Plus)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
Context.Library.Add(lib0);
|
||||
Context.Library.Add(lib1);
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private async Task AssignLibrariesToUsers()
|
||||
{
|
||||
var lib0 = Context.Library.First(l => l.Name == "lib0");
|
||||
var lib1 = Context.Library.First(l => l.Name == "lib1");
|
||||
|
||||
_fullAccess.Libraries.Add(lib0);
|
||||
_fullAccess.Libraries.Add(lib1);
|
||||
_restrictedAccess.Libraries.Add(lib1);
|
||||
_restrictedAgeAccess.Libraries.Add(lib1);
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static Predicate<BrowseTagDto> ContainsTagCheck(Tag tag)
|
||||
{
|
||||
return t => t.Id == tag.Id;
|
||||
}
|
||||
|
||||
private static void AssertTagPresent(IEnumerable<BrowseTagDto> tags, Tag expectedTag)
|
||||
{
|
||||
Assert.Contains(tags, ContainsTagCheck(expectedTag));
|
||||
}
|
||||
|
||||
private static void AssertTagNotPresent(IEnumerable<BrowseTagDto> tags, Tag expectedTag)
|
||||
{
|
||||
Assert.DoesNotContain(tags, ContainsTagCheck(expectedTag));
|
||||
}
|
||||
|
||||
private static BrowseTagDto GetTagDto(IEnumerable<BrowseTagDto> tags, Tag tag)
|
||||
{
|
||||
return tags.First(dto => dto.Id == tag.Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetBrowseableTag_FullAccess_ReturnsAllTagsWithCorrectCounts()
|
||||
{
|
||||
// Arrange
|
||||
await ResetDb();
|
||||
var tags = CreateTestTags();
|
||||
await SeedDbWithTags(tags);
|
||||
|
||||
// Act
|
||||
var fullAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_fullAccess.Id, new UserParams());
|
||||
|
||||
// Assert
|
||||
Assert.Equal(tags.GetAllTags().Count, fullAccessTags.TotalCount);
|
||||
|
||||
foreach (var tag in tags.GetAllTags())
|
||||
{
|
||||
AssertTagPresent(fullAccessTags, tag);
|
||||
}
|
||||
|
||||
// Verify counts - 1 series in lib0, 2 series in lib1 = 3 total series
|
||||
Assert.Equal(3, GetTagDto(fullAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
|
||||
Assert.Equal(6, GetTagDto(fullAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
|
||||
Assert.Equal(1, GetTagDto(fullAccessTags, tags.Lib0SeriesTag).SeriesCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetBrowseableTag_RestrictedAccess_ReturnsOnlyAccessibleTags()
|
||||
{
|
||||
// Arrange
|
||||
await ResetDb();
|
||||
var tags = CreateTestTags();
|
||||
await SeedDbWithTags(tags);
|
||||
|
||||
// Act
|
||||
var restrictedAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_restrictedAccess.Id, new UserParams());
|
||||
|
||||
// Assert - Should see: 3 shared + 4 library 1 specific = 7 tags
|
||||
Assert.Equal(7, restrictedAccessTags.TotalCount);
|
||||
|
||||
// Verify shared and Library 1 tags are present
|
||||
AssertTagPresent(restrictedAccessTags, tags.SharedSeriesChaptersTag);
|
||||
AssertTagPresent(restrictedAccessTags, tags.SharedSeriesTag);
|
||||
AssertTagPresent(restrictedAccessTags, tags.SharedChaptersTag);
|
||||
AssertTagPresent(restrictedAccessTags, tags.Lib1SeriesChaptersTag);
|
||||
AssertTagPresent(restrictedAccessTags, tags.Lib1SeriesTag);
|
||||
AssertTagPresent(restrictedAccessTags, tags.Lib1ChaptersTag);
|
||||
AssertTagPresent(restrictedAccessTags, tags.Lib1ChapterAgeTag);
|
||||
|
||||
// Verify Library 0 specific tags are not present
|
||||
AssertTagNotPresent(restrictedAccessTags, tags.Lib0SeriesChaptersTag);
|
||||
AssertTagNotPresent(restrictedAccessTags, tags.Lib0SeriesTag);
|
||||
AssertTagNotPresent(restrictedAccessTags, tags.Lib0ChaptersTag);
|
||||
|
||||
// Verify counts - 2 series lib1
|
||||
Assert.Equal(2, GetTagDto(restrictedAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
|
||||
Assert.Equal(4, GetTagDto(restrictedAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
|
||||
Assert.Equal(2, GetTagDto(restrictedAccessTags, tags.Lib1SeriesTag).SeriesCount);
|
||||
Assert.Equal(4, GetTagDto(restrictedAccessTags, tags.Lib1ChaptersTag).ChapterCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetBrowseableTag_RestrictedAgeAccess_FiltersAgeRestrictedContent()
|
||||
{
|
||||
// Arrange
|
||||
await ResetDb();
|
||||
var tags = CreateTestTags();
|
||||
await SeedDbWithTags(tags);
|
||||
|
||||
// Act
|
||||
var restrictedAgeAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_restrictedAgeAccess.Id, new UserParams());
|
||||
|
||||
// Assert - Should see: 3 shared + 3 lib1 specific = 6 tags (age-restricted tag filtered out)
|
||||
Assert.Equal(6, restrictedAgeAccessTags.TotalCount);
|
||||
|
||||
// Verify accessible tags are present
|
||||
AssertTagPresent(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag);
|
||||
AssertTagPresent(restrictedAgeAccessTags, tags.SharedSeriesTag);
|
||||
AssertTagPresent(restrictedAgeAccessTags, tags.SharedChaptersTag);
|
||||
AssertTagPresent(restrictedAgeAccessTags, tags.Lib1SeriesChaptersTag);
|
||||
AssertTagPresent(restrictedAgeAccessTags, tags.Lib1SeriesTag);
|
||||
AssertTagPresent(restrictedAgeAccessTags, tags.Lib1ChaptersTag);
|
||||
|
||||
// Verify age-restricted tag is filtered out
|
||||
AssertTagNotPresent(restrictedAgeAccessTags, tags.Lib1ChapterAgeTag);
|
||||
|
||||
// Verify counts - 1 series in lib1 (age-restricted series filtered out)
|
||||
Assert.Equal(1, GetTagDto(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
|
||||
Assert.Equal(2, GetTagDto(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
|
||||
Assert.Equal(1, GetTagDto(restrictedAgeAccessTags, tags.Lib1SeriesTag).SeriesCount);
|
||||
Assert.Equal(2, GetTagDto(restrictedAgeAccessTags, tags.Lib1ChaptersTag).ChapterCount);
|
||||
}
|
||||
|
||||
private class TestTagSet
|
||||
{
|
||||
public Tag SharedSeriesChaptersTag { get; set; }
|
||||
public Tag SharedSeriesTag { get; set; }
|
||||
public Tag SharedChaptersTag { get; set; }
|
||||
public Tag Lib0SeriesChaptersTag { get; set; }
|
||||
public Tag Lib0SeriesTag { get; set; }
|
||||
public Tag Lib0ChaptersTag { get; set; }
|
||||
public Tag Lib1SeriesChaptersTag { get; set; }
|
||||
public Tag Lib1SeriesTag { get; set; }
|
||||
public Tag Lib1ChaptersTag { get; set; }
|
||||
public Tag Lib1ChapterAgeTag { get; set; }
|
||||
|
||||
public List<Tag> GetAllTags()
|
||||
{
|
||||
return
|
||||
[
|
||||
SharedSeriesChaptersTag, SharedSeriesTag, SharedChaptersTag,
|
||||
Lib0SeriesChaptersTag, Lib0SeriesTag, Lib0ChaptersTag,
|
||||
Lib1SeriesChaptersTag, Lib1SeriesTag, Lib1ChaptersTag, Lib1ChapterAgeTag
|
||||
];
|
||||
}
|
||||
}
|
||||
}
@ -7,6 +7,7 @@ using System.Linq;
using API.Archive;
using API.Entities.Enums;
using API.Services;
using EasyCaching.Core;
using Microsoft.Extensions.Logging;
using NetVips;
using NSubstitute;

@ -28,7 +29,7 @@ public class ArchiveServiceTests
{
_testOutputHelper = testOutputHelper;
_archiveService = new ArchiveService(_logger, _directoryService,
new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService),
new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService, Substitute.For<IEasyCachingProviderFactory>()),
Substitute.For<IMediaErrorService>());
}

@ -166,7 +167,7 @@ public class ArchiveServiceTests
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
{
var ds = Substitute.For<DirectoryService>(_directoryServiceLogger, new FileSystem());
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds);
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds, Substitute.For<IEasyCachingProviderFactory>());
var archiveService = Substitute.For<ArchiveService>(_logger, ds, imageService, Substitute.For<IMediaErrorService>());

var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"));

@ -197,7 +198,7 @@ public class ArchiveServiceTests
[InlineData("sorting.zip", "sorting.expected.png")]
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
{
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService, Substitute.For<IEasyCachingProviderFactory>());
var archiveService = Substitute.For<ArchiveService>(_logger,
new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService,
Substitute.For<IMediaErrorService>());
@ -1,8 +1,10 @@
|
|||
using System.Data.Common;
|
||||
using System.Collections.Generic;
|
||||
using System.Data.Common;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
|
@ -19,7 +21,7 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class BackupServiceTests: AbstractFsTest
|
||||
public class BackupServiceTests
|
||||
{
|
||||
private readonly ILogger<BackupService> _logger = Substitute.For<ILogger<BackupService>>();
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
|
@ -29,6 +31,13 @@ public class BackupServiceTests: AbstractFsTest
|
|||
private readonly DbConnection _connection;
|
||||
private readonly DataContext _context;
|
||||
|
||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
||||
private const string LogDirectory = "C:/kavita/config/logs/";
|
||||
private const string ConfigDirectory = "C:/kavita/config/";
|
||||
private const string BookmarkDirectory = "C:/kavita/config/bookmarks";
|
||||
private const string ThemesDirectory = "C:/kavita/config/theme";
|
||||
|
||||
public BackupServiceTests()
|
||||
{
|
||||
|
@ -73,7 +82,7 @@ public class BackupServiceTests: AbstractFsTest
|
|||
|
||||
_context.ServerSetting.Update(setting);
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
||||
.Build());
|
||||
return await _context.SaveChangesAsync() > 0;
|
||||
}
|
||||
|
@ -85,6 +94,22 @@ public class BackupServiceTests: AbstractFsTest
|
|||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static MockFileSystem CreateFileSystem()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
||||
fileSystem.AddDirectory("C:/kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(LogDirectory);
|
||||
fileSystem.AddDirectory(ThemesDirectory);
|
||||
fileSystem.AddDirectory(BookmarkDirectory);
|
||||
fileSystem.AddDirectory("C:/data/");
|
||||
|
||||
return fileSystem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
using System.IO;
|
||||
using System.IO.Abstractions;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using EasyCaching.Core;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
@ -18,7 +17,7 @@ public class BookServiceTests
|
|||
{
|
||||
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
|
||||
_bookService = new BookService(_logger, directoryService,
|
||||
new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService)
|
||||
new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService, Substitute.For<IEasyCachingProviderFactory>())
|
||||
, Substitute.For<IMediaErrorService>());
|
||||
}
|
||||
|
||||
|
@ -82,64 +81,4 @@ public class BookServiceTests
|
|||
Assert.Equal("Accel World", comicInfo.Series);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ShouldHaveComicInfoForPdf()
|
||||
{
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||
var document = Path.Join(testDirectory, "test.pdf");
|
||||
var comicInfo = _bookService.GetComicInfo(document);
|
||||
Assert.NotNull(comicInfo);
|
||||
Assert.Equal("Variations Chromatiques de concert", comicInfo.Title);
|
||||
Assert.Equal("Georges Bizet \\(1838-1875\\)", comicInfo.Writer);
|
||||
}
|
||||
|
||||
//[Fact]
|
||||
public void ShouldUsePdfInfoDict()
|
||||
{
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Library/Books/PDFs");
|
||||
var document = Path.Join(testDirectory, "Rollo at Work SP01.pdf");
|
||||
var comicInfo = _bookService.GetComicInfo(document);
|
||||
Assert.NotNull(comicInfo);
|
||||
Assert.Equal("Rollo at Work", comicInfo.Title);
|
||||
Assert.Equal("Jacob Abbott", comicInfo.Writer);
|
||||
Assert.Equal(2008, comicInfo.Year);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ShouldHandleIndirectPdfObjects()
|
||||
{
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||
var document = Path.Join(testDirectory, "indirect.pdf");
|
||||
var comicInfo = _bookService.GetComicInfo(document);
|
||||
Assert.NotNull(comicInfo);
|
||||
Assert.Equal(2018, comicInfo.Year);
|
||||
Assert.Equal(8, comicInfo.Month);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FailGracefullyWithEncryptedPdf()
|
||||
{
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||
var document = Path.Join(testDirectory, "encrypted.pdf");
|
||||
var comicInfo = _bookService.GetComicInfo(document);
|
||||
Assert.Null(comicInfo);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SeriesFallBackToMetadataTitle()
|
||||
{
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
|
||||
var pdfParser = new PdfParser(ds);
|
||||
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||
var filePath = Path.Join(testDirectory, "Bizet-Variations_Chromatiques_de_concert_Theme_A4.pdf");
|
||||
|
||||
var comicInfo = _bookService.GetComicInfo(filePath);
|
||||
Assert.NotNull(comicInfo);
|
||||
|
||||
var parserInfo = pdfParser.Parse(filePath, testDirectory, ds.GetParentDirectoryName(testDirectory), LibraryType.Book, true, comicInfo);
|
||||
Assert.NotNull(parserInfo);
|
||||
Assert.Equal(parserInfo.Title, comicInfo.Title);
|
||||
Assert.Equal(parserInfo.Series, comicInfo.Title);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,9 +9,12 @@ using API.Data.Repositories;
|
|||
using API.DTOs.Reader;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.SignalR;
|
||||
using AutoMapper;
|
||||
using Microsoft.Data.Sqlite;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
@ -22,12 +25,17 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class BookmarkServiceTests: AbstractFsTest
|
||||
public class BookmarkServiceTests
|
||||
{
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
private readonly DbConnection _connection;
|
||||
private readonly DataContext _context;
|
||||
|
||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
||||
private const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
|
||||
|
||||
|
||||
public BookmarkServiceTests()
|
||||
{
|
||||
|
@ -80,7 +88,7 @@ Substitute.For<IMediaConversionService>());
|
|||
_context.ServerSetting.Update(setting);
|
||||
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
||||
.Build());
|
||||
return await _context.SaveChangesAsync() > 0;
|
||||
}
|
||||
|
@ -94,6 +102,20 @@ Substitute.For<IMediaConversionService>());
|
|||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static MockFileSystem CreateFileSystem()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
||||
fileSystem.AddDirectory("C:/kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(BookmarkDirectory);
|
||||
fileSystem.AddDirectory("C:/data/");
|
||||
|
||||
return fileSystem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region BookmarkPage
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
using System.Data.Common;
|
||||
using System.Collections.Generic;
|
||||
using System.Data.Common;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Data.Metadata;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
|
@ -50,17 +52,17 @@ internal class MockReadingItemServiceForCacheService : IReadingItemService
|
|||
throw new System.NotImplementedException();
|
||||
}
|
||||
|
||||
public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata = true)
|
||||
public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type)
|
||||
{
|
||||
throw new System.NotImplementedException();
|
||||
}
|
||||
|
||||
public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata = true)
|
||||
public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
|
||||
{
|
||||
throw new System.NotImplementedException();
|
||||
}
|
||||
}
|
||||
public class CacheServiceTests: AbstractFsTest
|
||||
public class CacheServiceTests
|
||||
{
|
||||
private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
|
@ -69,6 +71,11 @@ public class CacheServiceTests: AbstractFsTest
|
|||
private readonly DbConnection _connection;
|
||||
private readonly DataContext _context;
|
||||
|
||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
||||
private const string DataDirectory = "C:/data/";
|
||||
|
||||
public CacheServiceTests()
|
||||
{
|
||||
var contextOptions = new DbContextOptionsBuilder()
|
||||
|
@ -111,7 +118,7 @@ public class CacheServiceTests: AbstractFsTest
|
|||
_context.ServerSetting.Update(setting);
|
||||
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
||||
.Build());
|
||||
return await _context.SaveChangesAsync() > 0;
|
||||
}
|
||||
|
@ -123,6 +130,19 @@ public class CacheServiceTests: AbstractFsTest
|
|||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static MockFileSystem CreateFileSystem()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
||||
fileSystem.AddDirectory("C:/kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(DataDirectory);
|
||||
|
||||
return fileSystem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Ensure
|
||||
|
@ -243,7 +263,7 @@ public class CacheServiceTests: AbstractFsTest
|
|||
.WithFile(new MangaFileBuilder($"{DataDirectory}2.epub", MangaFormat.Epub).Build())
|
||||
.Build();
|
||||
cs.GetCachedFile(c);
|
||||
Assert.Equal($"{DataDirectory}1.epub", cs.GetCachedFile(c));
|
||||
Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
|
|
@ -1,13 +1,16 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Data.Repositories;
|
||||
using API.DTOs.Filtering;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
|
@@ -27,13 +30,14 @@ public class CleanupServiceTests : AbstractDbTest
private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
private readonly IReaderService _readerService;

public CleanupServiceTests() : base()
{
Context.Library.Add(new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
_context.Library.Add(new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
.Build());

_readerService = new ReaderService(UnitOfWork, Substitute.For<ILogger<ReaderService>>(), Substitute.For<IEventHub>(),
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(), Substitute.For<IEventHub>(),
Substitute.For<IImageService>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()), Substitute.For<IScrobblingService>());
}

@@ -43,11 +47,11 @@ public class CleanupServiceTests : AbstractDbTest

protected override async Task ResetDb()
{
Context.Series.RemoveRange(Context.Series.ToList());
Context.Users.RemoveRange(Context.Users.ToList());
Context.AppUserBookmark.RemoveRange(Context.AppUserBookmark.ToList());
_context.Series.RemoveRange(_context.Series.ToList());
_context.Users.RemoveRange(_context.Users.ToList());
_context.AppUserBookmark.RemoveRange(_context.AppUserBookmark.ToList());

await Context.SaveChangesAsync();
await _context.SaveChangesAsync();
}

#endregion

@ -68,18 +72,18 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
var s = new SeriesBuilder("Test 1").Build();
|
||||
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
|
||||
s.LibraryId = 1;
|
||||
Context.Series.Add(s);
|
||||
_context.Series.Add(s);
|
||||
s = new SeriesBuilder("Test 2").Build();
|
||||
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
|
||||
s.LibraryId = 1;
|
||||
Context.Series.Add(s);
|
||||
_context.Series.Add(s);
|
||||
s = new SeriesBuilder("Test 3").Build();
|
||||
s.CoverImage = $"{ImageService.GetSeriesFormat(1000)}.jpg";
|
||||
s.LibraryId = 1;
|
||||
Context.Series.Add(s);
|
||||
_context.Series.Add(s);
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
|
||||
await cleanupService.DeleteSeriesCoverImages();
|
||||
|
@ -102,16 +106,16 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
var s = new SeriesBuilder("Test 1").Build();
|
||||
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
|
||||
s.LibraryId = 1;
|
||||
Context.Series.Add(s);
|
||||
_context.Series.Add(s);
|
||||
s = new SeriesBuilder("Test 2").Build();
|
||||
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
|
||||
s.LibraryId = 1;
|
||||
Context.Series.Add(s);
|
||||
_context.Series.Add(s);
|
||||
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
await _context.SaveChangesAsync();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
|
||||
await cleanupService.DeleteSeriesCoverImages();
|
||||
|
@ -133,7 +137,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
await ResetDb();
|
||||
|
||||
// Add 2 series with cover images
|
||||
Context.Series.Add(new SeriesBuilder("Test 1")
|
||||
_context.Series.Add(new SeriesBuilder("Test 1")
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c01.jpg").Build())
|
||||
.WithCoverImage("v01_c01.jpg")
|
||||
|
@ -142,7 +146,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
.WithLibraryId(1)
|
||||
.Build());
|
||||
|
||||
Context.Series.Add(new SeriesBuilder("Test 2")
|
||||
_context.Series.Add(new SeriesBuilder("Test 2")
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c03.jpg").Build())
|
||||
.WithCoverImage("v01_c03.jpg")
|
||||
|
@ -152,9 +156,9 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
.Build());
|
||||
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
await _context.SaveChangesAsync();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
|
||||
await cleanupService.DeleteChapterCoverImages();
|
||||
|
@ -223,7 +227,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
// Delete all Series to reset state
|
||||
await ResetDb();
|
||||
|
||||
Context.Users.Add(new AppUser()
|
||||
_context.Users.Add(new AppUser()
|
||||
{
|
||||
UserName = "Joe",
|
||||
ReadingLists = new List<ReadingList>()
|
||||
|
@ -239,9 +243,9 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
}
|
||||
});
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
await _context.SaveChangesAsync();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
|
||||
await cleanupService.DeleteReadingListCoverImages();
|
||||
|
@ -260,7 +264,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
filesystem.AddFile($"{CacheDirectory}02.jpg", new MockFileData(""));
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
cleanupService.CleanupCacheAndTempDirectories();
|
||||
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
|
||||
|
@ -274,7 +278,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
filesystem.AddFile($"{CacheDirectory}subdir/02.jpg", new MockFileData(""));
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
cleanupService.CleanupCacheAndTempDirectories();
|
||||
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
|
||||
|
@ -297,7 +301,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
filesystem.AddFile($"{BackupDirectory}randomfile.zip", filesystemFile);
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
await cleanupService.CleanupBackups();
|
||||
Assert.Single(ds.GetFiles(BackupDirectory, searchOption: SearchOption.AllDirectories));
|
||||
|
@ -319,7 +323,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
});
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
await cleanupService.CleanupBackups();
|
||||
Assert.True(filesystem.File.Exists($"{BackupDirectory}randomfile.zip"));
|
||||
|
@ -343,7 +347,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
}
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
await cleanupService.CleanupLogs();
|
||||
Assert.Single(ds.GetFiles(LogDirectory, searchOption: SearchOption.AllDirectories));
|
||||
|
@ -372,7 +376,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
await cleanupService.CleanupLogs();
|
||||
Assert.True(filesystem.File.Exists($"{LogDirectory}kavita20200911.log"));
|
||||
|
@ -396,36 +400,36 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
.Build();
|
||||
series.Library = new LibraryBuilder("Test LIb").Build();
|
||||
|
||||
Context.Series.Add(series);
|
||||
_context.Series.Add(series);
|
||||
|
||||
|
||||
Context.AppUser.Add(new AppUser()
|
||||
_context.AppUser.Add(new AppUser()
|
||||
{
|
||||
UserName = "majora2007"
|
||||
});
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
await _context.SaveChangesAsync();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
|
||||
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
|
||||
await _readerService.MarkChaptersUntilAsRead(user, 1, 5);
|
||||
await Context.SaveChangesAsync();
|
||||
await _context.SaveChangesAsync();
|
||||
|
||||
// Validate correct chapters have read status
|
||||
Assert.Equal(1, (await UnitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1)).PagesRead);
|
||||
Assert.Equal(1, (await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1)).PagesRead);
|
||||
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
||||
Substitute.For<IEventHub>(),
|
||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||
|
||||
// Delete the Chapter
|
||||
Context.Chapter.Remove(c);
|
||||
await UnitOfWork.CommitAsync();
|
||||
Assert.Empty(await UnitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
|
||||
_context.Chapter.Remove(c);
|
||||
await _unitOfWork.CommitAsync();
|
||||
Assert.Empty(await _unitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
|
||||
|
||||
// NOTE: This may not be needed, the underlying DB structure seems fixed as of v0.7
|
||||
await cleanupService.CleanupDbEntries();
|
||||
|
||||
Assert.Empty(await UnitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
|
||||
Assert.Empty(await _unitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
|
@ -436,7 +440,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.Build();
|
||||
s.Library = new LibraryBuilder("Test LIb").Build();
|
||||
Context.Series.Add(s);
|
||||
_context.Series.Add(s);
|
||||
|
||||
var c = new AppUserCollection()
|
||||
{
|
||||
|
@ -446,24 +450,24 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
Items = new List<Series>() {s}
|
||||
};
|
||||
|
||||
Context.AppUser.Add(new AppUser()
|
||||
_context.AppUser.Add(new AppUser()
|
||||
{
|
||||
UserName = "majora2007",
|
||||
Collections = new List<AppUserCollection>() {c}
|
||||
});
|
||||
await Context.SaveChangesAsync();
|
||||
await _context.SaveChangesAsync();
|
||||
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
||||
Substitute.For<IEventHub>(),
|
||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||
|
||||
// Delete the Chapter
|
||||
Context.Series.Remove(s);
|
||||
await UnitOfWork.CommitAsync();
|
||||
_context.Series.Remove(s);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await cleanupService.CleanupDbEntries();
|
||||
|
||||
Assert.Empty(await UnitOfWork.CollectionTagRepository.GetAllCollectionsAsync());
|
||||
Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllCollectionsAsync());
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
@ -480,15 +484,15 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
.Build();
|
||||
|
||||
s.Library = new LibraryBuilder("Test LIb").Build();
|
||||
Context.Series.Add(s);
|
||||
_context.Series.Add(s);
|
||||
|
||||
var user = new AppUser()
|
||||
{
|
||||
UserName = "CleanupWantToRead_ShouldRemoveFullyReadSeries",
|
||||
};
|
||||
Context.AppUser.Add(user);
|
||||
_context.AppUser.Add(user);
|
||||
|
||||
await UnitOfWork.CommitAsync();
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Add want to read
|
||||
user.WantToRead = new List<AppUserWantToRead>()
|
||||
|
@ -498,12 +502,12 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
SeriesId = s.Id
|
||||
}
|
||||
};
|
||||
await UnitOfWork.CommitAsync();
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await _readerService.MarkSeriesAsRead(user, s.Id);
|
||||
await UnitOfWork.CommitAsync();
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
||||
Substitute.For<IEventHub>(),
|
||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||
|
||||
|
@ -511,77 +515,12 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
await cleanupService.CleanupWantToRead();
|
||||
|
||||
var wantToRead =
|
||||
await UnitOfWork.SeriesRepository.GetWantToReadForUserAsync(user.Id, new UserParams(), new FilterDto());
|
||||
await _unitOfWork.SeriesRepository.GetWantToReadForUserAsync(user.Id, new UserParams(), new FilterDto());
|
||||
|
||||
Assert.Equal(0, wantToRead.TotalCount);
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region ConsolidateProgress
|
||||
|
||||
[Fact]
|
||||
public async Task ConsolidateProgress_ShouldRemoveDuplicates()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
var s = new SeriesBuilder("Test ConsolidateProgress_ShouldRemoveDuplicates")
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithPages(3)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
s.Library = new LibraryBuilder("Test Lib").Build();
|
||||
Context.Series.Add(s);
|
||||
|
||||
var user = new AppUser()
|
||||
{
|
||||
UserName = "ConsolidateProgress_ShouldRemoveDuplicates",
|
||||
};
|
||||
Context.AppUser.Add(user);
|
||||
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Add 2 progress events
|
||||
user.Progresses ??= [];
|
||||
user.Progresses.Add(new AppUserProgress()
|
||||
{
|
||||
ChapterId = 1,
|
||||
VolumeId = 1,
|
||||
SeriesId = 1,
|
||||
LibraryId = s.LibraryId,
|
||||
PagesRead = 1,
|
||||
});
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Add a duplicate with higher page number
|
||||
user.Progresses.Add(new AppUserProgress()
|
||||
{
|
||||
ChapterId = 1,
|
||||
VolumeId = 1,
|
||||
SeriesId = 1,
|
||||
LibraryId = s.LibraryId,
|
||||
PagesRead = 3,
|
||||
});
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
Assert.Equal(2, (await UnitOfWork.AppUserProgressRepository.GetAllProgress()).Count());
|
||||
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||
Substitute.For<IEventHub>(),
|
||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||
|
||||
|
||||
await cleanupService.ConsolidateProgress();
|
||||
|
||||
var progress = await UnitOfWork.AppUserProgressRepository.GetAllProgress();
|
||||
|
||||
Assert.Single(progress);
|
||||
Assert.True(progress.First().PagesRead == 3);
|
||||
}
|
||||
#endregion
|
||||
|
||||
|
||||
#region EnsureChapterProgressIsCapped
|
||||
|
||||
|
@ -601,54 +540,54 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
{
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume).WithChapter(c).Build()
|
||||
};
|
||||
Context.Series.Add(s);
|
||||
_context.Series.Add(s);
|
||||
|
||||
var user = new AppUser()
|
||||
{
|
||||
UserName = "EnsureChapterProgressIsCapped",
|
||||
Progresses = new List<AppUserProgress>()
|
||||
};
|
||||
Context.AppUser.Add(user);
|
||||
_context.AppUser.Add(user);
|
||||
|
||||
await UnitOfWork.CommitAsync();
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await _readerService.MarkChaptersAsRead(user, s.Id, new List<Chapter>() {c});
|
||||
await UnitOfWork.CommitAsync();
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||
var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||
await _unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||
|
||||
Assert.NotNull(chapter);
|
||||
Assert.Equal(2, chapter.PagesRead);
|
||||
|
||||
// Update chapter to have 1 page
|
||||
c.Pages = 1;
|
||||
UnitOfWork.ChapterRepository.Update(c);
|
||||
await UnitOfWork.CommitAsync();
|
||||
_unitOfWork.ChapterRepository.Update(c);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||
chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||
await _unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||
Assert.NotNull(chapter);
|
||||
Assert.Equal(2, chapter.PagesRead);
|
||||
Assert.Equal(1, chapter.Pages);
|
||||
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
||||
Substitute.For<IEventHub>(),
|
||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||
|
||||
await cleanupService.EnsureChapterProgressIsCapped();
|
||||
chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||
chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||
await _unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||
|
||||
Assert.NotNull(chapter);
|
||||
Assert.Equal(1, chapter.PagesRead);
|
||||
|
||||
Context.AppUser.Remove(user);
|
||||
await UnitOfWork.CommitAsync();
|
||||
_context.AppUser.Remove(user);
|
||||
await _unitOfWork.CommitAsync();
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region CleanupBookmarks
|
||||
// #region CleanupBookmarks
|
||||
//
|
||||
// [Fact]
|
||||
// public async Task CleanupBookmarks_LeaveAllFiles()
|
||||
|
@ -785,5 +724,5 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
// Assert.Equal(1, ds.FileSystem.Directory.GetDirectories($"{BookmarkDirectory}1/1/").Length);
|
||||
// }
|
||||
//
|
||||
#endregion
|
||||
// #endregion
|
||||
}
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Constants;
|
||||
using API.Data;
|
||||
using API.Data.Repositories;
|
||||
using API.DTOs.Collection;
|
||||
|
@ -12,7 +10,6 @@ using API.Helpers.Builders;
|
|||
using API.Services;
|
||||
using API.Services.Plus;
|
||||
using API.SignalR;
|
||||
using Kavita.Common;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
|
@@ -23,24 +20,24 @@ public class CollectionTagServiceTests : AbstractDbTest
private readonly ICollectionTagService _service;
public CollectionTagServiceTests()
{
_service = new CollectionTagService(UnitOfWork, Substitute.For<IEventHub>());
_service = new CollectionTagService(_unitOfWork, Substitute.For<IEventHub>());
}

protected override async Task ResetDb()
{
Context.AppUserCollection.RemoveRange(Context.AppUserCollection.ToList());
Context.Library.RemoveRange(Context.Library.ToList());
_context.AppUserCollection.RemoveRange(_context.AppUserCollection.ToList());
_context.Library.RemoveRange(_context.Library.ToList());

await UnitOfWork.CommitAsync();
await _unitOfWork.CommitAsync();
}

private async Task SeedSeries()
{
if (Context.AppUserCollection.Any()) return;
if (_context.AppUserCollection.Any()) return;

var s1 = new SeriesBuilder("Series 1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Mature).Build()).Build();
var s2 = new SeriesBuilder("Series 2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.G).Build()).Build();
Context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
_context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
.WithSeries(s1)
.WithSeries(s2)
.Build());
@ -51,69 +48,11 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
new AppUserCollectionBuilder("Tag 1").WithItems(new []{s1}).Build(),
|
||||
new AppUserCollectionBuilder("Tag 2").WithItems(new []{s1, s2}).WithIsPromoted(true).Build()
|
||||
};
|
||||
UnitOfWork.UserRepository.Add(user);
|
||||
_unitOfWork.UserRepository.Add(user);
|
||||
|
||||
await UnitOfWork.CommitAsync();
|
||||
await _unitOfWork.CommitAsync();
|
||||
}
|
||||
|
||||
#region DeleteTag
|
||||
|
||||
[Fact]
|
||||
public async Task DeleteTag_ShouldDeleteTag_WhenTagExists()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Act
|
||||
var result = await _service.DeleteTag(1, user);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
var deletedTag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.Null(deletedTag);
|
||||
Assert.Single(user.Collections); // Only one collection should remain
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DeleteTag_ShouldReturnTrue_WhenTagDoesNotExist()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Act - Try to delete a non-existent tag
|
||||
var result = await _service.DeleteTag(999, user);
|
||||
|
||||
// Assert
|
||||
Assert.True(result); // Should return true because the tag is already "deleted"
|
||||
Assert.Equal(2, user.Collections.Count); // Both collections should remain
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DeleteTag_ShouldNotAffectOtherTags()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Act
|
||||
var result = await _service.DeleteTag(1, user);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
var remainingTag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(remainingTag);
|
||||
Assert.Equal("Tag 2", remainingTag.Title);
|
||||
Assert.True(remainingTag.Promoted);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region UpdateTag
|
||||
|
||||
[Fact]
|
||||
|
@ -121,12 +60,12 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
{
|
||||
await SeedSeries();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldUpdateFields").WithIsPromoted(true).Build());
|
||||
UnitOfWork.UserRepository.Update(user);
|
||||
await UnitOfWork.CommitAsync();
|
||||
_unitOfWork.UserRepository.Update(user);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
|
@ -137,7 +76,7 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
AgeRating = AgeRating.Unknown
|
||||
}, 1);
|
||||
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(3);
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(3);
|
||||
Assert.NotNull(tag);
|
||||
Assert.True(tag.Promoted);
|
||||
Assert.False(string.IsNullOrEmpty(tag.Summary));
|
||||
|
@ -151,12 +90,12 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
{
|
||||
await SeedSeries();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource").WithSource(ScrobbleProvider.Mal).Build());
|
||||
UnitOfWork.UserRepository.Update(user);
|
||||
await UnitOfWork.CommitAsync();
|
||||
_unitOfWork.UserRepository.Update(user);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
|
@ -167,194 +106,11 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
AgeRating = AgeRating.Unknown
|
||||
}, 1);
|
||||
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(3);
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(3);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource", tag.Title);
|
||||
Assert.False(string.IsNullOrEmpty(tag.Summary));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldThrowException_WhenTagDoesNotExist()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Act & Assert
|
||||
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Non-existent Tag",
|
||||
Id = 999, // Non-existent ID
|
||||
Promoted = false
|
||||
}, 1));
|
||||
|
||||
Assert.Equal("collection-doesnt-exist", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldThrowException_WhenUserDoesNotOwnTag()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Create a second user
|
||||
var user2 = new AppUserBuilder("user2", "user2", Seed.DefaultThemes.First()).Build();
|
||||
UnitOfWork.UserRepository.Add(user2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Act & Assert
|
||||
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1, // This belongs to user1
|
||||
Promoted = false
|
||||
}, 2)); // User with ID 2
|
||||
|
||||
Assert.Equal("access-denied", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldThrowException_WhenTitleIsEmpty()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Act & Assert
|
||||
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = " ", // Empty after trimming
|
||||
Id = 1,
|
||||
Promoted = false
|
||||
}, 1));
|
||||
|
||||
Assert.Equal("collection-tag-title-required", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldThrowException_WhenTitleAlreadyExists()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Act & Assert
|
||||
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 2", // Already exists
|
||||
Id = 1, // Trying to rename Tag 1 to Tag 2
|
||||
Promoted = false
|
||||
}, 1));
|
||||
|
||||
Assert.Equal("collection-tag-duplicate", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldUpdateCoverImageSettings()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Act
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
CoverImageLocked = true
|
||||
}, 1);
|
||||
|
||||
// Assert
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.True(tag.CoverImageLocked);
|
||||
|
||||
// Now test unlocking the cover image
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
CoverImageLocked = false
|
||||
}, 1);
|
||||
|
||||
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.False(tag.CoverImageLocked);
|
||||
Assert.Equal(string.Empty, tag.CoverImage);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldAllowPromoteForAdminRole()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Setup a user with admin role
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
await AddUserWithRole(user.Id, PolicyConstants.AdminRole);
|
||||
|
||||
|
||||
// Act - Try to promote a tag that wasn't previously promoted
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
Promoted = true
|
||||
}, 1);
|
||||
|
||||
// Assert
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.True(tag.Promoted);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldAllowPromoteForPromoteRole()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Setup a user with promote role
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Mock to return promote role for the user
|
||||
await AddUserWithRole(user.Id, PolicyConstants.PromoteRole);
|
||||
|
||||
// Act - Try to promote a tag that wasn't previously promoted
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
Promoted = true
|
||||
}, 1);
|
||||
|
||||
// Assert
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.True(tag.Promoted);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldNotChangePromotion_WhenUserHasNoPermission()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Setup a user with no special roles
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Act - Try to promote a tag without proper role
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
Promoted = true
|
||||
}, 1);
|
||||
|
||||
// Assert
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.False(tag.Promoted); // Should remain unpromoted
|
||||
}
|
||||
#endregion
|
||||
|
||||
|
||||
|
@ -365,17 +121,17 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
{
|
||||
await SeedSeries();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Tag 2 has 2 series
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(tag);
|
||||
|
||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||
var userCollections = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
var userCollections = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.Equal(2, userCollections!.Collections.Count);
|
||||
Assert.Single(tag.Items);
|
||||
Assert.Equal(1, tag.Items.Count);
|
||||
Assert.Equal(2, tag.Items.First().Id);
|
||||
}
|
||||
|
||||
|
@ -387,11 +143,11 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
{
|
||||
await SeedSeries();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Tag 2 has 2 series
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(tag);
|
||||
|
||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||
|
@ -407,123 +163,18 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
{
|
||||
await SeedSeries();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Tag 1 has 1 series
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
|
||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||
var tag2 = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
var tag2 = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.Null(tag2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldReturnFalse_WhenTagIsNull()
|
||||
{
|
||||
// Act
|
||||
var result = await _service.RemoveTagFromSeries(null, [1]);
|
||||
|
||||
// Assert
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldHandleEmptySeriesIdsList()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
var initialItemCount = tag.Items.Count;
|
||||
|
||||
// Act
|
||||
var result = await _service.RemoveTagFromSeries(tag, Array.Empty<int>());
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldHandleNonExistentSeriesIds()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
var initialItemCount = tag.Items.Count;
|
||||
|
||||
// Act - Try to remove a series that doesn't exist in the tag
|
||||
var result = await _service.RemoveTagFromSeries(tag, [999]);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldHandleNullItemsList()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
|
||||
// Force null items list
|
||||
tag.Items = null;
|
||||
UnitOfWork.CollectionTagRepository.Update(tag);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Act
|
||||
var result = await _service.RemoveTagFromSeries(tag, [1]);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
// The tag should not be removed since the items list was null, not empty
|
||||
var tagAfter = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.Null(tagAfter);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldUpdateAgeRating_WhenMultipleSeriesRemain()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Add a third series with a different age rating
|
||||
var s3 = new SeriesBuilder("Series 3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.PG).Build()).Build();
|
||||
Context.Library.First().Series.Add(s3);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Add series 3 to tag 2
|
||||
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(tag);
|
||||
tag.Items.Add(s3);
|
||||
UnitOfWork.CollectionTagRepository.Update(tag);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
// Act - Remove the series with Mature rating
|
||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||
|
||||
// Assert
|
||||
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal(2, tag.Items.Count);
|
||||
|
||||
// The age rating should be updated to the highest remaining rating (PG)
|
||||
Assert.Equal(AgeRating.PG, tag.AgeRating);
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
||||
|
|
|
@ -1,117 +0,0 @@
|
|||
using System.IO;
|
||||
using System.IO.Abstractions;
|
||||
using System.Reflection;
|
||||
using System.Threading.Tasks;
|
||||
using API.Constants;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Metadata;
|
||||
using API.SignalR;
|
||||
using EasyCaching.Core;
|
||||
using Kavita.Common;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class CoverDbServiceTests : AbstractDbTest
|
||||
{
|
||||
private readonly DirectoryService _directoryService;
|
||||
private readonly IEasyCachingProviderFactory _cacheFactory = Substitute.For<IEasyCachingProviderFactory>();
|
||||
private readonly ICoverDbService _coverDbService;
|
||||
|
||||
private static readonly string FaviconPath = Path.Join(Directory.GetCurrentDirectory(),
|
||||
"../../../Services/Test Data/CoverDbService/Favicons");
|
||||
/// <summary>
|
||||
/// Path to download files temp to. Should be empty after each test.
|
||||
/// </summary>
|
||||
private static readonly string TempPath = Path.Join(Directory.GetCurrentDirectory(),
|
||||
"../../../Services/Test Data/CoverDbService/Temp");
|
||||
|
||||
public CoverDbServiceTests()
|
||||
{
|
||||
_directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), CreateFileSystem());
|
||||
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
|
||||
|
||||
_coverDbService = new CoverDbService(Substitute.For<ILogger<CoverDbService>>(), _directoryService, _cacheFactory,
|
||||
Substitute.For<IHostEnvironment>(), imageService, UnitOfWork, Substitute.For<IEventHub>());
|
||||
}
|
||||
|
||||
protected override Task ResetDb()
|
||||
{
|
||||
throw new System.NotImplementedException();
|
||||
}
|
||||
|
||||
|
||||
#region Download Favicon
|
||||
|
||||
/// <summary>
|
||||
/// I cannot figure out how to test this code due to the reliance on the _directoryService.FaviconDirectory and not being
|
||||
/// able to redirect it to the real filesystem.
|
||||
/// </summary>
|
||||
public async Task DownloadFaviconAsync_ShouldDownloadAndMatchExpectedFavicon()
|
||||
{
|
||||
// Arrange
|
||||
var testUrl = "https://anilist.co/anime/6205/Kmpfer/";
|
||||
var encodeFormat = EncodeFormat.WEBP;
|
||||
var expectedFaviconPath = Path.Combine(FaviconPath, "anilist.co.webp");
|
||||
|
||||
// Ensure TempPath exists
|
||||
_directoryService.ExistOrCreate(TempPath);
|
||||
|
||||
var baseUrl = "https://anilist.co";
|
||||
|
||||
// Ensure there is no cache result for this URL
|
||||
var provider = Substitute.For<IEasyCachingProvider>();
|
||||
provider.GetAsync<string>(baseUrl).Returns(new CacheValue<string>(null, false));
|
||||
_cacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
|
||||
|
||||
|
||||
// // Replace favicon directory with TempPath
|
||||
// var directoryService = (DirectoryService)_directoryService;
|
||||
// directoryService.FaviconDirectory = TempPath;
|
||||
|
||||
// Hack: Swap FaviconDirectory with TempPath for ability to download real files
|
||||
typeof(DirectoryService)
|
||||
.GetField("FaviconDirectory", BindingFlags.NonPublic | BindingFlags.Instance)
|
||||
?.SetValue(_directoryService, TempPath);
|
||||
|
||||
|
||||
// Act
|
||||
var resultFilename = await _coverDbService.DownloadFaviconAsync(testUrl, encodeFormat);
|
||||
var actualFaviconPath = Path.Combine(TempPath, resultFilename);
|
||||
|
||||
// Assert file exists
|
||||
Assert.True(File.Exists(actualFaviconPath), "Downloaded favicon does not exist in temp path");
|
||||
|
||||
// Load and compare similarity
|
||||
|
||||
var similarity = expectedFaviconPath.CalculateSimilarity(actualFaviconPath); // Assuming you have this extension
|
||||
Assert.True(similarity > 0.9f, $"Image similarity too low: {similarity}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadFaviconAsync_ShouldThrowKavitaException_WhenPreviouslyFailedUrlExistsInCache()
|
||||
{
|
||||
// Arrange
|
||||
var testUrl = "https://example.com";
|
||||
var encodeFormat = EncodeFormat.WEBP;
|
||||
|
||||
var provider = Substitute.For<IEasyCachingProvider>();
|
||||
provider.GetAsync<string>(Arg.Any<string>())
|
||||
.Returns(new CacheValue<string>(string.Empty, true)); // Simulate previous failure
|
||||
|
||||
_cacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
|
||||
|
||||
// Act & Assert
|
||||
await Assert.ThrowsAsync<KavitaException>(() =>
|
||||
_coverDbService.DownloadFaviconAsync(testUrl, encodeFormat));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
|
||||
}
|
|
@@ -18,13 +18,13 @@ public class DeviceServiceDbTests : AbstractDbTest

public DeviceServiceDbTests() : base()
{
_deviceService = new DeviceService(UnitOfWork, _logger, Substitute.For<IEmailService>());
_deviceService = new DeviceService(_unitOfWork, _logger, Substitute.For<IEmailService>());
}

protected override async Task ResetDb()
{
Context.Users.RemoveRange(Context.Users.ToList());
await UnitOfWork.CommitAsync();
_context.Users.RemoveRange(_context.Users.ToList());
await _unitOfWork.CommitAsync();
}

@@ -39,8 +39,8 @@ public class DeviceServiceDbTests : AbstractDbTest
Devices = new List<Device>()
};

Context.Users.Add(user);
await UnitOfWork.CommitAsync();
_context.Users.Add(user);
await _unitOfWork.CommitAsync();

var device = await _deviceService.Create(new CreateDeviceDto()
{

@@ -62,8 +62,8 @@ public class DeviceServiceDbTests : AbstractDbTest
Devices = new List<Device>()
};

Context.Users.Add(user);
await UnitOfWork.CommitAsync();
_context.Users.Add(user);
await _unitOfWork.CommitAsync();

var device = await _deviceService.Create(new CreateDeviceDto()
{
|
||||
|
|
|
@@ -1,30 +1,20 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using API.Services;
using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;

namespace API.Tests.Services;

public class DirectoryServiceTests: AbstractFsTest
public class DirectoryServiceTests
{
private readonly ILogger<DirectoryService> _logger = Substitute.For<ILogger<DirectoryService>>();
private readonly ITestOutputHelper _testOutputHelper;

public DirectoryServiceTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
}

#region TraverseTreeParallelForEach
@ -382,16 +372,9 @@ public class DirectoryServiceTests: AbstractFsTest
|
|||
#endregion
|
||||
|
||||
#region IsDriveMounted
|
||||
// The root directory (/) is always mounted on non windows
|
||||
[Fact]
|
||||
public void IsDriveMounted_DriveIsNotMounted()
|
||||
{
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
|
||||
{
|
||||
_testOutputHelper.WriteLine("Skipping test on non Windows platform");
|
||||
return;
|
||||
}
|
||||
|
||||
const string testDirectory = "c:/manga/";
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
|
||||
|
@ -403,12 +386,6 @@ public class DirectoryServiceTests: AbstractFsTest
|
|||
[Fact]
|
||||
public void IsDriveMounted_DriveIsMounted()
|
||||
{
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
|
||||
{
|
||||
_testOutputHelper.WriteLine("Skipping test on non Windows platform");
|
||||
return;
|
||||
}
|
||||
|
||||
const string testDirectory = "c:/manga/";
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
|
||||
|
@ -768,12 +745,6 @@ public class DirectoryServiceTests: AbstractFsTest
|
|||
[InlineData(new [] {"/manga"},
|
||||
new [] {"/manga/Love Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
|
||||
"/manga/Love Hina")]
|
||||
[InlineData(new [] {"/manga"},
|
||||
new [] {"/manga/Love Hina/Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
|
||||
"/manga/Love Hina")]
|
||||
[InlineData(new [] {"/manga"},
|
||||
new [] {"/manga/Dress Up Darling/Dress Up Darling Ch 01.cbz", "/manga/Dress Up Darling/Dress Up Darling/Dress Up Darling Vol 01.cbz"},
|
||||
"/manga/Dress Up Darling")]
|
||||
public void FindLowestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory)
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
|
@@ -922,14 +893,12 @@ public class DirectoryServiceTests: AbstractFsTest
#region GetHumanReadableBytes

[Theory]
[InlineData(1200, 1.17, " KB")]
[InlineData(1, 1, " B")]
[InlineData(10000000, 9.54, " MB")]
[InlineData(10000000000, 9.31, " GB")]
public void GetHumanReadableBytesTest(long bytes, float number, string suffix)
[InlineData(1200, "1.17 KB")]
[InlineData(1, "1 B")]
[InlineData(10000000, "9.54 MB")]
[InlineData(10000000000, "9.31 GB")]
public void GetHumanReadableBytesTest(long bytes, string expected)
{
// GetHumanReadableBytes is user facing, should be in CultureInfo.CurrentCulture
var expected = number.ToString(CultureInfo.CurrentCulture) + suffix;
Assert.Equal(expected, DirectoryService.GetHumanReadableBytes(bytes));
}
#endregion

@ -951,9 +920,8 @@ public class DirectoryServiceTests: AbstractFsTest
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
|
||||
var globMatcher = new GlobMatcher();
|
||||
globMatcher.AddExclude("*.*");
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
||||
|
||||
Assert.Empty(allFiles);
|
||||
|
||||
|
@ -977,9 +945,7 @@ public class DirectoryServiceTests: AbstractFsTest
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
|
||||
var globMatcher = new GlobMatcher();
|
||||
globMatcher.AddExclude("**/Accel World/*");
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
||||
|
||||
Assert.Single(allFiles); // Ignore files are not counted in files, only valid extensions
|
||||
|
||||
|
@ -1008,10 +974,7 @@ public class DirectoryServiceTests: AbstractFsTest
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
|
||||
var globMatcher = new GlobMatcher();
|
||||
globMatcher.AddExclude("**/Accel World/*");
|
||||
globMatcher.AddExclude("**/ArtBooks/*");
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
||||
|
||||
Assert.Equal(2, allFiles.Count); // Ignore files are not counted in files, only valid extensions
|
||||
|
||||
|
@ -1065,14 +1028,11 @@ public class DirectoryServiceTests: AbstractFsTest
|
|||
#region GetParentDirectory
|
||||
|
||||
[Theory]
|
||||
[InlineData(@"file.txt", "")]
|
||||
[InlineData(@"folder/file.txt", "folder")]
|
||||
[InlineData(@"folder/subfolder/file.txt", "folder/subfolder")]
|
||||
[InlineData(@"C:/file.txt", "C:/")]
|
||||
[InlineData(@"C:/folder/file.txt", "C:/folder")]
|
||||
[InlineData(@"C:/folder/subfolder/file.txt", "C:/folder/subfolder")]
|
||||
public void GetParentDirectoryName_ShouldFindParentOfFiles(string path, string expected)
|
||||
{
|
||||
path = Root + path;
|
||||
expected = Root + expected;
|
||||
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
{ path, new MockFileData(string.Empty)}
|
||||
|
@ -1082,14 +1042,11 @@ public class DirectoryServiceTests: AbstractFsTest
|
|||
Assert.Equal(expected, ds.GetParentDirectoryName(path));
|
||||
}
|
||||
[Theory]
|
||||
[InlineData(@"folder", "")]
|
||||
[InlineData(@"folder/subfolder", "folder")]
|
||||
[InlineData(@"folder/subfolder/another", "folder/subfolder")]
|
||||
[InlineData(@"C:/folder", "C:/")]
|
||||
[InlineData(@"C:/folder/subfolder", "C:/folder")]
|
||||
[InlineData(@"C:/folder/subfolder/another", "C:/folder/subfolder")]
|
||||
public void GetParentDirectoryName_ShouldFindParentOfDirectories(string path, string expected)
|
||||
{
|
||||
path = Root + path;
|
||||
expected = Root + expected;
|
||||
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory(path);
|
||||
|
||||
|
|
|
@ -12,7 +12,6 @@ namespace API.Tests.Services;
|
|||
public class ImageServiceTests
|
||||
{
|
||||
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageService/Covers");
|
||||
private readonly string _testDirectoryColorScapes = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageService/ColorScapes");
|
||||
private const string OutputPattern = "_output";
|
||||
private const string BaselinePattern = "_baseline";
|
||||
|
||||
|
@ -23,7 +22,6 @@ public class ImageServiceTests
|
|||
public void GenerateBaseline()
|
||||
{
|
||||
GenerateFiles(BaselinePattern);
|
||||
Assert.True(true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
@ -34,7 +32,6 @@ public class ImageServiceTests
|
|||
{
|
||||
GenerateFiles(OutputPattern);
|
||||
GenerateHtmlFile();
|
||||
Assert.True(true);
|
||||
}
|
||||
|
||||
private void GenerateFiles(string outputExtension)
|
||||
|
@ -124,98 +121,4 @@ public class ImageServiceTests
|
|||
File.WriteAllText(Path.Combine(_testDirectory, "index.html"), htmlBuilder.ToString());
|
||||
}
|
||||
|
||||
|
||||
[Fact]
|
||||
public void TestColorScapes()
|
||||
{
|
||||
// Step 1: Delete any images that have _output in the name
|
||||
var outputFiles = Directory.GetFiles(_testDirectoryColorScapes, "*_output.*");
|
||||
foreach (var file in outputFiles)
|
||||
{
|
||||
File.Delete(file);
|
||||
}
|
||||
|
||||
// Step 2: Scan the _testDirectory for images
|
||||
var imageFiles = Directory.GetFiles(_testDirectoryColorScapes, "*.*")
|
||||
.Where(file => !file.EndsWith("html"))
|
||||
.Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
|
||||
.ToList();
|
||||
|
||||
// Step 3: Process each image
|
||||
foreach (var imagePath in imageFiles)
|
||||
{
|
||||
var fileName = Path.GetFileNameWithoutExtension(imagePath);
|
||||
var colors = ImageService.CalculateColorScape(imagePath);
|
||||
|
||||
// Generate primary color image
|
||||
GenerateColorImage(colors.Primary, Path.Combine(_testDirectoryColorScapes, $"{fileName}_primary_output.png"));
|
||||
|
||||
// Generate secondary color image
|
||||
GenerateColorImage(colors.Secondary, Path.Combine(_testDirectoryColorScapes, $"{fileName}_secondary_output.png"));
|
||||
}
|
||||
|
||||
// Step 4: Generate HTML file
|
||||
GenerateHtmlFileForColorScape();
|
||||
Assert.True(true);
|
||||
}
|
||||
|
||||
private static void GenerateColorImage(string hexColor, string outputPath)
|
||||
{
|
||||
var (r, g, b) = ImageService.HexToRgb(hexColor);
|
||||
using var blackImage = Image.Black(200, 100);
|
||||
using var colorImage = blackImage.NewFromImage(r, g, b);
|
||||
colorImage.WriteToFile(outputPath);
|
||||
}
|
||||
|
||||
private void GenerateHtmlFileForColorScape()
|
||||
{
|
||||
var imageFiles = Directory.GetFiles(_testDirectoryColorScapes, "*.*")
|
||||
.Where(file => !file.EndsWith("html"))
|
||||
.Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
|
||||
.ToList();
|
||||
|
||||
var htmlBuilder = new StringBuilder();
|
||||
htmlBuilder.AppendLine("<!DOCTYPE html>");
|
||||
htmlBuilder.AppendLine("<html lang=\"en\">");
|
||||
htmlBuilder.AppendLine("<head>");
|
||||
htmlBuilder.AppendLine("<meta charset=\"UTF-8\">");
|
||||
htmlBuilder.AppendLine("<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">");
|
||||
htmlBuilder.AppendLine("<title>Color Scape Comparison</title>");
|
||||
htmlBuilder.AppendLine("<style>");
|
||||
htmlBuilder.AppendLine("body { font-family: Arial, sans-serif; }");
|
||||
htmlBuilder.AppendLine(".container { display: flex; flex-wrap: wrap; }");
|
||||
htmlBuilder.AppendLine(".image-row { display: flex; align-items: center; margin-bottom: 20px; width: 100% }");
|
||||
htmlBuilder.AppendLine(".image-row img { margin-right: 10px; max-width: 200px; height: auto; }");
|
||||
htmlBuilder.AppendLine(".color-square { width: 100px; height: 100px; margin-right: 10px; }");
|
||||
htmlBuilder.AppendLine("</style>");
|
||||
htmlBuilder.AppendLine("</head>");
|
||||
htmlBuilder.AppendLine("<body>");
|
||||
htmlBuilder.AppendLine("<div class=\"container\">");
|
||||
|
||||
foreach (var imagePath in imageFiles)
|
||||
{
|
||||
var fileName = Path.GetFileNameWithoutExtension(imagePath);
|
||||
var primaryPath = Path.Combine(_testDirectoryColorScapes, $"{fileName}_primary_output.png");
|
||||
var secondaryPath = Path.Combine(_testDirectoryColorScapes, $"{fileName}_secondary_output.png");
|
||||
|
||||
htmlBuilder.AppendLine("<div class=\"image-row\">");
|
||||
htmlBuilder.AppendLine($"<p>{fileName}</p>");
|
||||
htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(imagePath)}\" alt=\"{fileName}\">");
|
||||
if (File.Exists(primaryPath))
|
||||
{
|
||||
htmlBuilder.AppendLine($"<img class=\"color-square\" src=\"./{Path.GetFileName(primaryPath)}\" alt=\"{fileName} primary color\">");
|
||||
}
|
||||
if (File.Exists(secondaryPath))
|
||||
{
|
||||
htmlBuilder.AppendLine($"<img class=\"color-square\" src=\"./{Path.GetFileName(secondaryPath)}\" alt=\"{fileName} secondary color\">");
|
||||
}
|
||||
htmlBuilder.AppendLine("</div>");
|
||||
}
|
||||
|
||||
htmlBuilder.AppendLine("</div>");
|
||||
htmlBuilder.AppendLine("</body>");
|
||||
htmlBuilder.AppendLine("</html>");
|
||||
|
||||
File.WriteAllText(Path.Combine(_testDirectoryColorScapes, "colorscape_index.html"), htmlBuilder.ToString());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,41 +1,37 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Abstractions;
using System.Data.Common;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner;
using API.Services.Tasks.Scanner.Parser;
using API.SignalR;
using API.Tests.Helpers;
using Hangfire;
using AutoMapper;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;

namespace API.Tests.Services;

public class MockReadingItemService : IReadingItemService
internal class MockReadingItemService : IReadingItemService
{
private readonly BasicParser _basicParser;
private readonly ComicVineParser _comicVineParser;
private readonly ImageParser _imageParser;
private readonly BookParser _bookParser;
private readonly PdfParser _pdfParser;
private readonly IDefaultParser _defaultParser;

public MockReadingItemService(IDirectoryService directoryService, IBookService bookService)
public MockReadingItemService(IDefaultParser defaultParser)
{
_imageParser = new ImageParser(directoryService);
_basicParser = new BasicParser(directoryService, _imageParser);
_bookParser = new BookParser(directoryService, bookService, _basicParser);
_comicVineParser = new ComicVineParser(directoryService);
_pdfParser = new PdfParser(directoryService);
_defaultParser = defaultParser;
}

public ComicInfo GetComicInfo(string filePath)
@ -58,55 +54,32 @@ public class MockReadingItemService : IReadingItemService
throw new NotImplementedException();
}

public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata)
public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type)
{
if (_comicVineParser.IsApplicable(path, type))
{
return _comicVineParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
}
if (_imageParser.IsApplicable(path, type))
{
return _imageParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
}
if (_bookParser.IsApplicable(path, type))
{
return _bookParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
}
if (_pdfParser.IsApplicable(path, type))
{
return _pdfParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
}
if (_basicParser.IsApplicable(path, type))
{
return _basicParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
}

return null;
return _defaultParser.Parse(path, rootPath, libraryRoot, type);
}

public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata)
public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
{
return Parse(path, rootPath, libraryRoot, type, enableMetadata);
return _defaultParser.Parse(path, rootPath, libraryRoot, type);
}
}
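The removed Parse body above is a first-match dispatch over a fixed set of concrete parsers. A hedged sketch of the same pattern, generalized to a list, is below; the IApplicableParser interface here is purely illustrative and not an actual Kavita type (the real parsers take more parameters on IsApplicable and Parse).

using System.Collections.Generic;
using System.Linq;

// Illustrative only; Kavita's real parsers expose IsApplicable/Parse with more parameters.
public interface IApplicableParser
{
    bool IsApplicable(string path);
    string Parse(string path);
}

public static class ParserDispatchSketch
{
    // The first parser that claims the path wins; callers fall back to a default when none do.
    public static string? ParseWithFirstApplicable(IEnumerable<IApplicableParser> parsers, string path)
    {
        var parser = parsers.FirstOrDefault(p => p.IsApplicable(path));
        return parser?.Parse(path);
    }
}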
|
||||
|
||||
public class ParseScannedFilesTests : AbstractDbTest
|
||||
{
|
||||
private readonly ILogger<ParseScannedFiles> _logger = Substitute.For<ILogger<ParseScannedFiles>>();
|
||||
private readonly ScannerHelper _scannerHelper;
|
||||
|
||||
public ParseScannedFilesTests(ITestOutputHelper testOutputHelper)
|
||||
public ParseScannedFilesTests()
|
||||
{
|
||||
// Since ProcessFile relies on _readingItemService, we implement our own version of IReadingItemService so we have control over how the calls work
|
||||
GlobalConfiguration.Configuration.UseInMemoryStorage();
|
||||
_scannerHelper = new ScannerHelper(UnitOfWork, testOutputHelper);
|
||||
|
||||
}
|
||||
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
Context.Series.RemoveRange(Context.Series.ToList());
|
||||
_context.Series.RemoveRange(_context.Series.ToList());
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
#region MergeName
|
||||
|
@ -194,25 +167,43 @@ public class ParseScannedFilesTests : AbstractDbTest
|
|||
public async Task ScanLibrariesForSeries_ShouldFindFiles()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory(Root + "Data/");
|
||||
fileSystem.AddFile(Root + "Data/Accel World v1.cbz", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile(Root + "Data/Accel World v2.cbz", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile(Root + "Data/Accel World v2.pdf", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile(Root + "Data/Nothing.pdf", new MockFileData(string.Empty));
|
||||
fileSystem.AddDirectory("C:/Data/");
|
||||
fileSystem.AddFile("C:/Data/Accel World v1.cbz", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile("C:/Data/Accel World v2.cbz", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile("C:/Data/Accel World v2.pdf", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile("C:/Data/Nothing.pdf", new MockFileData(string.Empty));
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(new BasicParser(ds, new ImageParser(ds))), Substitute.For<IEventHub>());
|
||||
|
||||
// var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
|
||||
//
|
||||
// Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
|
||||
// {
|
||||
// var skippedScan = parsedInfo.Item1;
|
||||
// var parsedFiles = parsedInfo.Item2;
|
||||
// if (parsedFiles.Count == 0) return Task.CompletedTask;
|
||||
//
|
||||
// var foundParsedSeries = new ParsedSeries()
|
||||
// {
|
||||
// Name = parsedFiles.First().Series,
|
||||
// NormalizedName = parsedFiles.First().Series.ToNormalized(),
|
||||
// Format = parsedFiles.First().Format
|
||||
// };
|
||||
//
|
||||
// parsedSeries.Add(foundParsedSeries, parsedFiles);
|
||||
// return Task.CompletedTask;
|
||||
// }
|
||||
|
||||
var library =
|
||||
await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
Assert.NotNull(library);
|
||||
|
||||
library.Type = LibraryType.Manga;
|
||||
var parsedSeries = await psf.ScanLibrariesForSeries(library, new List<string>() {Root + "Data/"}, false,
|
||||
await UnitOfWork.SeriesRepository.GetFolderPathMap(1));
|
||||
var parsedSeries = await psf.ScanLibrariesForSeries(library, new List<string>() {"C:/Data/"}, false,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(1));
|
||||
|
||||
|
||||
// Assert.Equal(3, parsedSeries.Values.Count);
|
||||
|
@ -248,12 +239,12 @@ public class ParseScannedFilesTests : AbstractDbTest
|
|||
var fileSystem = CreateTestFilesystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(new BasicParser(ds, new ImageParser(ds))), Substitute.For<IEventHub>());
|
||||
|
||||
var directoriesSeen = new HashSet<string>();
|
||||
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
var scanResults = await psf.ScanFiles("C:/Data/", true, await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
var scanResults = await psf.ProcessFiles("C:/Data/", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
foreach (var scanResult in scanResults)
|
||||
{
|
||||
directoriesSeen.Add(scanResult.Folder);
|
||||
|
@ -268,15 +259,15 @@ public class ParseScannedFilesTests : AbstractDbTest
|
|||
var fileSystem = CreateTestFilesystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(new BasicParser(ds, new ImageParser(ds))), Substitute.For<IEventHub>());
|
||||
|
||||
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
Assert.NotNull(library);
|
||||
|
||||
var directoriesSeen = new HashSet<string>();
|
||||
var scanResults = await psf.ScanFiles("C:/Data/", false,
|
||||
await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
var scanResults = await psf.ProcessFiles("C:/Data/", false,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
|
||||
foreach (var scanResult in scanResults)
|
||||
{
|
||||
|
@ -303,12 +294,12 @@ public class ParseScannedFilesTests : AbstractDbTest
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(new BasicParser(ds, new ImageParser(ds))), Substitute.For<IEventHub>());
|
||||
|
||||
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
Assert.NotNull(library);
|
||||
var scanResults = await psf.ScanFiles("C:/Data", true, await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
var scanResults = await psf.ProcessFiles("C:/Data", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
|
||||
Assert.Equal(2, scanResults.Count);
|
||||
}
|
||||
|
@ -332,13 +323,13 @@ public class ParseScannedFilesTests : AbstractDbTest
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(new BasicParser(ds, new ImageParser(ds))), Substitute.For<IEventHub>());
|
||||
|
||||
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
Assert.NotNull(library);
|
||||
var scanResults = await psf.ScanFiles("C:/Data", false,
|
||||
await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
var scanResults = await psf.ProcessFiles("C:/Data", false,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
|
||||
Assert.Single(scanResults);
|
||||
}
|
||||
|
@ -347,220 +338,4 @@ public class ParseScannedFilesTests : AbstractDbTest
|
|||
|
||||
|
||||
#endregion
|
||||
|
||||
// TODO: Add back in (removed for Hotfix v0.8.5.x)
|
||||
//[Fact]
|
||||
public async Task HasSeriesFolderNotChangedSinceLastScan_AllSeriesFoldersHaveChanges()
|
||||
{
|
||||
const string testcase = "Subfolders always scanning all series changes - Manga.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
UnitOfWork.LibraryRepository.Update(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Equal(4, postLib.Series.Count);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(2, spiceAndWolf.Volumes.Count);
|
||||
|
||||
var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
|
||||
Assert.Single(frieren.Volumes);
|
||||
|
||||
var executionerAndHerWayOfLife = postLib.Series.First(x => x.Name == "The Executioner and Her Way of Life");
|
||||
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
|
||||
|
||||
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||
|
||||
// Add a new chapter to a volume of the series, and scan. Validate that only, and all directories of this
|
||||
// series are marked as HasChanged
|
||||
var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "The Executioner and Her Way of Life"),
|
||||
"The Executioner and Her Way of Life Vol. 1");
|
||||
File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0001.cbz"),
|
||||
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0002.cbz"));
|
||||
|
||||
// 4 series, of which 2 have volumes as directories
|
||||
var folderMap = await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id);
|
||||
Assert.Equal(6, folderMap.Count);
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true, folderMap, postLib);
|
||||
var changes = res.Where(sc => sc.HasChanged).ToList();
|
||||
Assert.Equal(2, changes.Count);
|
||||
// Only volumes of The Executioner and Her Way of Life should be marked as HasChanged (Spice and Wolf also has 2 volumes dirs)
|
||||
Assert.Equal(2, changes.Count(sc => sc.Folder.Contains("The Executioner and Her Way of Life")));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HasSeriesFolderNotChangedSinceLastScan_PublisherLayout()
|
||||
{
|
||||
const string testcase = "Subfolder always scanning fix publisher layout - Comic.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
UnitOfWork.LibraryRepository.Update(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Equal(4, postLib.Series.Count);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(2, spiceAndWolf.Volumes.Count);
|
||||
|
||||
var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
|
||||
Assert.Equal(2, frieren.Volumes.Count);
|
||||
|
||||
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||
|
||||
// Add a volume to a series, and scan. Ensure only this series is marked as HasChanged
|
||||
var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "YenPress"), "The Executioner and Her Way of Life");
|
||||
File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1.cbz"),
|
||||
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 2.cbz"));
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||
var changes = res.Count(sc => sc.HasChanged);
|
||||
Assert.Equal(1, changes);
|
||||
}
|
||||
|
||||
// TODO: Add back in (removed for Hotfix v0.8.5.x)
|
||||
//[Fact]
|
||||
public async Task SubFoldersNoSubFolders_SkipAll()
|
||||
{
|
||||
const string testcase = "Subfolders and files at root - Manga.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
UnitOfWork.LibraryRepository.Update(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||
|
||||
// Needs to be the actual time, as the write time is now; if we set LastFolderChecked in the past
// it'll always trigger a scan because the folder was changed since the last scan.
|
||||
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||
Assert.DoesNotContain(res, sc => sc.HasChanged);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInRoot()
|
||||
{
|
||||
const string testcase = "Subfolders and files at root - Manga.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
UnitOfWork.LibraryRepository.Update(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||
|
||||
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
|
||||
Context.Series.Update(spiceAndWolf);
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
// Add file at series root
|
||||
var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
|
||||
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
|
||||
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||
var changes = res.Count(sc => sc.HasChanged);
|
||||
Assert.Equal(2, changes);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInSubFolder()
|
||||
{
|
||||
const string testcase = "Subfolders and files at root - Manga.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
UnitOfWork.LibraryRepository.Update(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||
|
||||
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
|
||||
Context.Series.Update(spiceAndWolf);
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
// Add file in subfolder
|
||||
var spiceAndWolfDir = Path.Join(Path.Join(testDirectoryPath, "Spice and Wolf"), "Spice and Wolf Vol. 3");
|
||||
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0011.cbz"),
|
||||
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||
var changes = res.Count(sc => sc.HasChanged);
|
||||
Assert.Equal(2, changes);
|
||||
}
|
||||
}
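All of these scan tests hinge on comparing a folder's last write time against the series' last recorded scan time. A minimal sketch of that check is below, assuming the folder timestamp is read straight from the file system; this is not the exact ParseScannedFiles implementation.

using System;
using System.IO;

public static class FolderChangeCheck
{
    // True when the folder was written to after the last recorded scan.
    public static bool HasChangedSince(string folderPath, DateTime lastFolderScanned)
    {
        return Directory.GetLastWriteTime(folderPath) > lastFolderScanned;
    }
}

This is also why the tests above wait roughly 1.1 seconds before copying new files in: the new write time must be strictly later than the scan time even on file systems with one-second timestamp resolution.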
|
||||
|
|
|
@ -1,286 +0,0 @@
|
|||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data.Repositories;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Person;
|
||||
using API.Extensions;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class PersonServiceTests: AbstractDbTest
|
||||
{
|
||||
|
||||
[Fact]
|
||||
public async Task PersonMerge_KeepNonEmptyMetadata()
|
||||
{
|
||||
var ps = new PersonService(UnitOfWork);
|
||||
|
||||
var person1 = new Person
|
||||
{
|
||||
Name = "Casey Delores",
|
||||
NormalizedName = "Casey Delores".ToNormalized(),
|
||||
HardcoverId = "ANonEmptyId",
|
||||
MalId = 12,
|
||||
};
|
||||
|
||||
var person2 = new Person
|
||||
{
|
||||
Name= "Delores Casey",
|
||||
NormalizedName = "Delores Casey".ToNormalized(),
|
||||
Description = "Hi, I'm Delores Casey!",
|
||||
Aliases = [new PersonAliasBuilder("Casey, Delores").Build()],
|
||||
AniListId = 27,
|
||||
};
|
||||
|
||||
UnitOfWork.PersonRepository.Attach(person1);
|
||||
UnitOfWork.PersonRepository.Attach(person2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await ps.MergePeopleAsync(person2, person1);
|
||||
|
||||
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.Single(allPeople);
|
||||
|
||||
var person = allPeople[0];
|
||||
Assert.Equal("Casey Delores", person.Name);
|
||||
Assert.NotEmpty(person.Description);
|
||||
Assert.Equal(27, person.AniListId);
|
||||
Assert.NotNull(person.HardcoverId);
|
||||
Assert.NotEmpty(person.HardcoverId);
|
||||
Assert.Contains(person.Aliases, pa => pa.Alias == "Delores Casey");
|
||||
Assert.Contains(person.Aliases, pa => pa.Alias == "Casey, Delores");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PersonMerge_MergedPersonDestruction()
|
||||
{
|
||||
var ps = new PersonService(UnitOfWork);
|
||||
|
||||
var person1 = new Person
|
||||
{
|
||||
Name = "Casey Delores",
|
||||
NormalizedName = "Casey Delores".ToNormalized(),
|
||||
};
|
||||
|
||||
var person2 = new Person
|
||||
{
|
||||
Name = "Delores Casey",
|
||||
NormalizedName = "Delores Casey".ToNormalized(),
|
||||
};
|
||||
|
||||
UnitOfWork.PersonRepository.Attach(person1);
|
||||
UnitOfWork.PersonRepository.Attach(person2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await ps.MergePeopleAsync(person2, person1);
|
||||
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.Single(allPeople);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PersonMerge_RetentionChapters()
|
||||
{
|
||||
var ps = new PersonService(UnitOfWork);
|
||||
|
||||
var library = new LibraryBuilder("My Library").Build();
|
||||
UnitOfWork.LibraryRepository.Add(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var user = new AppUserBuilder("Amelia", "amelia@localhost")
|
||||
.WithLibrary(library).Build();
|
||||
UnitOfWork.UserRepository.Add(user);
|
||||
|
||||
var person = new PersonBuilder("Jillian Cowan").Build();
|
||||
|
||||
var person2 = new PersonBuilder("Cowan Jillian").Build();
|
||||
|
||||
var chapter = new ChapterBuilder("1")
|
||||
.WithPerson(person, PersonRole.Editor)
|
||||
.Build();
|
||||
|
||||
var chapter2 = new ChapterBuilder("2")
|
||||
.WithPerson(person2, PersonRole.Editor)
|
||||
.Build();
|
||||
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(chapter)
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
var series2 = new SeriesBuilder("Test 2")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(chapter2)
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
UnitOfWork.SeriesRepository.Add(series);
|
||||
UnitOfWork.SeriesRepository.Add(series2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await ps.MergePeopleAsync(person2, person);
|
||||
|
||||
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.Single(allPeople);
|
||||
var mergedPerson = allPeople[0];
|
||||
|
||||
Assert.Equal("Jillian Cowan", mergedPerson.Name);
|
||||
|
||||
var chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(1, 1, PersonRole.Editor);
|
||||
Assert.Equal(2, chapters.Count());
|
||||
|
||||
chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(1, ChapterIncludes.People);
|
||||
Assert.NotNull(chapter);
|
||||
Assert.Single(chapter.People);
|
||||
|
||||
chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(2, ChapterIncludes.People);
|
||||
Assert.NotNull(chapter2);
|
||||
Assert.Single(chapter2.People);
|
||||
|
||||
Assert.Equal(chapter.People.First().PersonId, chapter2.People.First().PersonId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PersonMerge_NoDuplicateChaptersOrSeries()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
var ps = new PersonService(UnitOfWork);
|
||||
|
||||
var library = new LibraryBuilder("My Library").Build();
|
||||
UnitOfWork.LibraryRepository.Add(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var user = new AppUserBuilder("Amelia", "amelia@localhost")
|
||||
.WithLibrary(library).Build();
|
||||
UnitOfWork.UserRepository.Add(user);
|
||||
|
||||
var person = new PersonBuilder("Jillian Cowan").Build();
|
||||
|
||||
var person2 = new PersonBuilder("Cowan Jillian").Build();
|
||||
|
||||
var chapter = new ChapterBuilder("1")
|
||||
.WithPerson(person, PersonRole.Editor)
|
||||
.WithPerson(person2, PersonRole.Colorist)
|
||||
.Build();
|
||||
|
||||
var chapter2 = new ChapterBuilder("2")
|
||||
.WithPerson(person2, PersonRole.Editor)
|
||||
.WithPerson(person, PersonRole.Editor)
|
||||
.Build();
|
||||
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(chapter)
|
||||
.Build())
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithPerson(person, PersonRole.Editor)
|
||||
.WithPerson(person2, PersonRole.Editor)
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
var series2 = new SeriesBuilder("Test 2")
|
||||
.WithLibraryId(library.Id)
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(chapter2)
|
||||
.Build())
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithPerson(person, PersonRole.Editor)
|
||||
.WithPerson(person2, PersonRole.Colorist)
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
UnitOfWork.SeriesRepository.Add(series);
|
||||
UnitOfWork.SeriesRepository.Add(series2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await ps.MergePeopleAsync(person2, person);
|
||||
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||
Assert.Single(allPeople);
|
||||
|
||||
var mergedPerson = await UnitOfWork.PersonRepository.GetPersonById(person.Id, PersonIncludes.All);
|
||||
Assert.NotNull(mergedPerson);
|
||||
Assert.Equal(3, mergedPerson.ChapterPeople.Count);
|
||||
Assert.Equal(3, mergedPerson.SeriesMetadataPeople.Count);
|
||||
|
||||
chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(chapter.Id, ChapterIncludes.People);
|
||||
Assert.NotNull(chapter);
|
||||
Assert.Equal(2, chapter.People.Count);
|
||||
Assert.Single(chapter.People.Select(p => p.Person.Id).Distinct());
|
||||
Assert.Contains(chapter.People, p => p.Role == PersonRole.Editor);
|
||||
Assert.Contains(chapter.People, p => p.Role == PersonRole.Colorist);
|
||||
|
||||
chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(chapter2.Id, ChapterIncludes.People);
|
||||
Assert.NotNull(chapter2);
|
||||
Assert.Single(chapter2.People);
|
||||
Assert.Contains(chapter2.People, p => p.Role == PersonRole.Editor);
|
||||
Assert.DoesNotContain(chapter2.People, p => p.Role == PersonRole.Colorist);
|
||||
|
||||
series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(series.Id, SeriesIncludes.Metadata);
|
||||
Assert.NotNull(series);
|
||||
Assert.Single(series.Metadata.People);
|
||||
Assert.Contains(series.Metadata.People, p => p.Role == PersonRole.Editor);
|
||||
Assert.DoesNotContain(series.Metadata.People, p => p.Role == PersonRole.Colorist);
|
||||
|
||||
series2 = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(series2.Id, SeriesIncludes.Metadata);
|
||||
Assert.NotNull(series2);
|
||||
Assert.Equal(2, series2.Metadata.People.Count);
|
||||
Assert.Contains(series2.Metadata.People, p => p.Role == PersonRole.Editor);
|
||||
Assert.Contains(series2.Metadata.People, p => p.Role == PersonRole.Colorist);
|
||||
|
||||
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PersonAddAlias_NoOverlap()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
UnitOfWork.PersonRepository.Attach(new PersonBuilder("Jillian Cowan").Build());
|
||||
UnitOfWork.PersonRepository.Attach(new PersonBuilder("Jilly Cowan").WithAlias("Jolly Cowan").Build());
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var ps = new PersonService(UnitOfWork);
|
||||
|
||||
var person1 = await UnitOfWork.PersonRepository.GetPersonByNameOrAliasAsync("Jillian Cowan");
|
||||
var person2 = await UnitOfWork.PersonRepository.GetPersonByNameOrAliasAsync("Jilly Cowan");
|
||||
Assert.NotNull(person1);
|
||||
Assert.NotNull(person2);
|
||||
|
||||
// Overlap on Name
|
||||
var success = await ps.UpdatePersonAliasesAsync(person1, ["Jilly Cowan"]);
|
||||
Assert.False(success);
|
||||
|
||||
// Overlap on alias
|
||||
success = await ps.UpdatePersonAliasesAsync(person1, ["Jolly Cowan"]);
|
||||
Assert.False(success);
|
||||
|
||||
// No overlap
|
||||
success = await ps.UpdatePersonAliasesAsync(person2, ["Jilly Joy Cowan"]);
|
||||
Assert.True(success);
|
||||
|
||||
// Some overlap
|
||||
success = await ps.UpdatePersonAliasesAsync(person1, ["Jolly Cowan", "Jilly Joy Cowan"]);
|
||||
Assert.False(success);
|
||||
|
||||
// Some overlap
|
||||
success = await ps.UpdatePersonAliasesAsync(person1, ["Jolly Cowan", "Jilly Joy Cowan"]);
|
||||
Assert.False(success);
|
||||
|
||||
Assert.Single(person2.Aliases);
|
||||
}
|
||||
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
Context.Person.RemoveRange(Context.Person.ToList());
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
}
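The merge tests above assume a "keep whatever is non-empty" policy: the destination person keeps its own name, absorbs the other person's name as an alias, and only fills in metadata it is missing. A rough sketch of that coalescing step follows, using a simplified stand-in for the Person entity; it is not the actual PersonService.MergePeopleAsync body.

using System.Collections.Generic;

// Simplified stand-in for API.Entities.Person, for illustration only.
public class PersonSketch
{
    public string Name = string.Empty;
    public string Description = string.Empty;
    public string? HardcoverId;
    public int AniListId;
    public int MalId;
    public HashSet<string> Aliases = new();
}

public static class PersonMergeSketch
{
    // Merge 'source' into 'destination', never overwriting data the destination already has.
    public static void MergeInto(PersonSketch destination, PersonSketch source)
    {
        if (string.IsNullOrEmpty(destination.Description)) destination.Description = source.Description;
        destination.HardcoverId ??= source.HardcoverId;
        if (destination.AniListId == 0) destination.AniListId = source.AniListId;
        if (destination.MalId == 0) destination.MalId = source.MalId;

        destination.Aliases.Add(source.Name);          // the merged person's name survives as an alias
        destination.Aliases.UnionWith(source.Aliases); // along with any aliases it already had
    }
}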
|
|
@ -1,8 +1,23 @@
|
|||
namespace API.Tests.Services;
|
||||
using System.IO;
|
||||
using API.Data;
|
||||
using API.Data.Metadata;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Metadata;
|
||||
using API.Services.Tasks.Scanner;
|
||||
using API.SignalR;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class ProcessSeriesTests
|
||||
{
|
||||
// TODO: Implement
|
||||
|
||||
|
||||
#region UpdateSeriesMetadata
|
||||
|
||||
|
|
|
@ -1,189 +0,0 @@
|
|||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data.Repositories;
|
||||
using API.DTOs;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Plus;
|
||||
using Hangfire;
|
||||
using Hangfire.InMemory;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class RatingServiceTests: AbstractDbTest
|
||||
{
|
||||
private readonly RatingService _ratingService;
|
||||
|
||||
public RatingServiceTests()
|
||||
{
|
||||
_ratingService = new RatingService(UnitOfWork, Substitute.For<IScrobblingService>(), Substitute.For<ILogger<RatingService>>());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateRating_ShouldSetRating()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
Context.Library.Add(new LibraryBuilder("Test LIb")
|
||||
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
|
||||
.WithSeries(new SeriesBuilder("Test")
|
||||
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build());
|
||||
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
|
||||
|
||||
JobStorage.Current = new InMemoryStorage();
|
||||
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||
{
|
||||
SeriesId = 1,
|
||||
UserRating = 3,
|
||||
});
|
||||
|
||||
Assert.True(result);
|
||||
|
||||
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))!
|
||||
.Ratings;
|
||||
Assert.NotEmpty(ratings);
|
||||
Assert.Equal(3, ratings.First().Rating);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateRating_ShouldUpdateExistingRating()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
Context.Library.Add(new LibraryBuilder("Test LIb")
|
||||
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
|
||||
.WithSeries(new SeriesBuilder("Test")
|
||||
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build());
|
||||
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
|
||||
|
||||
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||
{
|
||||
SeriesId = 1,
|
||||
UserRating = 3,
|
||||
});
|
||||
|
||||
Assert.True(result);
|
||||
|
||||
JobStorage.Current = new InMemoryStorage();
|
||||
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))
|
||||
.Ratings;
|
||||
Assert.NotEmpty(ratings);
|
||||
Assert.Equal(3, ratings.First().Rating);
|
||||
|
||||
// Update the DB again
|
||||
|
||||
var result2 = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||
{
|
||||
SeriesId = 1,
|
||||
UserRating = 5,
|
||||
});
|
||||
|
||||
Assert.True(result2);
|
||||
|
||||
var ratings2 = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))
|
||||
.Ratings;
|
||||
Assert.NotEmpty(ratings2);
|
||||
Assert.True(ratings2.Count == 1);
|
||||
Assert.Equal(5, ratings2.First().Rating);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateRating_ShouldClampRatingAt5()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
Context.Library.Add(new LibraryBuilder("Test LIb")
|
||||
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
|
||||
.WithSeries(new SeriesBuilder("Test")
|
||||
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build());
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
|
||||
|
||||
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||
{
|
||||
SeriesId = 1,
|
||||
UserRating = 10,
|
||||
});
|
||||
|
||||
Assert.True(result);
|
||||
|
||||
JobStorage.Current = new InMemoryStorage();
|
||||
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007",
|
||||
AppUserIncludes.Ratings)!)
|
||||
.Ratings;
|
||||
Assert.NotEmpty(ratings);
|
||||
Assert.Equal(5, ratings.First().Rating);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateRating_ShouldReturnFalseWhenSeriesDoesntExist()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
Context.Library.Add(new LibraryBuilder("Test LIb", LibraryType.Book)
|
||||
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
|
||||
.WithSeries(new SeriesBuilder("Test")
|
||||
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build());
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
|
||||
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
|
||||
|
||||
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||
{
|
||||
SeriesId = 2,
|
||||
UserRating = 5,
|
||||
});
|
||||
|
||||
Assert.False(result);
|
||||
|
||||
var ratings = user.Ratings;
|
||||
Assert.Empty(ratings);
|
||||
}
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
Context.Series.RemoveRange(Context.Series.ToList());
|
||||
Context.AppUserRating.RemoveRange(Context.AppUserRating.ToList());
|
||||
Context.Genre.RemoveRange(Context.Genre.ToList());
|
||||
Context.CollectionTag.RemoveRange(Context.CollectionTag.ToList());
|
||||
Context.Person.RemoveRange(Context.Person.ToList());
|
||||
Context.Library.RemoveRange(Context.Library.ToList());
|
||||
|
||||
await Context.SaveChangesAsync();
|
||||
}
|
||||
}
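UpdateRating_ShouldClampRatingAt5 above expects out-of-range ratings to be capped rather than rejected. A sketch of that normalization step under the same assumption (simplified; the real RatingService also handles series lookup and scrobbling):

using System;

public static class RatingSketch
{
    // Ratings are stored on a 0-5 scale; anything outside that range is clamped, not rejected.
    public static float NormalizeRating(float requested)
    {
        return Math.Clamp(requested, 0f, 5f);
    }
}

// Example: NormalizeRating(10) == 5, matching the clamp test above.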
|
|
@ -11,11 +11,15 @@ using API.DTOs.ReadingLists;
|
|||
using API.DTOs.ReadingLists.CBL;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Plus;
|
||||
using API.Services.Tasks;
|
||||
using API.SignalR;
|
||||
using API.Tests.Helpers;
|
||||
using AutoMapper;
|
||||
using Microsoft.Data.Sqlite;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
@ -48,9 +52,7 @@ public class ReadingListServiceTests
|
|||
var mapper = config.CreateMapper();
|
||||
_unitOfWork = new UnitOfWork(_context, mapper, null!);
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||
_readingListService = new ReadingListService(_unitOfWork, Substitute.For<ILogger<ReadingListService>>(),
|
||||
Substitute.For<IEventHub>(), Substitute.For<IImageService>(), ds);
|
||||
_readingListService = new ReadingListService(_unitOfWork, Substitute.For<ILogger<ReadingListService>>(), Substitute.For<IEventHub>());
|
||||
|
||||
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(),
|
||||
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
|
||||
|
@ -579,93 +581,6 @@ public class ReadingListServiceTests
|
|||
Assert.Equal(AgeRating.G, readingList.AgeRating);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateReadingListAgeRatingForSeries()
|
||||
{
|
||||
await ResetDb();
|
||||
var spiceAndWolf = new SeriesBuilder("Spice and Wolf")
|
||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes([
|
||||
new VolumeBuilder("1")
|
||||
.WithChapters([
|
||||
new ChapterBuilder("1").Build(),
|
||||
new ChapterBuilder("2").Build(),
|
||||
]).Build()
|
||||
]).Build();
|
||||
spiceAndWolf.Metadata.AgeRating = AgeRating.Everyone;
|
||||
|
||||
var othersidePicnic = new SeriesBuilder("Otherside Picnic ")
|
||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes([
|
||||
new VolumeBuilder("1")
|
||||
.WithChapters([
|
||||
new ChapterBuilder("1").Build(),
|
||||
new ChapterBuilder("2").Build(),
|
||||
]).Build()
|
||||
]).Build();
|
||||
othersidePicnic.Metadata.AgeRating = AgeRating.Everyone;
|
||||
|
||||
_context.AppUser.Add(new AppUser()
|
||||
{
|
||||
UserName = "Amelia",
|
||||
ReadingLists = new List<ReadingList>(),
|
||||
Libraries = new List<Library>
|
||||
{
|
||||
new LibraryBuilder("Test Library", LibraryType.LightNovel)
|
||||
.WithSeries(spiceAndWolf)
|
||||
.WithSeries(othersidePicnic)
|
||||
.Build(),
|
||||
},
|
||||
});
|
||||
|
||||
await _context.SaveChangesAsync();
|
||||
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("Amelia", AppUserIncludes.ReadingLists);
|
||||
Assert.NotNull(user);
|
||||
|
||||
var myTestReadingList = new ReadingListBuilder("MyReadingList").Build();
|
||||
var mySecondTestReadingList = new ReadingListBuilder("MySecondReadingList").Build();
|
||||
var myThirdTestReadingList = new ReadingListBuilder("MyThirdReadingList").Build();
|
||||
user.ReadingLists = new List<ReadingList>()
|
||||
{
|
||||
myTestReadingList,
|
||||
mySecondTestReadingList,
|
||||
myThirdTestReadingList,
|
||||
};
|
||||
|
||||
|
||||
await _readingListService.AddChaptersToReadingList(spiceAndWolf.Id, new List<int> {1, 2}, myTestReadingList);
|
||||
await _readingListService.AddChaptersToReadingList(othersidePicnic.Id, new List<int> {3, 4}, myTestReadingList);
|
||||
await _readingListService.AddChaptersToReadingList(spiceAndWolf.Id, new List<int> {1, 2}, myThirdTestReadingList);
|
||||
await _readingListService.AddChaptersToReadingList(othersidePicnic.Id, new List<int> {3, 4}, mySecondTestReadingList);
|
||||
|
||||
|
||||
_unitOfWork.UserRepository.Update(user);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await _readingListService.CalculateReadingListAgeRating(myTestReadingList);
|
||||
await _readingListService.CalculateReadingListAgeRating(mySecondTestReadingList);
|
||||
Assert.Equal(AgeRating.Everyone, myTestReadingList.AgeRating);
|
||||
Assert.Equal(AgeRating.Everyone, mySecondTestReadingList.AgeRating);
|
||||
Assert.Equal(AgeRating.Everyone, myThirdTestReadingList.AgeRating);
|
||||
|
||||
await _readingListService.UpdateReadingListAgeRatingForSeries(othersidePicnic.Id, AgeRating.Mature);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Reading lists containing Otherside Picnic are updated
|
||||
myTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(1);
|
||||
Assert.NotNull(myTestReadingList);
|
||||
Assert.Equal(AgeRating.Mature, myTestReadingList.AgeRating);
|
||||
|
||||
mySecondTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(2);
|
||||
Assert.NotNull(mySecondTestReadingList);
|
||||
Assert.Equal(AgeRating.Mature, mySecondTestReadingList.AgeRating);
|
||||
|
||||
// Unrelated reading list is not updated
|
||||
myThirdTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(3);
|
||||
Assert.NotNull(myThirdTestReadingList);
|
||||
Assert.Equal(AgeRating.Everyone, myThirdTestReadingList.AgeRating);
|
||||
}
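The age-rating assertions above imply that a reading list's rating is the most restrictive rating across the series it contains. A small sketch of that rule, assuming the AgeRating enum's numeric order tracks restrictiveness (Everyone < Mature), which is what the test relies on:

using System.Collections.Generic;
using System.Linq;
using API.Entities.Enums;

public static class ReadingListAgeRatingSketch
{
    // A reading list is as restricted as its most restricted series; assumes the enum's
    // numeric order tracks restrictiveness, and falls back to the enum's zero value when empty.
    public static AgeRating CalculateForList(IReadOnlyCollection<AgeRating> seriesRatings)
    {
        return seriesRatings.Count == 0 ? default : seriesRatings.Max();
    }
}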
|
||||
|
||||
#endregion
|
||||
|
||||
#region CalculateStartAndEndDates
|
||||
|
@ -796,9 +711,6 @@ public class ReadingListServiceTests
|
|||
Assert.Equal("Issue #1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", "1", "The Title")));
|
||||
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", chapterTitleName: "The Title")));
|
||||
Assert.Equal("The Title", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterTitleName: "The Title")));
|
||||
var dto = CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterNumber: "The Special Title");
|
||||
dto.IsSpecial = true;
|
||||
Assert.Equal("The Special Title", ReadingListService.FormatTitle(dto));
|
||||
|
||||
// Book Library & Archive
|
||||
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Book, "1")));
|
||||
|
|
|
@ -1,561 +0,0 @@
|
|||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data.Repositories;
|
||||
using API.DTOs;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Tests.Helpers;
|
||||
using Kavita.Common;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class ReadingProfileServiceTest: AbstractDbTest
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Does not add a default reading profile
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
public async Task<(ReadingProfileService, AppUser, Library, Series)> Setup()
|
||||
{
|
||||
var user = new AppUserBuilder("amelia", "amelia@localhost").Build();
|
||||
Context.AppUser.Add(user);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var series = new SeriesBuilder("Spice and Wolf").Build();
|
||||
|
||||
var library = new LibraryBuilder("Manga")
|
||||
.WithSeries(series)
|
||||
.Build();
|
||||
|
||||
user.Libraries.Add(library);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var rps = new ReadingProfileService(UnitOfWork, Substitute.For<ILocalizationService>(), Mapper);
|
||||
user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.UserPreferences);
|
||||
|
||||
return (rps, user, library, series);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImplicitProfileFirst()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, library, series) = await Setup();
|
||||
|
||||
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithKind(ReadingProfileKind.Implicit)
|
||||
.WithSeries(series)
|
||||
.WithName("Implicit Profile")
|
||||
.Build();
|
||||
|
||||
var profile2 = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithSeries(series)
|
||||
.WithName("Non-implicit Profile")
|
||||
.Build();
|
||||
|
||||
user.ReadingProfiles.Add(profile);
|
||||
user.ReadingProfiles.Add(profile2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(seriesProfile);
|
||||
Assert.Equal("Implicit Profile", seriesProfile.Name);
|
||||
|
||||
// Find parent
|
||||
seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id, true);
|
||||
Assert.NotNull(seriesProfile);
|
||||
Assert.Equal("Non-implicit Profile", seriesProfile.Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CantDeleteDefaultReadingProfile()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, _, _) = await Setup();
|
||||
|
||||
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithKind(ReadingProfileKind.Default)
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(profile);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await Assert.ThrowsAsync<KavitaException>(async () =>
|
||||
{
|
||||
await rps.DeleteReadingProfile(user.Id, profile.Id);
|
||||
});
|
||||
|
||||
var profile2 = new AppUserReadingProfileBuilder(user.Id).Build();
|
||||
Context.AppUserReadingProfiles.Add(profile2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await rps.DeleteReadingProfile(user.Id, profile2.Id);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var allProfiles = await Context.AppUserReadingProfiles.ToListAsync();
|
||||
Assert.Single(allProfiles);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CreateImplicitSeriesReadingProfile()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, _, series) = await Setup();
|
||||
|
||||
var dto = new UserReadingProfileDto
|
||||
{
|
||||
ReaderMode = ReaderMode.Webtoon,
|
||||
ScalingOption = ScalingOption.FitToHeight,
|
||||
WidthOverride = 53,
|
||||
};
|
||||
|
||||
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, dto);
|
||||
|
||||
var profile = await rps.GetReadingProfileForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(profile);
|
||||
Assert.Contains(profile.SeriesIds, s => s == series.Id);
|
||||
Assert.Equal(ReadingProfileKind.Implicit, profile.Kind);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateImplicitReadingProfile_DoesNotCreateNew()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, _, series) = await Setup();
|
||||
|
||||
var dto = new UserReadingProfileDto
|
||||
{
|
||||
ReaderMode = ReaderMode.Webtoon,
|
||||
ScalingOption = ScalingOption.FitToHeight,
|
||||
WidthOverride = 53,
|
||||
};
|
||||
|
||||
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, dto);
|
||||
|
||||
var profile = await rps.GetReadingProfileForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(profile);
|
||||
Assert.Contains(profile.SeriesIds, s => s == series.Id);
|
||||
Assert.Equal(ReadingProfileKind.Implicit, profile.Kind);
|
||||
|
||||
dto = new UserReadingProfileDto
|
||||
{
|
||||
ReaderMode = ReaderMode.LeftRight,
|
||||
};
|
||||
|
||||
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, dto);
|
||||
profile = await rps.GetReadingProfileForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(profile);
|
||||
Assert.Contains(profile.SeriesIds, s => s == series.Id);
|
||||
Assert.Equal(ReadingProfileKind.Implicit, profile.Kind);
|
||||
Assert.Equal(ReaderMode.LeftRight, profile.ReaderMode);
|
||||
|
||||
var implicitCount = await Context.AppUserReadingProfiles
|
||||
.Where(p => p.Kind == ReadingProfileKind.Implicit)
|
||||
.CountAsync();
|
||||
Assert.Equal(1, implicitCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetCorrectProfile()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, series) = await Setup();
|
||||
|
||||
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithSeries(series)
|
||||
.WithName("Series Specific")
|
||||
.Build();
|
||||
var profile2 = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithLibrary(lib)
|
||||
.WithName("Library Specific")
|
||||
.Build();
|
||||
var profile3 = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithKind(ReadingProfileKind.Default)
|
||||
.WithName("Global")
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(profile);
|
||||
Context.AppUserReadingProfiles.Add(profile2);
|
||||
Context.AppUserReadingProfiles.Add(profile3);
|
||||
|
||||
var series2 = new SeriesBuilder("Rainbows After Storms").Build();
|
||||
lib.Series.Add(series2);
|
||||
|
||||
var lib2 = new LibraryBuilder("Manga2").Build();
|
||||
var series3 = new SeriesBuilder("A Tropical Fish Yearns for Snow").Build();
|
||||
lib2.Series.Add(series3);
|
||||
|
||||
user.Libraries.Add(lib2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var p = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(p);
|
||||
Assert.Equal("Series Specific", p.Name);
|
||||
|
||||
p = await rps.GetReadingProfileDtoForSeries(user.Id, series2.Id);
|
||||
Assert.NotNull(p);
|
||||
Assert.Equal("Library Specific", p.Name);
|
||||
|
||||
p = await rps.GetReadingProfileDtoForSeries(user.Id, series3.Id);
|
||||
Assert.NotNull(p);
|
||||
Assert.Equal("Global", p.Name);
|
||||
}
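GetCorrectProfile encodes the expected lookup precedence: a profile bound to the series wins, then one bound to the library, then the default profile. A hedged sketch of that resolution order is below, with illustrative types rather than the actual ReadingProfileService query.

using System.Collections.Generic;
using System.Linq;

// Illustrative shape of a reading profile, only for the lookup sketch below.
public record ProfileSketch(string Name, HashSet<int> SeriesIds, HashSet<int> LibraryIds, bool IsDefault);

public static class ProfileResolutionSketch
{
    // Series-specific beats library-specific, which beats the default profile.
    public static ProfileSketch? Resolve(IReadOnlyList<ProfileSketch> profiles, int seriesId, int libraryId)
    {
        return profiles.FirstOrDefault(p => p.SeriesIds.Contains(seriesId))
               ?? profiles.FirstOrDefault(p => p.LibraryIds.Contains(libraryId))
               ?? profiles.FirstOrDefault(p => p.IsDefault);
    }
}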
|
||||
|
||||
[Fact]
|
||||
public async Task ReplaceReadingProfile()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, series) = await Setup();
|
||||
|
||||
var profile1 = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithSeries(series)
|
||||
.WithName("Profile 1")
|
||||
.Build();
|
||||
|
||||
var profile2 = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithName("Profile 2")
|
||||
.Build();
|
||||
|
||||
Context.AppUserReadingProfiles.Add(profile1);
|
||||
Context.AppUserReadingProfiles.Add(profile2);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var profile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(profile);
|
||||
Assert.Equal("Profile 1", profile.Name);
|
||||
|
||||
await rps.AddProfileToSeries(user.Id, profile2.Id, series.Id);
|
||||
profile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(profile);
|
||||
Assert.Equal("Profile 2", profile.Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DeleteReadingProfile()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, series) = await Setup();
|
||||
|
||||
var profile1 = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithSeries(series)
|
||||
.WithName("Profile 1")
|
||||
.Build();
|
||||
|
||||
Context.AppUserReadingProfiles.Add(profile1);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await rps.ClearSeriesProfile(user.Id, series.Id);
|
||||
var profiles = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
|
||||
Assert.DoesNotContain(profiles, rp => rp.SeriesIds.Contains(series.Id));
|
||||
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task BulkAddReadingProfiles()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, series) = await Setup();
|
||||
|
||||
for (var i = 0; i < 10; i++)
|
||||
{
|
||||
var generatedSeries = new SeriesBuilder($"Generated Series #{i}").Build();
|
||||
lib.Series.Add(generatedSeries);
|
||||
}
|
||||
|
||||
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithSeries(series)
|
||||
.WithName("Profile")
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(profile);
|
||||
|
||||
var profile2 = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithSeries(series)
|
||||
.WithName("Profile2")
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(profile2);
|
||||
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var someSeriesIds = lib.Series.Take(lib.Series.Count / 2).Select(s => s.Id).ToList();
|
||||
await rps.BulkAddProfileToSeries(user.Id, profile.Id, someSeriesIds);
|
||||
|
||||
foreach (var id in someSeriesIds)
|
||||
{
|
||||
var foundProfile = await rps.GetReadingProfileDtoForSeries(user.Id, id);
|
||||
Assert.NotNull(foundProfile);
|
||||
Assert.Equal(profile.Id, foundProfile.Id);
|
||||
}
|
||||
|
||||
var allIds = lib.Series.Select(s => s.Id).ToList();
|
||||
await rps.BulkAddProfileToSeries(user.Id, profile2.Id, allIds);
|
||||
|
||||
foreach (var id in allIds)
|
||||
{
|
||||
var foundProfile = await rps.GetReadingProfileDtoForSeries(user.Id, id);
|
||||
Assert.NotNull(foundProfile);
|
||||
Assert.Equal(profile2.Id, foundProfile.Id);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task BulkAssignDeletesImplicit()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, series) = await Setup();
|
||||
|
||||
var implicitProfile = Mapper.Map<UserReadingProfileDto>(new AppUserReadingProfileBuilder(user.Id)
|
||||
.Build());
|
||||
|
||||
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithName("Profile 1")
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(profile);
|
||||
|
||||
for (var i = 0; i < 10; i++)
|
||||
{
|
||||
var generatedSeries = new SeriesBuilder($"Generated Series #{i}").Build();
|
||||
lib.Series.Add(generatedSeries);
|
||||
}
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var ids = lib.Series.Select(s => s.Id).ToList();
|
||||
|
||||
foreach (var id in ids)
|
||||
{
|
||||
await rps.UpdateImplicitReadingProfile(user.Id, id, implicitProfile);
|
||||
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, id);
|
||||
Assert.NotNull(seriesProfile);
|
||||
Assert.Equal(ReadingProfileKind.Implicit, seriesProfile.Kind);
|
||||
}
|
||||
|
||||
await rps.BulkAddProfileToSeries(user.Id, profile.Id, ids);
|
||||
|
||||
foreach (var id in ids)
|
||||
{
|
||||
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, id);
|
||||
Assert.NotNull(seriesProfile);
|
||||
Assert.Equal(ReadingProfileKind.User, seriesProfile.Kind);
|
||||
}
|
||||
|
||||
var implicitCount = await Context.AppUserReadingProfiles
|
||||
.Where(p => p.Kind == ReadingProfileKind.Implicit)
|
||||
.CountAsync();
|
||||
Assert.Equal(0, implicitCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task AddDeletesImplicit()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, series) = await Setup();
|
||||
|
||||
var implicitProfile = Mapper.Map<UserReadingProfileDto>(new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithKind(ReadingProfileKind.Implicit)
|
||||
.Build());
|
||||
|
||||
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithName("Profile 1")
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(profile);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, implicitProfile);
|
||||
|
||||
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(seriesProfile);
|
||||
Assert.Equal(ReadingProfileKind.Implicit, seriesProfile.Kind);
|
||||
|
||||
await rps.AddProfileToSeries(user.Id, profile.Id, series.Id);
|
||||
|
||||
seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||
Assert.NotNull(seriesProfile);
|
||||
Assert.Equal(ReadingProfileKind.User, seriesProfile.Kind);
|
||||
|
||||
var implicitCount = await Context.AppUserReadingProfiles
|
||||
.Where(p => p.Kind == ReadingProfileKind.Implicit)
|
||||
.CountAsync();
|
||||
Assert.Equal(0, implicitCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CreateReadingProfile()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, series) = await Setup();
|
||||
|
||||
var dto = new UserReadingProfileDto
|
||||
{
|
||||
Name = "Profile 1",
|
||||
ReaderMode = ReaderMode.LeftRight,
|
||||
EmulateBook = false,
|
||||
};
|
||||
|
||||
await rps.CreateReadingProfile(user.Id, dto);
|
||||
|
||||
var dto2 = new UserReadingProfileDto
|
||||
{
|
||||
Name = "Profile 2",
|
||||
ReaderMode = ReaderMode.LeftRight,
|
||||
EmulateBook = false,
|
||||
};
|
||||
|
||||
await rps.CreateReadingProfile(user.Id, dto2);
|
||||
|
||||
var dto3 = new UserReadingProfileDto
|
||||
{
|
||||
Name = "Profile 1", // Not unique name
|
||||
ReaderMode = ReaderMode.LeftRight,
|
||||
EmulateBook = false,
|
||||
};
|
||||
|
||||
await Assert.ThrowsAsync<KavitaException>(async () =>
|
||||
{
|
||||
await rps.CreateReadingProfile(user.Id, dto3);
|
||||
});
|
||||
|
||||
var allProfiles = Context.AppUserReadingProfiles.ToList();
|
||||
Assert.Equal(2, allProfiles.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ClearSeriesProfile_RemovesImplicitAndUnlinksExplicit()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, _, series) = await Setup();
|
||||
|
||||
var implicitProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithSeries(series)
|
||||
.WithKind(ReadingProfileKind.Implicit)
|
||||
.WithName("Implicit Profile")
|
||||
.Build();
|
||||
|
||||
var explicitProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithSeries(series)
|
||||
.WithName("Explicit Profile")
|
||||
.Build();
|
||||
|
||||
Context.AppUserReadingProfiles.Add(implicitProfile);
|
||||
Context.AppUserReadingProfiles.Add(explicitProfile);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var allBefore = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
|
||||
Assert.Equal(2, allBefore.Count(rp => rp.SeriesIds.Contains(series.Id)));
|
||||
|
||||
await rps.ClearSeriesProfile(user.Id, series.Id);
|
||||
|
||||
var remainingProfiles = await Context.AppUserReadingProfiles.ToListAsync();
|
||||
Assert.Single(remainingProfiles);
|
||||
Assert.Equal("Explicit Profile", remainingProfiles[0].Name);
|
||||
Assert.Empty(remainingProfiles[0].SeriesIds);
|
||||
|
||||
var profilesForSeries = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
|
||||
Assert.DoesNotContain(profilesForSeries, rp => rp.SeriesIds.Contains(series.Id));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task AddProfileToLibrary_AddsAndOverridesExisting()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, _) = await Setup();
|
||||
|
||||
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithName("Library Profile")
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(profile);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await rps.AddProfileToLibrary(user.Id, profile.Id, lib.Id);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
var linkedProfile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
|
||||
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
|
||||
Assert.NotNull(linkedProfile);
|
||||
Assert.Equal(profile.Id, linkedProfile.Id);
|
||||
|
||||
var newProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithName("New Profile")
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(newProfile);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await rps.AddProfileToLibrary(user.Id, newProfile.Id, lib.Id);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
linkedProfile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
|
||||
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
|
||||
Assert.NotNull(linkedProfile);
|
||||
Assert.Equal(newProfile.Id, linkedProfile.Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ClearLibraryProfile_RemovesImplicitOrUnlinksExplicit()
|
||||
{
|
||||
await ResetDb();
|
||||
var (rps, user, lib, _) = await Setup();
|
||||
|
||||
var implicitProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithKind(ReadingProfileKind.Implicit)
|
||||
.WithLibrary(lib)
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(implicitProfile);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await rps.ClearLibraryProfile(user.Id, lib.Id);
|
||||
var profile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
|
||||
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
|
||||
Assert.Null(profile);
|
||||
|
||||
var explicitProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||
.WithLibrary(lib)
|
||||
.Build();
|
||||
Context.AppUserReadingProfiles.Add(explicitProfile);
|
||||
await UnitOfWork.CommitAsync();
|
||||
|
||||
await rps.ClearLibraryProfile(user.Id, lib.Id);
|
||||
profile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
|
||||
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
|
||||
Assert.Null(profile);
|
||||
|
||||
var stillExists = await Context.AppUserReadingProfiles.FindAsync(explicitProfile.Id);
|
||||
Assert.NotNull(stillExists);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// As response to #3793, I'm not sure if we want to keep this. It's not the most nice. But I think the idea of this test
|
||||
/// is worth having.
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void UpdateFields_UpdatesAll()
|
||||
{
|
||||
// Repeat to ensure booleans are flipped and actually tested
|
||||
for (int i = 0; i < 10; i++)
|
||||
{
|
||||
var profile = new AppUserReadingProfile();
|
||||
var dto = new UserReadingProfileDto();
|
||||
|
||||
RandfHelper.SetRandomValues(profile);
|
||||
RandfHelper.SetRandomValues(dto);
|
||||
|
||||
ReadingProfileService.UpdateReaderProfileFields(profile, dto);
|
||||
|
||||
var newDto = Mapper.Map<UserReadingProfileDto>(profile);
|
||||
|
||||
Assert.True(RandfHelper.AreSimpleFieldsEqual(dto, newDto,
|
||||
["<Id>k__BackingField", "<UserId>k__BackingField"]));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
Context.AppUserReadingProfiles.RemoveRange(Context.AppUserReadingProfiles);
|
||||
await UnitOfWork.CommitAsync();
|
||||
}
|
||||
}
|
|
@@ -1,619 +1,11 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs.Scrobbling;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Scrobble;
using API.Helpers.Builders;
using API.Services;
using API.Services.Plus;
using API.SignalR;
using Kavita.Common;
using Microsoft.Extensions.Logging;
using NSubstitute;
using API.Services.Plus;
using Xunit;

namespace API.Tests.Services;
#nullable enable

public class ScrobblingServiceTests : AbstractDbTest
public class ScrobblingServiceTests
{
    private const int ChapterPages = 100;

    /// <summary>
    /// {
    ///   "Issuer": "Issuer",
    ///   "Issued At": "2025-06-15T21:01:57.615Z",
    ///   "Expiration": "2200-06-15T21:01:57.615Z"
    /// }
    /// </summary>
    /// <remarks>Our UnitTests will fail in 2200 :(</remarks>
    private const string ValidJwtToken =
        "eyJhbGciOiJIUzI1NiJ9.eyJJc3N1ZXIiOiJJc3N1ZXIiLCJleHAiOjcyNzI0NTAxMTcsImlhdCI6MTc1MDAyMTMxN30.zADmcGq_BfxbcV8vy4xw5Cbzn4COkmVINxgqpuL17Ng";

    private readonly ScrobblingService _service;
    private readonly ILicenseService _licenseService;
    private readonly ILocalizationService _localizationService;
    private readonly ILogger<ScrobblingService> _logger;
    private readonly IEmailService _emailService;
    private readonly IKavitaPlusApiService _kavitaPlusApiService;
    /// <summary>
    /// IReaderService, without the ScrobblingService injected
    /// </summary>
    private readonly IReaderService _readerService;
    /// <summary>
    /// IReaderService, with the _service injected
    /// </summary>
    private readonly IReaderService _hookedUpReaderService;

    public ScrobblingServiceTests()
    {
        _licenseService = Substitute.For<ILicenseService>();
        _localizationService = Substitute.For<ILocalizationService>();
        _logger = Substitute.For<ILogger<ScrobblingService>>();
        _emailService = Substitute.For<IEmailService>();
        _kavitaPlusApiService = Substitute.For<IKavitaPlusApiService>();

        _service = new ScrobblingService(UnitOfWork, Substitute.For<IEventHub>(), _logger, _licenseService,
            _localizationService, _emailService, _kavitaPlusApiService);

        _readerService = new ReaderService(UnitOfWork,
            Substitute.For<ILogger<ReaderService>>(),
            Substitute.For<IEventHub>(),
            Substitute.For<IImageService>(),
            Substitute.For<IDirectoryService>(),
            Substitute.For<IScrobblingService>()); // Do not use the actual one

        _hookedUpReaderService = new ReaderService(UnitOfWork,
            Substitute.For<ILogger<ReaderService>>(),
            Substitute.For<IEventHub>(),
            Substitute.For<IImageService>(),
            Substitute.For<IDirectoryService>(),
            _service);
    }

    protected override async Task ResetDb()
    {
        Context.ScrobbleEvent.RemoveRange(Context.ScrobbleEvent.ToList());
        Context.Series.RemoveRange(Context.Series.ToList());
        Context.Library.RemoveRange(Context.Library.ToList());
        Context.AppUser.RemoveRange(Context.AppUser.ToList());

        await UnitOfWork.CommitAsync();
    }

    private async Task SeedData()
    {
        var series = new SeriesBuilder("Test Series")
            .WithFormat(MangaFormat.Archive)
            .WithMetadata(new SeriesMetadataBuilder().Build())
            .WithVolume(new VolumeBuilder("Volume 1")
                .WithChapters([
                    new ChapterBuilder("1")
                        .WithPages(ChapterPages)
                        .Build(),
                    new ChapterBuilder("2")
                        .WithPages(ChapterPages)
                        .Build(),
                    new ChapterBuilder("3")
                        .WithPages(ChapterPages)
                        .Build()])
                .Build())
            .WithVolume(new VolumeBuilder("Volume 2")
                .WithChapters([
                    new ChapterBuilder("4")
                        .WithPages(ChapterPages)
                        .Build(),
                    new ChapterBuilder("5")
                        .WithPages(ChapterPages)
                        .Build(),
                    new ChapterBuilder("6")
                        .WithPages(ChapterPages)
                        .Build()])
                .Build())
            .Build();

        var library = new LibraryBuilder("Test Library", LibraryType.Manga)
            .WithAllowScrobbling(true)
            .WithSeries(series)
            .Build();


        Context.Library.Add(library);

        var user = new AppUserBuilder("testuser", "testuser")
            //.WithPreferences(new UserPreferencesBuilder().WithAniListScrobblingEnabled(true).Build())
            .Build();

        user.UserPreferences.AniListScrobblingEnabled = true;

        UnitOfWork.UserRepository.Add(user);

        await UnitOfWork.CommitAsync();
    }

    private async Task<ScrobbleEvent> CreateScrobbleEvent(int? seriesId = null)
    {
        var evt = new ScrobbleEvent
        {
            ScrobbleEventType = ScrobbleEventType.ChapterRead,
            Format = PlusMediaFormat.Manga,
            SeriesId = seriesId ?? 0,
            LibraryId = 0,
            AppUserId = 0,
        };

        if (seriesId != null)
        {
            var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId.Value);
            if (series != null) evt.Series = series;
        }

        return evt;
    }


    #region K+ API Request Tests

    [Fact]
    public async Task PostScrobbleUpdate_AuthErrors()
    {
        _kavitaPlusApiService.PostScrobbleUpdate(null!, "")
            .ReturnsForAnyArgs(new ScrobbleResponseDto()
            {
                ErrorMessage = "Unauthorized"
            });

        var evt = await CreateScrobbleEvent();
        await Assert.ThrowsAsync<KavitaException>(async () =>
        {
            await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
        });
        Assert.True(evt.IsErrored);
        Assert.Equal("Kavita+ subscription no longer active", evt.ErrorDetails);
    }

    [Fact]
    public async Task PostScrobbleUpdate_UnknownSeriesLoggedAsError()
    {
        _kavitaPlusApiService.PostScrobbleUpdate(null!, "")
            .ReturnsForAnyArgs(new ScrobbleResponseDto()
            {
                ErrorMessage = "Unknown Series"
            });

        await SeedData();
        var evt = await CreateScrobbleEvent(1);

        await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
        await UnitOfWork.CommitAsync();
        Assert.True(evt.IsErrored);

        var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
        Assert.NotNull(series);
        Assert.True(series.IsBlacklisted);

        var errors = await UnitOfWork.ScrobbleRepository.GetAllScrobbleErrorsForSeries(1);
        Assert.Single(errors);
        Assert.Equal("Series cannot be matched for Scrobbling", errors.First().Comment);
        Assert.Equal(series.Id, errors.First().SeriesId);
    }

    [Fact]
    public async Task PostScrobbleUpdate_InvalidAccessToken()
    {
        _kavitaPlusApiService.PostScrobbleUpdate(null!, "")
            .ReturnsForAnyArgs(new ScrobbleResponseDto()
            {
                ErrorMessage = "Access token is invalid"
            });

        var evt = await CreateScrobbleEvent();

        await Assert.ThrowsAsync<KavitaException>(async () =>
        {
            await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
        });

        Assert.True(evt.IsErrored);
        Assert.Equal("Access Token needs to be rotated to continue scrobbling", evt.ErrorDetails);
    }

    #endregion

    #region K+ API Request data tests

    [Fact]
    public async Task ProcessReadEvents_CreatesNoEventsWhenNoProgress()
    {
        await ResetDb();
        await SeedData();

        // Set Returns
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
        _kavitaPlusApiService.GetRateLimit(Arg.Any<string>(), Arg.Any<string>())
            .Returns(100);

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        // Ensure CanProcessScrobbleEvent returns true
        user.AniListAccessToken = ValidJwtToken;
        UnitOfWork.UserRepository.Update(user);
        await UnitOfWork.CommitAsync();

        var chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(4);
        Assert.NotNull(chapter);

        var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
        Assert.NotNull(volume);

        // Call Scrobble without having any progress
        await _service.ScrobbleReadingUpdate(1, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);
    }

    [Fact]
    public async Task ProcessReadEvents_UpdateVolumeAndChapterData()
    {
        await ResetDb();
        await SeedData();

        // Set Returns
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
        _kavitaPlusApiService.GetRateLimit(Arg.Any<string>(), Arg.Any<string>())
            .Returns(100);

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        // Ensure CanProcessScrobbleEvent returns true
        user.AniListAccessToken = ValidJwtToken;
        UnitOfWork.UserRepository.Update(user);
        await UnitOfWork.CommitAsync();

        var chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(4);
        Assert.NotNull(chapter);

        var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
        Assert.NotNull(volume);

        // Mark something as read to trigger event creation
        await _readerService.MarkChaptersAsRead(user, 1, new List<Chapter>() {volume.Chapters[0]});
        await UnitOfWork.CommitAsync();

        // Call Scrobble while having some progress
        await _service.ScrobbleReadingUpdate(user.Id, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events);

        // Give it some (more) read progress
        await _readerService.MarkChaptersAsRead(user, 1, volume.Chapters);
        await _readerService.MarkChaptersAsRead(user, 1, [chapter]);
        await UnitOfWork.CommitAsync();

        await _service.ProcessUpdatesSinceLastSync();

        await _kavitaPlusApiService.Received(1).PostScrobbleUpdate(
            Arg.Is<ScrobbleDto>(data =>
                data.ChapterNumber == (int)chapter.MaxNumber &&
                data.VolumeNumber == (int)volume.MaxNumber
            ),
            Arg.Any<string>());
    }

    #endregion

    #region Scrobble Reading Update Tests

    [Fact]
    public async Task ScrobbleReadingUpdate_IgnoreNoLicense()
    {
        await ResetDb();
        await SeedData();

        _licenseService.HasActiveLicense().Returns(false);

        await _service.ScrobbleReadingUpdate(1, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);
    }

    [Fact]
    public async Task ScrobbleReadingUpdate_RemoveWhenNoProgress()
    {
        await ResetDb();
        await SeedData();

        _licenseService.HasActiveLicense().Returns(true);

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
        Assert.NotNull(volume);

        await _readerService.MarkChaptersAsRead(user, 1, new List<Chapter>() {volume.Chapters[0]});
        await UnitOfWork.CommitAsync();

        await _service.ScrobbleReadingUpdate(1, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events);

        var readEvent = events.First();
        Assert.False(readEvent.IsProcessed);

        await _hookedUpReaderService.MarkSeriesAsUnread(user, 1);
        await UnitOfWork.CommitAsync();

        // Existing event is deleted
        await _service.ScrobbleReadingUpdate(1, 1);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);

        await _hookedUpReaderService.MarkSeriesAsUnread(user, 1);
        await UnitOfWork.CommitAsync();

        // No new events are added
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);
    }

    [Fact]
    public async Task ScrobbleReadingUpdate_UpdateExistingNotIsProcessed()
    {
        await ResetDb();
        await SeedData();

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        var chapter1 = await UnitOfWork.ChapterRepository.GetChapterAsync(1);
        var chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(2);
        var chapter3 = await UnitOfWork.ChapterRepository.GetChapterAsync(3);
        Assert.NotNull(chapter1);
        Assert.NotNull(chapter2);
        Assert.NotNull(chapter3);

        _licenseService.HasActiveLicense().Returns(true);

        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);


        await _readerService.MarkChaptersAsRead(user, 1, [chapter1]);
        await UnitOfWork.CommitAsync();

        // Scrobble update
        await _service.ScrobbleReadingUpdate(1, 1);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events);

        var readEvent = events[0];
        Assert.False(readEvent.IsProcessed);
        Assert.Equal(1, readEvent.ChapterNumber);

        // Mark as processed
        readEvent.IsProcessed = true;
        await UnitOfWork.CommitAsync();

        await _readerService.MarkChaptersAsRead(user, 1, [chapter2]);
        await UnitOfWork.CommitAsync();

        // Scrobble update
        await _service.ScrobbleReadingUpdate(1, 1);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Equal(2, events.Count);
        Assert.Single(events.Where(e => e.IsProcessed).ToList());
        Assert.Single(events.Where(e => !e.IsProcessed).ToList());

        // Should update the existing non processed event
        await _readerService.MarkChaptersAsRead(user, 1, [chapter3]);
        await UnitOfWork.CommitAsync();

        // Scrobble update
        await _service.ScrobbleReadingUpdate(1, 1);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Equal(2, events.Count);
        Assert.Single(events.Where(e => e.IsProcessed).ToList());
        Assert.Single(events.Where(e => !e.IsProcessed).ToList());
    }

    #endregion

    #region ScrobbleWantToReadUpdate Tests

    [Fact]
    public async Task ScrobbleWantToReadUpdate_NoExistingEvents_WantToRead_ShouldCreateNewEvent()
    {
        // Arrange
        await SeedData();
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

        const int userId = 1;
        const int seriesId = 1;

        // Act
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

        // Assert
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
        Assert.Single(events);
        Assert.Equal(ScrobbleEventType.AddWantToRead, events[0].ScrobbleEventType);
        Assert.Equal(userId, events[0].AppUserId);
    }

    [Fact]
    public async Task ScrobbleWantToReadUpdate_NoExistingEvents_RemoveWantToRead_ShouldCreateNewEvent()
    {
        // Arrange
        await SeedData();
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

        const int userId = 1;
        const int seriesId = 1;

        // Act
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

        // Assert
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
        Assert.Single(events);
        Assert.Equal(ScrobbleEventType.RemoveWantToRead, events[0].ScrobbleEventType);
        Assert.Equal(userId, events[0].AppUserId);
    }

    [Fact]
    public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_WantToRead_ShouldNotCreateNewEvent()
    {
        // Arrange
        await SeedData();
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

        const int userId = 1;
        const int seriesId = 1;

        // First, let's create an event through the service
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

        // Act - Try to create the same event again
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

        // Assert
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);

        Assert.Single(events);
        Assert.All(events, e => Assert.Equal(ScrobbleEventType.AddWantToRead, e.ScrobbleEventType));
    }

    [Fact]
    public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_RemoveWantToRead_ShouldAddRemoveEvent()
    {
        // Arrange
        await SeedData();
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

        const int userId = 1;
        const int seriesId = 1;

        // First, let's create a want-to-read event through the service
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

        // Act - Now remove from want-to-read
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

        // Assert
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);

        Assert.Single(events);
        Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.RemoveWantToRead);
    }

    [Fact]
    public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_RemoveWantToRead_ShouldNotCreateNewEvent()
    {
        // Arrange
        await SeedData();
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

        const int userId = 1;
        const int seriesId = 1;

        // First, let's create a remove-from-want-to-read event through the service
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

        // Act - Try to create the same event again
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

        // Assert
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);

        Assert.Single(events);
        Assert.All(events, e => Assert.Equal(ScrobbleEventType.RemoveWantToRead, e.ScrobbleEventType));
    }

    [Fact]
    public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_WantToRead_ShouldAddWantToReadEvent()
    {
        // Arrange
        await SeedData();
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

        const int userId = 1;
        const int seriesId = 1;

        // First, let's create a remove-from-want-to-read event through the service
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

        // Act - Now add to want-to-read
        await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

        // Assert
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);

        Assert.Single(events);
        Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.AddWantToRead);
    }

    #endregion

    #region Scrobble Rating Update Test

    [Fact]
    public async Task ScrobbleRatingUpdate_IgnoreNoLicense()
    {
        await ResetDb();
        await SeedData();

        _licenseService.HasActiveLicense().Returns(false);

        await _service.ScrobbleRatingUpdate(1, 1, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);
    }

    [Fact]
    public async Task ScrobbleRatingUpdate_UpdateExistingNotIsProcessed()
    {
        await ResetDb();
        await SeedData();

        _licenseService.HasActiveLicense().Returns(true);

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
        Assert.NotNull(series);

        await _service.ScrobbleRatingUpdate(user.Id, series.Id, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events);
        Assert.Equal(1, events.First().Rating);

        // Mark as processed
        events.First().IsProcessed = true;
        await UnitOfWork.CommitAsync();

        await _service.ScrobbleRatingUpdate(user.Id, series.Id, 5);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Equal(2, events.Count);
        Assert.Single(events, evt => evt.IsProcessed);
        Assert.Single(events, evt => !evt.IsProcessed);

        await _service.ScrobbleRatingUpdate(user.Id, series.Id, 5);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events, evt => !evt.IsProcessed);
        Assert.Equal(5, events.First(evt => !evt.IsProcessed).Rating);

    }

    #endregion

    [Theory]
    [InlineData("https://anilist.co/manga/35851/Byeontaega-Doeja/", 35851)]
    [InlineData("https://anilist.co/manga/30105", 30105)]
@@ -1,292 +0,0 @@
using System.Collections.Generic;
using System.IO.Abstractions;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs.KavitaPlus.Metadata;
using API.Entities;
using API.Entities.Enums;
using API.Entities.MetadataMatching;
using API.Services;
using API.Services.Tasks.Scanner;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

namespace API.Tests.Services;

public class SettingsServiceTests
{
    private readonly ISettingsService _settingsService;
    private readonly IUnitOfWork _mockUnitOfWork;

    public SettingsServiceTests()
    {
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());

        _mockUnitOfWork = Substitute.For<IUnitOfWork>();
        _settingsService = new SettingsService(_mockUnitOfWork, ds,
            Substitute.For<ILibraryWatcher>(), Substitute.For<ITaskScheduler>(),
            Substitute.For<ILogger<SettingsService>>());
    }

    #region UpdateMetadataSettings

    [Fact]
    public async Task UpdateMetadataSettings_ShouldUpdateExistingSettings()
    {
        // Arrange
        var existingSettings = new MetadataSettings
        {
            Id = 1,
            Enabled = false,
            EnableSummary = false,
            EnableLocalizedName = false,
            EnablePublicationStatus = false,
            EnableRelationships = false,
            EnablePeople = false,
            EnableStartDate = false,
            EnableGenres = false,
            EnableTags = false,
            FirstLastPeopleNaming = false,
            EnableCoverImage = false,
            AgeRatingMappings = new Dictionary<string, AgeRating>(),
            Blacklist = [],
            Whitelist = [],
            Overrides = [],
            PersonRoles = [],
            FieldMappings = []
        };

        var settingsRepo = Substitute.For<ISettingsRepository>();
        settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
        settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
        _mockUnitOfWork.SettingsRepository.Returns(settingsRepo);

        var updateDto = new MetadataSettingsDto
        {
            Enabled = true,
            EnableSummary = true,
            EnableLocalizedName = true,
            EnablePublicationStatus = true,
            EnableRelationships = true,
            EnablePeople = true,
            EnableStartDate = true,
            EnableGenres = true,
            EnableTags = true,
            FirstLastPeopleNaming = true,
            EnableCoverImage = true,
            AgeRatingMappings = new Dictionary<string, AgeRating> { { "Adult", AgeRating.R18Plus } },
            Blacklist = ["blacklisted-tag"],
            Whitelist = ["whitelisted-tag"],
            Overrides = [MetadataSettingField.Summary],
            PersonRoles = [PersonRole.Writer],
            FieldMappings =
            [
                new MetadataFieldMappingDto
                {
                    SourceType = MetadataFieldType.Genre,
                    DestinationType = MetadataFieldType.Tag,
                    SourceValue = "Action",
                    DestinationValue = "Fight",
                    ExcludeFromSource = true
                }
            ]
        };

        // Act
        await _settingsService.UpdateMetadataSettings(updateDto);

        // Assert
        await _mockUnitOfWork.Received(1).CommitAsync();

        // Verify properties were updated
        Assert.True(existingSettings.Enabled);
        Assert.True(existingSettings.EnableSummary);
        Assert.True(existingSettings.EnableLocalizedName);
        Assert.True(existingSettings.EnablePublicationStatus);
        Assert.True(existingSettings.EnableRelationships);
        Assert.True(existingSettings.EnablePeople);
        Assert.True(existingSettings.EnableStartDate);
        Assert.True(existingSettings.EnableGenres);
        Assert.True(existingSettings.EnableTags);
        Assert.True(existingSettings.FirstLastPeopleNaming);
        Assert.True(existingSettings.EnableCoverImage);

        // Verify collections were updated
        Assert.Single(existingSettings.AgeRatingMappings);
        Assert.Equal(AgeRating.R18Plus, existingSettings.AgeRatingMappings["Adult"]);

        Assert.Single(existingSettings.Blacklist);
        Assert.Equal("blacklisted-tag", existingSettings.Blacklist[0]);

        Assert.Single(existingSettings.Whitelist);
        Assert.Equal("whitelisted-tag", existingSettings.Whitelist[0]);

        Assert.Single(existingSettings.Overrides);
        Assert.Equal(MetadataSettingField.Summary, existingSettings.Overrides[0]);

        Assert.Single(existingSettings.PersonRoles);
        Assert.Equal(PersonRole.Writer, existingSettings.PersonRoles[0]);

        Assert.Single(existingSettings.FieldMappings);
        Assert.Equal(MetadataFieldType.Genre, existingSettings.FieldMappings[0].SourceType);
        Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[0].DestinationType);
        Assert.Equal("Action", existingSettings.FieldMappings[0].SourceValue);
        Assert.Equal("Fight", existingSettings.FieldMappings[0].DestinationValue);
        Assert.True(existingSettings.FieldMappings[0].ExcludeFromSource);
    }

    [Fact]
    public async Task UpdateMetadataSettings_WithNullCollections_ShouldUseEmptyCollections()
    {
        // Arrange
        var existingSettings = new MetadataSettings
        {
            Id = 1,
            FieldMappings = [new MetadataFieldMapping {Id = 1, SourceValue = "OldValue"}]
        };

        var settingsRepo = Substitute.For<ISettingsRepository>();
        settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
        settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
        _mockUnitOfWork.SettingsRepository.Returns(settingsRepo);

        var updateDto = new MetadataSettingsDto
        {
            AgeRatingMappings = null,
            Blacklist = null,
            Whitelist = null,
            Overrides = null,
            PersonRoles = null,
            FieldMappings = null
        };

        // Act
        await _settingsService.UpdateMetadataSettings(updateDto);

        // Assert
        await _mockUnitOfWork.Received(1).CommitAsync();

        Assert.Empty(existingSettings.AgeRatingMappings);
        Assert.Empty(existingSettings.Blacklist);
        Assert.Empty(existingSettings.Whitelist);
        Assert.Empty(existingSettings.Overrides);
        Assert.Empty(existingSettings.PersonRoles);

        // Verify existing field mappings were cleared
        settingsRepo.Received(1).RemoveRange(Arg.Any<List<MetadataFieldMapping>>());
        Assert.Empty(existingSettings.FieldMappings);
    }

    [Fact]
    public async Task UpdateMetadataSettings_WithFieldMappings_ShouldReplaceExistingMappings()
    {
        // Arrange
        var existingSettings = new MetadataSettings
        {
            Id = 1,
            FieldMappings =
            [
                new MetadataFieldMapping
                {
                    Id = 1,
                    SourceType = MetadataFieldType.Genre,
                    DestinationType = MetadataFieldType.Genre,
                    SourceValue = "OldValue",
                    DestinationValue = "OldDestination",
                    ExcludeFromSource = false
                }
            ]
        };

        var settingsRepo = Substitute.For<ISettingsRepository>();
        settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
        settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
        _mockUnitOfWork.SettingsRepository.Returns(settingsRepo);

        var updateDto = new MetadataSettingsDto
        {
            FieldMappings =
            [
                new MetadataFieldMappingDto
                {
                    SourceType = MetadataFieldType.Tag,
                    DestinationType = MetadataFieldType.Genre,
                    SourceValue = "NewValue",
                    DestinationValue = "NewDestination",
                    ExcludeFromSource = true
                },

                new MetadataFieldMappingDto
                {
                    SourceType = MetadataFieldType.Tag,
                    DestinationType = MetadataFieldType.Tag,
                    SourceValue = "AnotherValue",
                    DestinationValue = "AnotherDestination",
                    ExcludeFromSource = false
                }
            ]
        };

        // Act
        await _settingsService.UpdateMetadataSettings(updateDto);

        // Assert
        await _mockUnitOfWork.Received(1).CommitAsync();

        // Verify existing field mappings were cleared and new ones added
        settingsRepo.Received(1).RemoveRange(Arg.Any<List<MetadataFieldMapping>>());
        Assert.Equal(2, existingSettings.FieldMappings.Count);

        // Verify first mapping
        Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[0].SourceType);
        Assert.Equal(MetadataFieldType.Genre, existingSettings.FieldMappings[0].DestinationType);
        Assert.Equal("NewValue", existingSettings.FieldMappings[0].SourceValue);
        Assert.Equal("NewDestination", existingSettings.FieldMappings[0].DestinationValue);
        Assert.True(existingSettings.FieldMappings[0].ExcludeFromSource);

        // Verify second mapping
        Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[1].SourceType);
        Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[1].DestinationType);
        Assert.Equal("AnotherValue", existingSettings.FieldMappings[1].SourceValue);
        Assert.Equal("AnotherDestination", existingSettings.FieldMappings[1].DestinationValue);
        Assert.False(existingSettings.FieldMappings[1].ExcludeFromSource);
    }

    [Fact]
    public async Task UpdateMetadataSettings_WithBlacklistWhitelist_ShouldNormalizeAndDeduplicateEntries()
    {
        // Arrange
        var existingSettings = new MetadataSettings
        {
            Id = 1,
            Blacklist = [],
            Whitelist = []
        };

        // We need to mock the repository and provide a custom implementation for ToNormalized
        var settingsRepo = Substitute.For<ISettingsRepository>();
        settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
        settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
        _mockUnitOfWork.SettingsRepository.Returns(settingsRepo);

        var updateDto = new MetadataSettingsDto
        {
            // Include duplicates with different casing and whitespace
            Blacklist = ["tag1", "Tag1", " tag2 ", "", " ", "tag3"],
            Whitelist = ["allowed1", "Allowed1", " allowed2 ", "", "allowed3"]
        };

        // Act
        await _settingsService.UpdateMetadataSettings(updateDto);

        // Assert
        await _mockUnitOfWork.Received(1).CommitAsync();

        Assert.Equal(3, existingSettings.Blacklist.Count);
        Assert.Equal(3, existingSettings.Whitelist.Count);
    }

    #endregion
}
@@ -31,24 +31,24 @@ public abstract class SiteThemeServiceTest : AbstractDbTest

    protected override async Task ResetDb()
    {
        Context.SiteTheme.RemoveRange(Context.SiteTheme);
        await Context.SaveChangesAsync();
        _context.SiteTheme.RemoveRange(_context.SiteTheme);
        await _context.SaveChangesAsync();
        // Recreate defaults
        await Seed.SeedThemes(Context);
        await Seed.SeedThemes(_context);
    }

    [Fact]
    public async Task UpdateDefault_ShouldThrowOnInvalidId()
    {
        await ResetDb();
        _testOutputHelper.WriteLine($"[UpdateDefault_ShouldThrowOnInvalidId] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
        _testOutputHelper.WriteLine($"[UpdateDefault_ShouldThrowOnInvalidId] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
        var filesystem = CreateFileSystem();
        filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
        var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub, Substitute.For<IFileService>(),
            Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());

        Context.SiteTheme.Add(new SiteTheme()
        _context.SiteTheme.Add(new SiteTheme()
        {
            Name = "Custom",
            NormalizedName = "Custom".ToNormalized(),
@@ -56,7 +56,7 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
            FileName = "custom.css",
            IsDefault = false
        });
        await Context.SaveChangesAsync();
        await _context.SaveChangesAsync();

        var ex = await Assert.ThrowsAsync<KavitaException>(() => siteThemeService.UpdateDefault(10));
        Assert.Equal("Theme file missing or invalid", ex.Message);
@@ -68,14 +68,14 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
    public async Task GetContent_ShouldReturnContent()
    {
        await ResetDb();
        _testOutputHelper.WriteLine($"[GetContent_ShouldReturnContent] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
        _testOutputHelper.WriteLine($"[GetContent_ShouldReturnContent] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
        var filesystem = CreateFileSystem();
        filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
        var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub, Substitute.For<IFileService>(),
            Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());

        Context.SiteTheme.Add(new SiteTheme()
        _context.SiteTheme.Add(new SiteTheme()
        {
            Name = "Custom",
            NormalizedName = "Custom".ToNormalized(),
@@ -83,9 +83,9 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
            FileName = "custom.css",
            IsDefault = false
        });
        await Context.SaveChangesAsync();
        await _context.SaveChangesAsync();

        var content = await siteThemeService.GetContent((await UnitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom")).Id);
        var content = await siteThemeService.GetContent((await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom")).Id);
        Assert.NotNull(content);
        Assert.NotEmpty(content);
        Assert.Equal("123", content);
@@ -95,14 +95,14 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
    public async Task UpdateDefault_ShouldHaveOneDefault()
    {
        await ResetDb();
        _testOutputHelper.WriteLine($"[UpdateDefault_ShouldHaveOneDefault] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
        _testOutputHelper.WriteLine($"[UpdateDefault_ShouldHaveOneDefault] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
        var filesystem = CreateFileSystem();
        filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
        var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
        var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub, Substitute.For<IFileService>(),
            Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());

        Context.SiteTheme.Add(new SiteTheme()
        _context.SiteTheme.Add(new SiteTheme()
        {
            Name = "Custom",
            NormalizedName = "Custom".ToNormalized(),
@@ -110,16 +110,16 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
            FileName = "custom.css",
            IsDefault = false
        });
        await Context.SaveChangesAsync();
        await _context.SaveChangesAsync();

        var customTheme = (await UnitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom"));
        var customTheme = (await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom"));

        Assert.NotNull(customTheme);
        await siteThemeService.UpdateDefault(customTheme.Id);



        Assert.Equal(customTheme.Id, (await UnitOfWork.SiteThemeRepository.GetDefaultTheme()).Id);
        Assert.Equal(customTheme.Id, (await _unitOfWork.SiteThemeRepository.GetDefaultTheme()).Id);
    }

}
@@ -1,5 +1,7 @@
using API.Helpers.Builders;
using API.Extensions;
using API.Helpers.Builders;
using API.Services.Plus;
using API.Services.Tasks;

namespace API.Tests.Services;
using System.Collections.Generic;
@@ -14,6 +16,7 @@ using API.Entities.Enums;
using API.Helpers;
using API.Services;
using SignalR;
using Helpers;
using AutoMapper;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
@@ -49,7 +52,7 @@ public class TachiyomiServiceTests
            Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
            new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()),
            Substitute.For<IScrobblingService>());
        _tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For<ILogger<TachiyomiService>>(), _readerService);
        _tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For<ILogger<ReaderService>>(), _readerService);

    }

[11 binary image files removed — sizes: 17 KiB, 678 B, 336 KiB, 28 KiB, 320 KiB, 340 KiB, 294 KiB, 286 KiB, 327 KiB, 168 KiB, 69 B]