Merged develop in
|
@ -8,10 +8,4 @@
|
|||
# You can see what browsers were selected by your queries by running:
|
||||
# npx browserslist
|
||||
|
||||
last 1 Chrome version
|
||||
last 1 Firefox version
|
||||
last 2 Edge major versions
|
||||
last 2 Safari major versions
|
||||
last 2 iOS major versions
|
||||
Firefox ESR
|
||||
not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
|
||||
defaults
|
|
@ -1,6 +1,7 @@
|
|||
# Editor configuration, see https://editorconfig.org
|
||||
root = true
|
||||
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
|
@ -22,3 +23,7 @@ indent_size = 2
|
|||
|
||||
[*.csproj]
|
||||
indent_size = 2
|
||||
|
||||
[*.cs]
|
||||
# Disable SonarLint warning S1075 (Don't use hardcoded url)
|
||||
dotnet_diagnostic.S1075.severity = none
|
||||
|
|
62
.github/DISCUSSION_TEMPLATE/ideas.yml
vendored
|
@ -1,68 +1,48 @@
|
|||
title: "[Kavita] Idea Submission"
|
||||
labels: ["Idea Submission"]
|
||||
title: "[Kavita] Idea / Feature Submission"
|
||||
labels:
|
||||
- "Idea Submission"
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
## 🌟 Idea Submission for Kavita 🌟
|
||||
|
||||
This is a template for submitting your ideas to enhance Kavita. Please fill out the details below, and let's make Kavita even better together!
|
||||
## Idea Submission for Kavita 💡
|
||||
|
||||
Please fill out the details below, and let's make Kavita even better together!
|
||||
|
||||
- type: textarea
|
||||
id: idea-description
|
||||
attributes:
|
||||
label: Idea Description
|
||||
description: "Describe your idea in detail."
|
||||
value: |
|
||||
[Include a brief overview of your idea]
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**Why I Think This Is Important:**
|
||||
|
||||
[Provide context on why you believe this idea is valuable or necessary for Kavita users]
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**How You Can Contribute:**
|
||||
|
||||
1. **Upvote if You Agree:**
|
||||
- If you resonate with my idea, please upvote it! This helps us gauge community interest.
|
||||
|
||||
2. **Leave Your Thoughts:**
|
||||
- Feel free to leave comments with your opinions, suggestions, or even constructive critiques.
|
||||
|
||||
Let's work together to shape the future of Kavita! 🌟
|
||||
|
||||
- type: input
|
||||
id: duration-of-use
|
||||
attributes:
|
||||
label: Duration of Using Kavita
|
||||
description: "How long have you been using Kavita?"
|
||||
validations:
|
||||
required: true
|
||||
|
||||
Go into as much detail as possible to explain why your idea should be added to Kavita. Try to present some use cases and examples of how it would help other users. The more detail you have the better.
|
||||
|
||||
- type: dropdown
|
||||
id: idea-category
|
||||
attributes:
|
||||
label: Idea Category
|
||||
options:
|
||||
- API
|
||||
- Feature Enhancement
|
||||
- User Experience
|
||||
- Performance Improvement
|
||||
description: "Select the category that best fits your idea."
|
||||
- Web UI
|
||||
description: "What area would your idea help with?"
|
||||
validations:
|
||||
required: true
|
||||
|
||||
|
||||
- type: input
|
||||
id: duration-of-use
|
||||
attributes:
|
||||
label: Duration of Using Kavita
|
||||
description: "How long have you been using Kavita?"
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Agreement
|
||||
label: Before submitting
|
||||
options:
|
||||
- label: "I agree that this is solely for submitting ideas, and I will search for existing ideas before posting."
|
||||
- label: "I've already searched for existing ideas before posting."
|
||||
required: true
|
||||
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
|
|
8
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
|
@ -25,10 +25,10 @@ body:
|
|||
- type: dropdown
|
||||
id: version
|
||||
attributes:
|
||||
label: Kavita Version Number - Don't see your version number listed? Then your install is out of date. Please update and see if your issue still persists.
|
||||
label: Kavita Version Number - If you don't see your version number listed, please update Kavita and see if your issue still persists.
|
||||
multiple: false
|
||||
options:
|
||||
- 0.7.14 - Stable
|
||||
- 0.8.6.2 - Stable
|
||||
- Nightly Testing Branch
|
||||
validations:
|
||||
required: true
|
||||
|
@ -75,13 +75,13 @@ body:
|
|||
- type: dropdown
|
||||
id: mobile-browsers
|
||||
attributes:
|
||||
label: If the issue is being seen on the UI, what browsers are you seeing the problem on?
|
||||
label: If the issue is being seen on the Mobile UI, what browsers are you seeing the problem on?
|
||||
multiple: true
|
||||
options:
|
||||
- Firefox
|
||||
- Chrome
|
||||
- Safari
|
||||
- Microsoft Edge
|
||||
- Other iOS Browser
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
|
|
10
.github/workflows/build-and-test.yml
vendored
|
@ -10,23 +10,23 @@ jobs:
|
|||
runs-on: windows-latest
|
||||
steps:
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v3
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install Swashbuckle CLI
|
||||
shell: powershell
|
||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
||||
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
|
||||
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: csproj
|
||||
path: Kavita.Common/Kavita.Common.csproj
|
||||
|
|
40
.github/workflows/canary-workflow.yml
vendored
|
@ -9,14 +9,14 @@ on:
|
|||
jobs:
|
||||
build:
|
||||
name: Upload Kavita.Common for Version Bump
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: csproj
|
||||
path: Kavita.Common/Kavita.Common.csproj
|
||||
|
@ -24,16 +24,16 @@ jobs:
|
|||
version:
|
||||
name: Bump version
|
||||
needs: [ build ]
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v3
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Bump versions
|
||||
uses: SiqiLu/dotnet-bump-version@2.0.0
|
||||
|
@ -45,7 +45,7 @@ jobs:
|
|||
canary:
|
||||
name: Build Canary Docker
|
||||
needs: [ build, version ]
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
|
@ -59,14 +59,14 @@ jobs:
|
|||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Check Out Repo
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: canary
|
||||
|
||||
- name: NodeJS to Compile WebUI
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '18.13.x'
|
||||
node-version: 20
|
||||
- run: |
|
||||
cd UI/Web || exit
|
||||
echo 'Installing web dependencies'
|
||||
|
@ -81,7 +81,7 @@ jobs:
|
|||
cd ../ || exit
|
||||
|
||||
- name: Get csproj Version
|
||||
uses: kzrnm/get-net-sdk-project-versions-action@v1
|
||||
uses: kzrnm/get-net-sdk-project-versions-action@v2
|
||||
id: get-version
|
||||
with:
|
||||
proj-path: Kavita.Common/Kavita.Common.csproj
|
||||
|
@ -96,38 +96,38 @@ jobs:
|
|||
run: echo "${{steps.get-version.outputs.assembly-version}}"
|
||||
|
||||
- name: Compile dotnet app
|
||||
uses: actions/setup-dotnet@v3
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install Swashbuckle CLI
|
||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
||||
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
|
||||
|
||||
- run: ./monorepo-build.sh
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||
|
|
19
.github/workflows/codeql.yml
vendored
|
@ -13,7 +13,7 @@ name: "CodeQL"
|
|||
|
||||
on:
|
||||
push:
|
||||
branches: [ "develop", "main" ]
|
||||
branches: [ "develop"]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ "develop" ]
|
||||
|
@ -38,7 +38,7 @@ jobs:
|
|||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'csharp', 'javascript-typescript', 'python' ]
|
||||
language: [ 'csharp', 'javascript-typescript' ]
|
||||
# CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
|
||||
# Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
|
||||
# Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
|
||||
|
@ -46,15 +46,16 @@ jobs:
|
|||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Swashbuckle CLI
|
||||
shell: bash
|
||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
|
@ -68,7 +69,7 @@ jobs:
|
|||
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
@ -81,6 +82,6 @@ jobs:
|
|||
dotnet build Kavita.sln
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
|
69
.github/workflows/develop-workflow.yml
vendored
|
@ -2,15 +2,12 @@ name: Nightly Workflow
|
|||
|
||||
on:
|
||||
push:
|
||||
branches: ['!release/**']
|
||||
pull_request:
|
||||
branches: [ 'develop', '!release/**' ]
|
||||
types: [ closed ]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
debug:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Debug Info
|
||||
run: |
|
||||
|
@ -20,15 +17,15 @@ jobs:
|
|||
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
|
||||
build:
|
||||
name: Upload Kavita.Common for Version Bump
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
|
||||
runs-on: ubuntu-24.04
|
||||
if: github.ref == 'refs/heads/develop'
|
||||
steps:
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: csproj
|
||||
path: Kavita.Common/Kavita.Common.csproj
|
||||
|
@ -36,17 +33,17 @@ jobs:
|
|||
version:
|
||||
name: Bump version
|
||||
needs: [ build ]
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
|
||||
runs-on: ubuntu-24.04
|
||||
if: github.ref == 'refs/heads/develop'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup .NET Core
|
||||
uses: actions/setup-dotnet@v3
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Bump versions
|
||||
uses: majora2007/dotnet-bump-version@v0.0.10
|
||||
|
@ -58,8 +55,8 @@ jobs:
|
|||
develop:
|
||||
name: Build Nightly Docker
|
||||
needs: [ build, version ]
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
|
||||
runs-on: ubuntu-24.04
|
||||
if: github.ref == 'refs/heads/develop'
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
|
@ -92,18 +89,18 @@ jobs:
|
|||
echo "BODY=$body" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check Out Repo
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: develop
|
||||
|
||||
- name: NodeJS to Compile WebUI
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '18.13.x'
|
||||
node-version: 20
|
||||
- run: |
|
||||
cd UI/Web || exit
|
||||
echo 'Installing web dependencies'
|
||||
npm install --legacy-peer-deps
|
||||
npm ci
|
||||
|
||||
echo 'Building UI'
|
||||
npm run prod
|
||||
|
@ -114,7 +111,7 @@ jobs:
|
|||
cd ../ || exit
|
||||
|
||||
- name: Get csproj Version
|
||||
uses: kzrnm/get-net-sdk-project-versions-action@v1
|
||||
uses: kzrnm/get-net-sdk-project-versions-action@v2
|
||||
id: get-version
|
||||
with:
|
||||
proj-path: Kavita.Common/Kavita.Common.csproj
|
||||
|
@ -129,49 +126,63 @@ jobs:
|
|||
run: echo "${{steps.get-version.outputs.assembly-version}}"
|
||||
|
||||
- name: Compile dotnet app
|
||||
uses: actions/setup-dotnet@v3
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install Swashbuckle CLI
|
||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
||||
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
|
||||
|
||||
- run: ./monorepo-build.sh
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
if: ${{ github.repository_owner == 'Kareadita' }}
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: docker_meta_nightly
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
tags: |
|
||||
type=raw,value=nightly
|
||||
type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
|
||||
images: |
|
||||
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
|
||||
name=ghcr.io/${{ github.repository }}
|
||||
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||
push: true
|
||||
tags: jvmilazz0/kavita:nightly, jvmilazz0/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:nightly, ghcr.io/kareadita/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
|
||||
tags: ${{ steps.docker_meta_nightly.outputs.tags }}
|
||||
labels: ${{ steps.docker_meta_nightly.outputs.labels }}
|
||||
|
||||
- name: Image digest
|
||||
run: echo ${{ steps.docker_build.outputs.digest }}
|
||||
|
||||
- name: Notify Discord
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
if: ${{ github.repository_owner == 'Kareadita' }}
|
||||
with:
|
||||
severity: info
|
||||
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
|
||||
|
|
68
.github/workflows/openapi-gen.yml
vendored
Normal file
|
@ -0,0 +1,68 @@
|
|||
name: Generate OpenAPI Documentation
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ 'develop', '!release/**' ]
|
||||
paths:
|
||||
- '**/*.cs'
|
||||
- '**/*.csproj'
|
||||
pull_request:
|
||||
branches: [ 'develop', '!release/**' ]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
generate-openapi:
|
||||
runs-on: ubuntu-latest
|
||||
# Only run on direct pushes to develop, not PRs
|
||||
if: (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && github.repository_owner == 'Kareadita'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 9.0.x
|
||||
|
||||
- name: Install dependencies
|
||||
run: dotnet restore
|
||||
|
||||
- name: Build project
|
||||
run: dotnet build API/API.csproj --configuration Debug
|
||||
|
||||
- name: Get Swashbuckle version
|
||||
id: swashbuckle-version
|
||||
run: |
|
||||
VERSION=$(grep -o '<PackageReference Include="Swashbuckle.AspNetCore" Version="[^"]*"' API/API.csproj | grep -o 'Version="[^"]*"' | cut -d'"' -f2)
|
||||
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "Found Swashbuckle.AspNetCore version: $VERSION"
|
||||
|
||||
- name: Install matching Swashbuckle CLI tool
|
||||
run: |
|
||||
dotnet new tool-manifest --force
|
||||
dotnet tool install Swashbuckle.AspNetCore.Cli --version ${{ steps.swashbuckle-version.outputs.VERSION }}
|
||||
|
||||
- name: Generate OpenAPI file
|
||||
run: dotnet swagger tofile --output openapi.json API/bin/Debug/net9.0/API.dll v1
|
||||
|
||||
- name: Check for changes
|
||||
id: git-check
|
||||
run: |
|
||||
git add openapi.json
|
||||
git diff --staged --quiet openapi.json || echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Commit and push if changed
|
||||
if: steps.git-check.outputs.has_changes == 'true'
|
||||
run: |
|
||||
git config --local user.email "action@github.com"
|
||||
git config --local user.name "GitHub Action"
|
||||
|
||||
git commit -m "Update OpenAPI documentation" openapi.json
|
||||
|
||||
# Pull latest changes with rebase to avoid merge commits
|
||||
git pull --rebase origin develop
|
||||
|
||||
git push
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.REPO_GHA_PAT }}
|
4
.github/workflows/pr-check.yml
vendored
|
@ -1,15 +1,13 @@
|
|||
name: Validate PR Body
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: '**'
|
||||
pull_request:
|
||||
branches: [ main, develop, canary ]
|
||||
types: [synchronize]
|
||||
|
||||
jobs:
|
||||
check_pr:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Extract branch name
|
||||
shell: bash
|
||||
|
|
102
.github/workflows/release-workflow.yml
vendored
|
@ -10,7 +10,7 @@ on:
|
|||
|
||||
jobs:
|
||||
debug:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Debug Info
|
||||
run: |
|
||||
|
@ -20,21 +20,21 @@ jobs:
|
|||
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
|
||||
if_merged:
|
||||
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- run: |
|
||||
echo The PR was merged
|
||||
build:
|
||||
name: Upload Kavita.Common for Version Bump
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
||||
steps:
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: csproj
|
||||
path: Kavita.Common/Kavita.Common.csproj
|
||||
|
@ -43,7 +43,7 @@ jobs:
|
|||
name: Build Stable and Nightly Docker if Release
|
||||
needs: [ build ]
|
||||
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
|
@ -58,38 +58,25 @@ jobs:
|
|||
- name: Parse PR body
|
||||
id: parse-body
|
||||
run: |
|
||||
body="${{ steps.findPr.outputs.body }}"
|
||||
body=${body//\'/}
|
||||
body=${body//'%'/'%25'}
|
||||
body=${body//$'\n'/'%0A'}
|
||||
body=${body//$'\r'/'%0D'}
|
||||
body=${body//$'`'/'%60'}
|
||||
body=${body//$'>'/'%3E'}
|
||||
|
||||
if [[ ${#body} -gt 1870 ]] ; then
|
||||
body=${body:0:1870}
|
||||
body="${body}...and much more.
|
||||
|
||||
Read full changelog: https://github.com/Kareadita/Kavita/releases/latest"
|
||||
fi
|
||||
body="Read full changelog: https://github.com/Kareadita/Kavita/releases/latest"
|
||||
|
||||
echo $body
|
||||
echo "BODY=$body" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check Out Repo
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: develop
|
||||
|
||||
- name: NodeJS to Compile WebUI
|
||||
uses: actions/setup-node@v3
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '18.13.x'
|
||||
node-version: 20
|
||||
- run: |
|
||||
|
||||
cd UI/Web || exit
|
||||
echo 'Installing web dependencies'
|
||||
npm install --legacy-peer-deps
|
||||
npm ci
|
||||
|
||||
echo 'Building UI'
|
||||
npm run prod
|
||||
|
@ -100,7 +87,7 @@ jobs:
|
|||
cd ../ || exit
|
||||
|
||||
- name: Get csproj Version
|
||||
uses: kzrnm/get-net-sdk-project-versions-action@v1
|
||||
uses: kzrnm/get-net-sdk-project-versions-action@v2
|
||||
id: get-version
|
||||
with:
|
||||
proj-path: Kavita.Common/Kavita.Common.csproj
|
||||
|
@ -117,72 +104,79 @@ jobs:
|
|||
id: parse-version
|
||||
|
||||
- name: Compile dotnet app
|
||||
uses: actions/setup-dotnet@v3
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 8.0.x
|
||||
dotnet-version: 9.0.x
|
||||
- name: Install Swashbuckle CLI
|
||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
||||
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
|
||||
|
||||
- run: ./monorepo-build.sh
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
if: ${{ github.repository_owner == 'Kareadita' }}
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: docker_meta_stable
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
tags: |
|
||||
type=raw,value=latest
|
||||
type=raw,value=${{ steps.parse-version.outputs.VERSION }}
|
||||
images: |
|
||||
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
|
||||
name=ghcr.io/${{ github.repository }}
|
||||
|
||||
- name: Build and push stable
|
||||
id: docker_build_stable
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||
push: true
|
||||
tags: jvmilazz0/kavita:latest, jvmilazz0/kavita:${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:latest, ghcr.io/kareadita/kavita:${{ steps.parse-version.outputs.VERSION }}
|
||||
tags: ${{ steps.docker_meta_stable.outputs.tags }}
|
||||
labels: ${{ steps.docker_meta_stable.outputs.labels }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: docker_meta_nightly
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
tags: |
|
||||
type=raw,value=nightly
|
||||
type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
|
||||
images: |
|
||||
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
|
||||
name=ghcr.io/${{ github.repository }}
|
||||
|
||||
- name: Build and push nightly
|
||||
id: docker_build_nightly
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||
push: true
|
||||
tags: jvmilazz0/kavita:nightly, jvmilazz0/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:nightly, ghcr.io/kareadita/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
|
||||
tags: ${{ steps.docker_meta_nightly.outputs.tags }}
|
||||
labels: ${{ steps.docker_meta_nightly.outputs.labels }}
|
||||
|
||||
- name: Image digest
|
||||
run: echo ${{ steps.docker_build_stable.outputs.digest }}
|
||||
|
||||
- name: Image digest
|
||||
run: echo ${{ steps.docker_build_nightly.outputs.digest }}
|
||||
|
||||
- name: Notify Discord
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
with:
|
||||
severity: info
|
||||
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
|
||||
details: '${{ steps.findPr.outputs.body }}'
|
||||
text: <@&939225192553644133> A new stable build has been released.
|
||||
webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
|
||||
|
||||
- name: Notify Discord
|
||||
uses: rjstone/discord-webhook-notify@v1
|
||||
with:
|
||||
severity: info
|
||||
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
|
||||
details: '${{ steps.findPr.outputs.body }}'
|
||||
text: <@&939225459156217917> <@&939225350775406643> A new nightly build has been released for docker.
|
||||
webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
|
||||
|
|
11
.gitignore
vendored
|
@ -513,6 +513,7 @@ UI/Web/dist/
|
|||
/API/config/stats/
|
||||
/API/config/bookmarks/
|
||||
/API/config/favicons/
|
||||
/API/config/cache-long/
|
||||
/API/config/kavita.db
|
||||
/API/config/kavita.db-shm
|
||||
/API/config/kavita.db-wal
|
||||
|
@ -520,9 +521,11 @@ UI/Web/dist/
|
|||
/API/config/*.db
|
||||
/API/config/*.bak
|
||||
/API/config/*.backup
|
||||
/API/config/*.csv
|
||||
/API/config/Hangfire.db
|
||||
/API/config/Hangfire-log.db
|
||||
API/config/covers/
|
||||
API/config/images/*
|
||||
API/config/stats/*
|
||||
API/config/stats/app_stats.json
|
||||
API/config/pre-metadata/
|
||||
|
@ -533,3 +536,11 @@ UI/Web/.vscode/settings.json
|
|||
/API.Tests/Services/Test Data/ArchiveService/CoverImages/output/*
|
||||
UI/Web/.angular/
|
||||
BenchmarkDotNet.Artifacts
|
||||
|
||||
|
||||
API.Tests/Services/Test Data/ImageService/**/*_output*
|
||||
API.Tests/Services/Test Data/ImageService/**/*_baseline*
|
||||
API.Tests/Services/Test Data/ImageService/**/*.html
|
||||
|
||||
|
||||
API.Tests/Services/Test Data/ScannerService/ScanTests/**/*
|
||||
|
|
15
.sonarcloud.properties
Normal file
|
@ -0,0 +1,15 @@
|
|||
# Path to sources
|
||||
sonar.sources=.
|
||||
sonar.exclusions=API.Benchmark
|
||||
#sonar.inclusions=
|
||||
|
||||
# Path to tests
|
||||
sonar.tests=API.Tests
|
||||
#sonar.test.exclusions=
|
||||
#sonar.test.inclusions=
|
||||
|
||||
# Source encoding
|
||||
sonar.sourceEncoding=UTF-8
|
||||
|
||||
# Exclusions for copy-paste detection
|
||||
#sonar.cpd.exclusions=
|
|
@ -1,7 +1,7 @@
|
|||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<TargetFramework>net9.0</TargetFramework>
|
||||
<OutputType>Exe</OutputType>
|
||||
</PropertyGroup>
|
||||
|
||||
|
@ -10,9 +10,9 @@
|
|||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BenchmarkDotNet" Version="0.13.12" />
|
||||
<PackageReference Include="BenchmarkDotNet.Annotations" Version="0.13.12" />
|
||||
<PackageReference Include="NSubstitute" Version="5.1.0" />
|
||||
<PackageReference Include="BenchmarkDotNet" Version="0.14.0" />
|
||||
<PackageReference Include="BenchmarkDotNet.Annotations" Version="0.14.0" />
|
||||
<PackageReference Include="NSubstitute" Version="5.3.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
|
|
@ -32,7 +32,7 @@ public class ArchiveServiceBenchmark
|
|||
public ArchiveServiceBenchmark()
|
||||
{
|
||||
_directoryService = new DirectoryService(null, new FileSystem());
|
||||
_imageService = new ImageService(null, _directoryService, Substitute.For<IEasyCachingProviderFactory>());
|
||||
_imageService = new ImageService(null, _directoryService);
|
||||
_archiveService = new ArchiveService(new NullLogger<ArchiveService>(), _directoryService, _imageService, Substitute.For<IMediaErrorService>());
|
||||
}
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ public class TestBenchmark
|
|||
|
||||
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
|
||||
{
|
||||
foreach (var v in volumes.Where(vDto => vDto.MinNumber == 0))
|
||||
foreach (var v in volumes.WhereNotLooseLeaf())
|
||||
{
|
||||
v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList();
|
||||
}
|
||||
|
|
|
@ -1,22 +1,22 @@
|
|||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<TargetFramework>net9.0</TargetFramework>
|
||||
<IsPackable>false</IsPackable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="8.0.1" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0" />
|
||||
<PackageReference Include="NSubstitute" Version="5.1.0" />
|
||||
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.15" />
|
||||
<PackageReference Include="TestableIO.System.IO.Abstractions.Wrappers" Version="20.0.15" />
|
||||
<PackageReference Include="xunit" Version="2.6.6" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.6">
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.4" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.13.0" />
|
||||
<PackageReference Include="NSubstitute" Version="5.3.0" />
|
||||
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="22.0.13" />
|
||||
<PackageReference Include="TestableIO.System.IO.Abstractions.Wrappers" Version="22.0.13" />
|
||||
<PackageReference Include="xunit" Version="2.9.3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.2">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.0">
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.4">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
|
@ -28,7 +28,7 @@
|
|||
|
||||
<ItemGroup>
|
||||
<Folder Include="Services\Test Data\ArchiveService\ComicInfos" />
|
||||
<Folder Include="Services\Test Data\ScannerService\Manga" />
|
||||
<Folder Include="Services\Test Data\ImageService\Covers\" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
using System.Collections.Generic;
|
||||
using System;
|
||||
using System.Data.Common;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
|
@ -10,6 +9,7 @@ using API.Helpers;
|
|||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using AutoMapper;
|
||||
using Hangfire;
|
||||
using Microsoft.Data.Sqlite;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
|
@ -18,36 +18,33 @@ using NSubstitute;
|
|||
|
||||
namespace API.Tests;
|
||||
|
||||
public abstract class AbstractDbTest
|
||||
public abstract class AbstractDbTest : AbstractFsTest , IDisposable
|
||||
{
|
||||
protected readonly DbConnection _connection;
|
||||
protected readonly DataContext _context;
|
||||
protected readonly IUnitOfWork _unitOfWork;
|
||||
|
||||
|
||||
protected const string CacheDirectory = "C:/kavita/config/cache/";
|
||||
protected const string CoverImageDirectory = "C:/kavita/config/covers/";
|
||||
protected const string BackupDirectory = "C:/kavita/config/backups/";
|
||||
protected const string LogDirectory = "C:/kavita/config/logs/";
|
||||
protected const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
|
||||
protected const string SiteThemeDirectory = "C:/kavita/config/themes/";
|
||||
protected const string TempDirectory = "C:/kavita/config/temp/";
|
||||
protected const string DataDirectory = "C:/data/";
|
||||
protected readonly IMapper _mapper;
|
||||
|
||||
protected AbstractDbTest()
|
||||
{
|
||||
var contextOptions = new DbContextOptionsBuilder()
|
||||
var contextOptions = new DbContextOptionsBuilder<DataContext>()
|
||||
.UseSqlite(CreateInMemoryDatabase())
|
||||
.EnableSensitiveDataLogging()
|
||||
.Options;
|
||||
|
||||
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
|
||||
|
||||
_context = new DataContext(contextOptions);
|
||||
|
||||
_context.Database.EnsureCreated(); // Ensure DB schema is created
|
||||
|
||||
Task.Run(SeedDb).GetAwaiter().GetResult();
|
||||
|
||||
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
|
||||
var mapper = config.CreateMapper();
|
||||
_mapper = config.CreateMapper();
|
||||
|
||||
_unitOfWork = new UnitOfWork(_context, mapper, null);
|
||||
GlobalConfiguration.Configuration.UseInMemoryStorage();
|
||||
_unitOfWork = new UnitOfWork(_context, _mapper, null);
|
||||
}
|
||||
|
||||
private static DbConnection CreateInMemoryDatabase()
|
||||
|
@ -60,47 +57,66 @@ public abstract class AbstractDbTest
|
|||
|
||||
private async Task<bool> SeedDb()
|
||||
{
|
||||
await _context.Database.MigrateAsync();
|
||||
var filesystem = CreateFileSystem();
|
||||
try
|
||||
{
|
||||
await _context.Database.EnsureCreatedAsync();
|
||||
var filesystem = CreateFileSystem();
|
||||
|
||||
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
|
||||
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
|
||||
|
||||
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
|
||||
setting.Value = CacheDirectory;
|
||||
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
|
||||
setting.Value = CacheDirectory;
|
||||
|
||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
|
||||
setting.Value = BackupDirectory;
|
||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
|
||||
setting.Value = BackupDirectory;
|
||||
|
||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
|
||||
setting.Value = BookmarkDirectory;
|
||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
|
||||
setting.Value = BookmarkDirectory;
|
||||
|
||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
|
||||
setting.Value = "10";
|
||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
|
||||
setting.Value = "10";
|
||||
|
||||
_context.ServerSetting.Update(setting);
|
||||
_context.ServerSetting.Update(setting);
|
||||
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
||||
.Build());
|
||||
return await _context.SaveChangesAsync() > 0;
|
||||
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithAllowMetadataMatching(true)
|
||||
.WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
|
||||
.Build());
|
||||
|
||||
await _context.SaveChangesAsync();
|
||||
|
||||
await Seed.SeedMetadataSettings(_context);
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Console.WriteLine($"[SeedDb] Error: {ex.Message}");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract Task ResetDb();
|
||||
|
||||
protected static MockFileSystem CreateFileSystem()
|
||||
public void Dispose()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
||||
fileSystem.AddDirectory("C:/kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(BookmarkDirectory);
|
||||
fileSystem.AddDirectory(SiteThemeDirectory);
|
||||
fileSystem.AddDirectory(LogDirectory);
|
||||
fileSystem.AddDirectory(TempDirectory);
|
||||
fileSystem.AddDirectory(DataDirectory);
|
||||
_context.Dispose();
|
||||
_connection.Dispose();
|
||||
}
|
||||
|
||||
return fileSystem;
|
||||
/// <summary>
|
||||
/// Add a role to an existing User. Commits.
|
||||
/// </summary>
|
||||
/// <param name="userId"></param>
|
||||
/// <param name="roleName"></param>
|
||||
protected async Task AddUserWithRole(int userId, string roleName)
|
||||
{
|
||||
var role = new AppRole { Id = userId, Name = roleName, NormalizedName = roleName.ToUpper() };
|
||||
|
||||
await _context.Roles.AddAsync(role);
|
||||
await _context.UserRoles.AddAsync(new AppUserRole { UserId = userId, RoleId = userId });
|
||||
|
||||
await _context.SaveChangesAsync();
|
||||
}
|
||||
}
|
||||
|
|
43
API.Tests/AbstractFsTest.cs
Normal file
|
@ -0,0 +1,43 @@
|
|||
|
||||
|
||||
using System.IO;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
|
||||
namespace API.Tests;
|
||||
|
||||
public abstract class AbstractFsTest
|
||||
{
|
||||
|
||||
protected static readonly string Root = Parser.NormalizePath(Path.GetPathRoot(Directory.GetCurrentDirectory()));
|
||||
protected static readonly string ConfigDirectory = Root + "kavita/config/";
|
||||
protected static readonly string CacheDirectory = ConfigDirectory + "cache/";
|
||||
protected static readonly string CacheLongDirectory = ConfigDirectory + "cache-long/";
|
||||
protected static readonly string CoverImageDirectory = ConfigDirectory + "covers/";
|
||||
protected static readonly string BackupDirectory = ConfigDirectory + "backups/";
|
||||
protected static readonly string LogDirectory = ConfigDirectory + "logs/";
|
||||
protected static readonly string BookmarkDirectory = ConfigDirectory + "bookmarks/";
|
||||
protected static readonly string SiteThemeDirectory = ConfigDirectory + "themes/";
|
||||
protected static readonly string TempDirectory = ConfigDirectory + "temp/";
|
||||
protected static readonly string ThemesDirectory = ConfigDirectory + "theme";
|
||||
protected static readonly string DataDirectory = Root + "data/";
|
||||
|
||||
protected static MockFileSystem CreateFileSystem()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory(Root + "kavita/");
|
||||
fileSystem.AddDirectory(Root + "kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CacheLongDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(BookmarkDirectory);
|
||||
fileSystem.AddDirectory(SiteThemeDirectory);
|
||||
fileSystem.AddDirectory(LogDirectory);
|
||||
fileSystem.AddDirectory(TempDirectory);
|
||||
fileSystem.AddDirectory(DataDirectory);
|
||||
fileSystem.AddDirectory(ThemesDirectory);
|
||||
|
||||
return fileSystem;
|
||||
}
|
||||
}
|
|
@ -4,15 +4,16 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Comparers;
|
||||
|
||||
public class ChapterSortComparerTest
|
||||
public class ChapterSortComparerDefaultLastTest
|
||||
{
|
||||
[Theory]
|
||||
[InlineData(new[] {1, 2, 0}, new[] {1, 2, 0})]
|
||||
[InlineData(new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber}, new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
|
||||
[InlineData(new[] {1, 0, 0}, new[] {1, 0, 0})]
|
||||
[InlineData(new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||
[InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||
public void ChapterSortTest(int[] input, int[] expected)
|
||||
{
|
||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparer()).ToArray());
|
||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultLast()).ToArray());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Comparers;
|
||||
|
||||
public class ChapterSortComparerZeroFirstTests
|
||||
public class ChapterSortComparerDefaultFirstTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData(new[] {1, 2, 0}, new[] {0, 1, 2,})]
|
||||
|
@ -12,13 +12,13 @@ public class ChapterSortComparerZeroFirstTests
|
|||
[InlineData(new[] {1, 0, 0}, new[] {0, 0, 1})]
|
||||
public void ChapterSortComparerZeroFirstTest(int[] input, int[] expected)
|
||||
{
|
||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerZeroFirst()).ToArray());
|
||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultFirst()).ToArray());
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(new[] {1.0, 0.5, 0.3}, new[] {0.3, 0.5, 1.0})]
|
||||
public void ChapterSortComparerZeroFirstTest_Doubles(double[] input, double[] expected)
|
||||
[InlineData(new [] {1.0f, 0.5f, 0.3f}, new [] {0.3f, 0.5f, 1.0f})]
|
||||
public void ChapterSortComparerZeroFirstTest_Doubles(float[] input, float[] expected)
|
||||
{
|
||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerZeroFirst()).ToArray());
|
||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultFirst()).ToArray());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,11 +7,11 @@ namespace API.Tests.Comparers;
|
|||
public class SortComparerZeroLastTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData(new[] {0, 1, 2,}, new[] {1, 2, 0})]
|
||||
[InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1, 2,}, new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
|
||||
[InlineData(new[] {0, 0, 1}, new[] {1, 0, 0})]
|
||||
[InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||
public void SortComparerZeroLastTest(int[] input, int[] expected)
|
||||
{
|
||||
Assert.Equal(expected, input.OrderBy(f => f, SortComparerZeroLast.Default).ToArray());
|
||||
Assert.Equal(expected, input.OrderBy(f => f, ChapterSortComparerDefaultLast.Default).ToArray());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
using API.Helpers.Converters;
|
||||
using Hangfire;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Converters;
|
||||
|
|
|
@ -30,7 +30,7 @@ public class ChapterListExtensionsTests
|
|||
{
|
||||
var info = new ParserInfo()
|
||||
{
|
||||
Chapters = "0",
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
|
||||
Edition = "",
|
||||
Format = MangaFormat.Archive,
|
||||
FullFilePath = "/manga/darker than black.cbz",
|
||||
|
@ -38,12 +38,12 @@ public class ChapterListExtensionsTests
|
|||
IsSpecial = false,
|
||||
Series = "darker than black",
|
||||
Title = "darker than black",
|
||||
Volumes = "0"
|
||||
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("darker than black - Some special", "0", CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
|
||||
CreateChapter("darker than black - Some special", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
|
||||
};
|
||||
|
||||
var actualChapter = chapterList.GetChapterByRange(info);
|
||||
|
@ -57,7 +57,7 @@ public class ChapterListExtensionsTests
|
|||
{
|
||||
var info = new ParserInfo()
|
||||
{
|
||||
Chapters = "0",
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
|
||||
Edition = "",
|
||||
Format = MangaFormat.Archive,
|
||||
FullFilePath = "/manga/darker than black.cbz",
|
||||
|
@ -65,12 +65,12 @@ public class ChapterListExtensionsTests
|
|||
IsSpecial = true,
|
||||
Series = "darker than black",
|
||||
Title = "darker than black",
|
||||
Volumes = "0"
|
||||
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
|
||||
CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
|
||||
};
|
||||
|
||||
var actualChapter = chapterList.GetChapterByRange(info);
|
||||
|
@ -83,7 +83,7 @@ public class ChapterListExtensionsTests
|
|||
{
|
||||
var info = new ParserInfo()
|
||||
{
|
||||
Chapters = "0",
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
|
||||
Edition = "",
|
||||
Format = MangaFormat.Archive,
|
||||
FullFilePath = "/manga/detective comics #001.cbz",
|
||||
|
@ -91,13 +91,39 @@ public class ChapterListExtensionsTests
|
|||
IsSpecial = true,
|
||||
Series = "detective comics",
|
||||
Title = "detective comics",
|
||||
Volumes = "0"
|
||||
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||
};
|
||||
|
||||
var actualChapter = chapterList.GetChapterByRange(info);
|
||||
|
||||
Assert.Equal(chapterList[0], actualChapter);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetChapterByRange_On_FilenameChange_ShouldGetChapter()
|
||||
{
|
||||
var info = new ParserInfo()
|
||||
{
|
||||
Chapters = "1",
|
||||
Edition = "",
|
||||
Format = MangaFormat.Archive,
|
||||
FullFilePath = "/manga/detective comics #001.cbz",
|
||||
Filename = "detective comics #001.cbz",
|
||||
IsSpecial = false,
|
||||
Series = "detective comics",
|
||||
Title = "detective comics",
|
||||
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("1", "1", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), false),
|
||||
};
|
||||
|
||||
var actualChapter = chapterList.GetChapterByRange(info);
|
||||
|
@ -112,7 +138,7 @@ public class ChapterListExtensionsTests
|
|||
{
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
|
||||
};
|
||||
|
||||
|
@ -124,7 +150,7 @@ public class ChapterListExtensionsTests
|
|||
{
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
|
||||
};
|
||||
|
||||
|
@ -151,8 +177,8 @@ public class ChapterListExtensionsTests
|
|||
{
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||
};
|
||||
|
||||
chapterList[0].ReleaseDate = new DateTime(10, 1, 1);
|
||||
|
@ -166,8 +192,8 @@ public class ChapterListExtensionsTests
|
|||
{
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||
};
|
||||
|
||||
chapterList[0].ReleaseDate = new DateTime(2002, 1, 1);
|
||||
|
|
31
API.Tests/Extensions/EncodeFormatExtensionsTests.cs
Normal file
|
@ -0,0 +1,31 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Extensions;
|
||||
|
||||
public class EncodeFormatExtensionsTests
|
||||
{
|
||||
[Fact]
|
||||
public void GetExtension_ShouldReturnCorrectExtensionForAllValues()
|
||||
{
|
||||
// Arrange
|
||||
var expectedExtensions = new Dictionary<EncodeFormat, string>
|
||||
{
|
||||
{ EncodeFormat.PNG, ".png" },
|
||||
{ EncodeFormat.WEBP, ".webp" },
|
||||
{ EncodeFormat.AVIF, ".avif" }
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
foreach (var format in Enum.GetValues(typeof(EncodeFormat)).Cast<EncodeFormat>())
|
||||
{
|
||||
var extension = format.GetExtension();
|
||||
Assert.Equal(expectedExtensions[format], extension);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -74,10 +74,10 @@ public class EnumerableExtensionsTests
|
|||
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"},
|
||||
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"}
|
||||
)]
|
||||
[InlineData(
|
||||
new[] {"01/001.jpg", "001.jpg"},
|
||||
new[] {"001.jpg", "01/001.jpg"}
|
||||
)]
|
||||
[InlineData(
|
||||
new[] {"01/001.jpg", "001.jpg"},
|
||||
new[] {"001.jpg", "01/001.jpg"}
|
||||
)]
|
||||
public void TestNaturalSort(string[] input, string[] expected)
|
||||
{
|
||||
Assert.Equal(expected, input.OrderByNatural(x => x).ToArray());
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using API.Entities.Enums;
|
||||
|
@ -6,7 +7,6 @@ using API.Extensions;
|
|||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using API.Tests.Helpers;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
@ -18,9 +18,8 @@ public class ParserInfoListExtensions
|
|||
private readonly IDefaultParser _defaultParser;
|
||||
public ParserInfoListExtensions()
|
||||
{
|
||||
_defaultParser =
|
||||
new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
|
||||
new MockFileSystem()));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||
_defaultParser = new BasicParser(ds, new ImageParser(ds));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
|
@ -33,7 +32,7 @@ public class ParserInfoListExtensions
|
|||
|
||||
[Theory]
|
||||
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
|
||||
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
|
||||
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
|
||||
[InlineData(new[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
|
||||
public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo)
|
||||
{
|
||||
|
@ -41,8 +40,8 @@ public class ParserInfoListExtensions
|
|||
foreach (var filename in inputInfos)
|
||||
{
|
||||
infos.Add(_defaultParser.Parse(
|
||||
filename,
|
||||
string.Empty));
|
||||
Path.Join("E:/Manga/Cynthia the Mission/", filename),
|
||||
"E:/Manga/", "E:/Manga/", LibraryType.Manga));
|
||||
}
|
||||
|
||||
var files = inputChapters.Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build()).ToList();
|
||||
|
@ -52,4 +51,26 @@ public class ParserInfoListExtensions
|
|||
|
||||
Assert.Equal(expectedHasInfo, infos.HasInfo(chapter));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void HasInfoTest_SuccessWhenSpecial()
|
||||
{
|
||||
var infos = new[]
|
||||
{
|
||||
_defaultParser.Parse(
|
||||
"E:/Manga/Cynthia the Mission/Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip",
|
||||
"E:/Manga/", "E:/Manga/", LibraryType.Manga)
|
||||
};
|
||||
|
||||
var files = new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip"}
|
||||
.Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build())
|
||||
.ToList();
|
||||
var chapter = new ChapterBuilder("Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip")
|
||||
.WithRange("Cynthia The Mission The Special SP01 [Desudesu&Brolen]")
|
||||
.WithFiles(files)
|
||||
.WithIsSpecial(true)
|
||||
.Build();
|
||||
|
||||
Assert.True(infos.HasInfo(chapter));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using API.Data;
|
||||
using API.Data.Misc;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Entities.Person;
|
||||
using API.Extensions.QueryExtensions;
|
||||
using API.Helpers.Builders;
|
||||
using Xunit;
|
||||
|
@ -45,17 +43,17 @@ public class QueryableExtensionsTests
|
|||
[InlineData(false, 1)]
|
||||
public void RestrictAgainstAgeRestriction_CollectionTag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
||||
{
|
||||
var items = new List<CollectionTag>()
|
||||
var items = new List<AppUserCollection>()
|
||||
{
|
||||
new CollectionTagBuilder("Test")
|
||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
|
||||
new AppUserCollectionBuilder("Test")
|
||||
.WithItem(new SeriesBuilder("S1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build()).Build())
|
||||
.Build(),
|
||||
new CollectionTagBuilder("Test 2")
|
||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
|
||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
|
||||
new AppUserCollectionBuilder("Test 2")
|
||||
.WithItem(new SeriesBuilder("S2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build()).Build())
|
||||
.WithItem(new SeriesBuilder("S1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build()).Build())
|
||||
.Build(),
|
||||
new CollectionTagBuilder("Test 3")
|
||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
|
||||
new AppUserCollectionBuilder("Test 3")
|
||||
.WithItem(new SeriesBuilder("S3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build()).Build())
|
||||
.Build(),
|
||||
};
|
||||
|
||||
|
@ -123,29 +121,46 @@ public class QueryableExtensionsTests
|
|||
|
||||
[Theory]
|
||||
[InlineData(true, 2)]
|
||||
[InlineData(false, 1)]
|
||||
public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
||||
[InlineData(false, 2)]
|
||||
public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedPeopleCount)
|
||||
{
|
||||
var items = new List<Person>()
|
||||
// Arrange
|
||||
var items = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Test", PersonRole.Character)
|
||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
|
||||
.Build(),
|
||||
new PersonBuilder("Test", PersonRole.Character)
|
||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
|
||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
|
||||
.Build(),
|
||||
new PersonBuilder("Test", PersonRole.Character)
|
||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
|
||||
.Build(),
|
||||
CreatePersonWithSeriesMetadata("Test1", AgeRating.Teen),
|
||||
CreatePersonWithSeriesMetadata("Test2", AgeRating.Unknown, AgeRating.Teen), // 2 series on this person, restrict will still allow access
|
||||
CreatePersonWithSeriesMetadata("Test3", AgeRating.X18Plus)
|
||||
};
|
||||
|
||||
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
|
||||
var ageRestriction = new AgeRestriction
|
||||
{
|
||||
AgeRating = AgeRating.Teen,
|
||||
IncludeUnknowns = includeUnknowns
|
||||
});
|
||||
Assert.Equal(expectedCount, filtered.Count());
|
||||
};
|
||||
|
||||
// Act
|
||||
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(ageRestriction);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(expectedPeopleCount, filtered.Count());
|
||||
}
|
||||
|
||||
private static Person CreatePersonWithSeriesMetadata(string name, params AgeRating[] ageRatings)
|
||||
{
|
||||
var person = new PersonBuilder(name).Build();
|
||||
|
||||
foreach (var ageRating in ageRatings)
|
||||
{
|
||||
var seriesMetadata = new SeriesMetadataBuilder().WithAgeRating(ageRating).Build();
|
||||
person.SeriesMetadataPeople.Add(new SeriesMetadataPeople
|
||||
{
|
||||
SeriesMetadata = seriesMetadata,
|
||||
Person = person,
|
||||
Role = PersonRole.Character // Role is now part of the relationship
|
||||
});
|
||||
}
|
||||
|
||||
return person;
|
||||
}
|
||||
|
||||
[Theory]
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using System.Linq;
|
||||
using API.Comparators;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Extensions;
|
||||
|
@ -17,22 +15,23 @@ public class SeriesExtensionsTests
|
|||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithCoverImage("Special 1")
|
||||
.WithIsSpecial(true)
|
||||
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithCoverImage("Special 2")
|
||||
.WithIsSpecial(true)
|
||||
.WithSortOrder(Parser.SpecialVolumeNumber + 2)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Special 1", series.GetCoverImage());
|
||||
|
@ -43,8 +42,8 @@ public class SeriesExtensionsTests
|
|||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("13")
|
||||
.WithCoverImage("Chapter 13")
|
||||
.Build())
|
||||
|
@ -59,7 +58,7 @@ public class SeriesExtensionsTests
|
|||
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithName("Volume 2")
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithCoverImage("Volume 2")
|
||||
.Build())
|
||||
.Build())
|
||||
|
@ -67,12 +66,83 @@ public class SeriesExtensionsTests
|
|||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCoverImage_LooseChapters_WithSub1_Chapter()
|
||||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("-1")
|
||||
.WithCoverImage("Chapter -1")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("0.5")
|
||||
.WithCoverImage("Chapter 0.5")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithCoverImage("Chapter 2")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithCoverImage("Chapter 1")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("3")
|
||||
.WithCoverImage("Chapter 3")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("4AU")
|
||||
.WithCoverImage("Chapter 4AU")
|
||||
.Build())
|
||||
.Build())
|
||||
|
||||
.Build();
|
||||
|
||||
|
||||
Assert.Equal("Chapter 1", series.GetCoverImage());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks the case where there are specials and loose leafs, loose leaf chapters should be preferred
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void GetCoverImage_LooseChapters_WithSub1_Chapter_WithSpecials()
|
||||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
|
||||
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||
.WithName(Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder("I am a Special")
|
||||
.WithCoverImage("I am a Special")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("I am a Special 2")
|
||||
.WithCoverImage("I am a Special 2")
|
||||
.Build())
|
||||
.Build())
|
||||
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("0.5")
|
||||
.WithCoverImage("Chapter 0.5")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("2")
|
||||
.WithCoverImage("Chapter 2")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithCoverImage("Chapter 1")
|
||||
.Build())
|
||||
.Build())
|
||||
|
||||
.Build();
|
||||
|
||||
|
||||
Assert.Equal("Chapter 1", series.GetCoverImage());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCoverImage_JustVolumes()
|
||||
{
|
||||
|
@ -81,14 +151,14 @@ public class SeriesExtensionsTests
|
|||
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithName("Volume 1")
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithCoverImage("Volume 1 Chapter 1")
|
||||
.Build())
|
||||
.Build())
|
||||
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithName("Volume 2")
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithCoverImage("Volume 2")
|
||||
.Build())
|
||||
.Build())
|
||||
|
@ -109,19 +179,48 @@ public class SeriesExtensionsTests
|
|||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCoverImage_JustVolumes_ButVolume0()
|
||||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithName("Volume 0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithCoverImage("Volume 0")
|
||||
.Build())
|
||||
.Build())
|
||||
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithName("Volume 1")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithCoverImage("Volume 1")
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.SortOrder, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Volume 1", series.GetCoverImage());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCoverImage_JustSpecials_WithDecimal()
|
||||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("2.5")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Special 1")
|
||||
|
@ -135,7 +234,7 @@ public class SeriesExtensionsTests
|
|||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Special 2", series.GetCoverImage());
|
||||
|
@ -146,8 +245,8 @@ public class SeriesExtensionsTests
|
|||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("2.5")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 2.5")
|
||||
|
@ -156,16 +255,19 @@ public class SeriesExtensionsTests
|
|||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 2")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithCoverImage("Special 1")
|
||||
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Chapter 2", series.GetCoverImage());
|
||||
|
@ -176,8 +278,8 @@ public class SeriesExtensionsTests
|
|||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("2.5")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 2.5")
|
||||
|
@ -186,14 +288,17 @@ public class SeriesExtensionsTests
|
|||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 2")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithCoverImage("Special 3")
|
||||
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithMinNumber(1)
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Volume 1")
|
||||
.Build())
|
||||
|
@ -202,7 +307,7 @@ public class SeriesExtensionsTests
|
|||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Volume 1", series.GetCoverImage());
|
||||
|
@ -213,8 +318,8 @@ public class SeriesExtensionsTests
|
|||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("2.5")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 2.5")
|
||||
|
@ -223,14 +328,17 @@ public class SeriesExtensionsTests
|
|||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 2")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithCoverImage("Special 1")
|
||||
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithMinNumber(1)
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Volume 1")
|
||||
.Build())
|
||||
|
@ -239,7 +347,7 @@ public class SeriesExtensionsTests
|
|||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Volume 1", series.GetCoverImage());
|
||||
|
@ -250,8 +358,8 @@ public class SeriesExtensionsTests
|
|||
{
|
||||
var series = new SeriesBuilder("Ippo")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1426")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 1426")
|
||||
|
@ -260,21 +368,24 @@ public class SeriesExtensionsTests
|
|||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 1425")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithCoverImage("Special 1")
|
||||
.WithCoverImage("Special 3")
|
||||
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithMinNumber(1)
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Volume 1")
|
||||
.Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("137")
|
||||
.WithMinNumber(1)
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Volume 137")
|
||||
.Build())
|
||||
|
@ -283,7 +394,7 @@ public class SeriesExtensionsTests
|
|||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Volume 1", series.GetCoverImage());
|
||||
|
@ -294,8 +405,8 @@ public class SeriesExtensionsTests
|
|||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("2.5")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 2.5")
|
||||
|
@ -307,7 +418,7 @@ public class SeriesExtensionsTests
|
|||
.Build())
|
||||
.WithVolume(new VolumeBuilder("4")
|
||||
.WithMinNumber(4)
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Volume 4")
|
||||
.Build())
|
||||
|
@ -316,11 +427,77 @@ public class SeriesExtensionsTests
|
|||
|
||||
foreach (var vol in series.Volumes)
|
||||
{
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
||||
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||
}
|
||||
|
||||
Assert.Equal("Chapter 2", series.GetCoverImage());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Ensure that Series cover is issue 1, when there are less than 1 entities and specials
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void GetCoverImage_LessThanIssue1()
|
||||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 0")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 1")
|
||||
.Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||
.WithMinNumber(4)
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Volume 4")
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
Assert.Equal("Chapter 1", series.GetCoverImage());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Ensure that Series cover is issue 1, when there are less than 1 entities and specials
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void GetCoverImage_LessThanIssue1_WithNegative()
|
||||
{
|
||||
var series = new SeriesBuilder("Test 1")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||
.WithName(Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("-1")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter -1")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 0")
|
||||
.Build())
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Chapter 1")
|
||||
.Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||
.WithMinNumber(4)
|
||||
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||
.WithIsSpecial(false)
|
||||
.WithCoverImage("Volume 4")
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
Assert.Equal("Chapter 1", series.GetCoverImage());
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
81
API.Tests/Extensions/VersionExtensionTests.cs
Normal file
|
@ -0,0 +1,81 @@
|
|||
using System;
|
||||
using API.Extensions;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Extensions;
|
||||
|
||||
public class VersionHelperTests
|
||||
{
|
||||
[Fact]
|
||||
public void CompareWithoutRevision_ShouldReturnTrue_WhenMajorMinorBuildMatch()
|
||||
{
|
||||
// Arrange
|
||||
var v1 = new Version(1, 2, 3, 4);
|
||||
var v2 = new Version(1, 2, 3, 5);
|
||||
|
||||
// Act
|
||||
var result = v1.CompareWithoutRevision(v2);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CompareWithoutRevision_ShouldHandleBuildlessVersions()
|
||||
{
|
||||
// Arrange
|
||||
var v1 = new Version(1, 2);
|
||||
var v2 = new Version(1, 2);
|
||||
|
||||
// Act
|
||||
var result = v1.CompareWithoutRevision(v2);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(1, 2, 3, 1, 2, 4)]
|
||||
[InlineData(1, 2, 3, 1, 2, 0)]
|
||||
public void CompareWithoutRevision_ShouldReturnFalse_WhenBuildDiffers(
|
||||
int major1, int minor1, int build1,
|
||||
int major2, int minor2, int build2)
|
||||
{
|
||||
var v1 = new Version(major1, minor1, build1);
|
||||
var v2 = new Version(major2, minor2, build2);
|
||||
|
||||
var result = v1.CompareWithoutRevision(v2);
|
||||
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(1, 2, 3, 1, 3, 3)]
|
||||
[InlineData(1, 2, 3, 1, 0, 3)]
|
||||
public void CompareWithoutRevision_ShouldReturnFalse_WhenMinorDiffers(
|
||||
int major1, int minor1, int build1,
|
||||
int major2, int minor2, int build2)
|
||||
{
|
||||
var v1 = new Version(major1, minor1, build1);
|
||||
var v2 = new Version(major2, minor2, build2);
|
||||
|
||||
var result = v1.CompareWithoutRevision(v2);
|
||||
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(1, 2, 3, 2, 2, 3)]
|
||||
[InlineData(1, 2, 3, 0, 2, 3)]
|
||||
public void CompareWithoutRevision_ShouldReturnFalse_WhenMajorDiffers(
|
||||
int major1, int minor1, int build1,
|
||||
int major2, int minor2, int build2)
|
||||
{
|
||||
var v1 = new Version(major1, minor1, build1);
|
||||
var v2 = new Version(major2, minor2, build2);
|
||||
|
||||
var result = v1.CompareWithoutRevision(v2);
|
||||
|
||||
Assert.False(result);
|
||||
}
|
||||
}
|
|
@ -3,7 +3,6 @@ using API.Entities;
|
|||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Helpers.Builders;
|
||||
using API.Tests.Helpers;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Extensions;
|
||||
|
@ -21,12 +20,43 @@ public class VolumeListExtensionsTests
|
|||
.WithChapter(new ChapterBuilder("3").Build())
|
||||
.WithChapter(new ChapterBuilder("4").Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1").Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
|
||||
.Build(),
|
||||
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build(),
|
||||
};
|
||||
|
||||
var v = volumes.GetCoverImage(MangaFormat.Archive);
|
||||
Assert.Equal(volumes[0].MinNumber, volumes.GetCoverImage(MangaFormat.Archive).MinNumber);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCoverImage_ChoosesVolume1_WhenHalf()
|
||||
{
|
||||
var volumes = new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("0.5").Build())
|
||||
.Build(),
|
||||
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build(),
|
||||
};
|
||||
|
||||
var v = volumes.GetCoverImage(MangaFormat.Archive);
|
||||
Assert.Equal(volumes[0].MinNumber, volumes.GetCoverImage(MangaFormat.Archive).MinNumber);
|
||||
}
|
||||
|
||||
|
@ -39,9 +69,14 @@ public class VolumeListExtensionsTests
|
|||
.WithChapter(new ChapterBuilder("3").Build())
|
||||
.WithChapter(new ChapterBuilder("4").Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1").Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build(),
|
||||
};
|
||||
|
||||
|
@ -57,9 +92,14 @@ public class VolumeListExtensionsTests
|
|||
.WithChapter(new ChapterBuilder("3").Build())
|
||||
.WithChapter(new ChapterBuilder("4").Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1").Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build(),
|
||||
};
|
||||
|
||||
|
@ -75,9 +115,14 @@ public class VolumeListExtensionsTests
|
|||
.WithChapter(new ChapterBuilder("3").Build())
|
||||
.WithChapter(new ChapterBuilder("4").Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1").Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build(),
|
||||
};
|
||||
|
||||
|
@ -95,7 +140,12 @@ public class VolumeListExtensionsTests
|
|||
.Build(),
|
||||
new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
|
||||
.Build(),
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithIsSpecial(true)
|
||||
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||
.Build())
|
||||
.Build(),
|
||||
};
|
||||
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
|
@ -11,9 +10,9 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class CacheHelperTests
|
||||
public class CacheHelperTests: AbstractFsTest
|
||||
{
|
||||
private const string TestCoverImageDirectory = @"c:\";
|
||||
private static readonly string TestCoverImageDirectory = Root;
|
||||
private const string TestCoverImageFile = "thumbnail.jpg";
|
||||
private readonly string _testCoverPath = Path.Join(TestCoverImageDirectory, TestCoverImageFile);
|
||||
private const string TestCoverArchive = @"file in folder.zip";
|
||||
|
@ -37,24 +36,29 @@ public class CacheHelperTests
|
|||
|
||||
[Theory]
|
||||
[InlineData("", false)]
|
||||
[InlineData("C:/", false)]
|
||||
[InlineData(null, false)]
|
||||
public void CoverImageExists_DoesFileExist(string coverImage, bool exists)
|
||||
{
|
||||
Assert.Equal(exists, _cacheHelper.CoverImageExists(coverImage));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CoverImageExists_DoesFileExistRoot()
|
||||
{
|
||||
Assert.False(_cacheHelper.CoverImageExists(Root));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CoverImageExists_FileExists()
|
||||
{
|
||||
Assert.True(_cacheHelper.CoverImageExists(TestCoverArchive));
|
||||
Assert.True(_cacheHelper.CoverImageExists(Path.Join(TestCoverImageDirectory, TestCoverArchive)));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ShouldUpdateCoverImage_OnFirstRun()
|
||||
{
|
||||
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(DateTime.Now)
|
||||
.Build();
|
||||
Assert.True(_cacheHelper.ShouldUpdateCoverImage(null, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
||||
|
@ -65,7 +69,7 @@ public class CacheHelperTests
|
|||
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked()
|
||||
{
|
||||
// Represents first run
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(DateTime.Now)
|
||||
.Build();
|
||||
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
||||
|
@ -76,7 +80,7 @@ public class CacheHelperTests
|
|||
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked_2()
|
||||
{
|
||||
// Represents first run
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(DateTime.Now)
|
||||
.Build();
|
||||
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now,
|
||||
|
@ -87,7 +91,7 @@ public class CacheHelperTests
|
|||
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked()
|
||||
{
|
||||
// Represents first run
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(DateTime.Now)
|
||||
.Build();
|
||||
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
||||
|
@ -98,7 +102,7 @@ public class CacheHelperTests
|
|||
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked_Modified()
|
||||
{
|
||||
// Represents first run
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(DateTime.Now)
|
||||
.Build();
|
||||
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
||||
|
@ -122,7 +126,7 @@ public class CacheHelperTests
|
|||
var cacheHelper = new CacheHelper(fileService);
|
||||
|
||||
var created = DateTime.Now.Subtract(TimeSpan.FromHours(1));
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(DateTime.Now.Subtract(TimeSpan.FromMinutes(1)))
|
||||
.Build();
|
||||
|
||||
|
@ -133,9 +137,10 @@ public class CacheHelperTests
|
|||
[Fact]
|
||||
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceCreated()
|
||||
{
|
||||
var now = DateTimeOffset.Now;
|
||||
var filesystemFile = new MockFileData("")
|
||||
{
|
||||
LastWriteTime = DateTimeOffset.Now
|
||||
LastWriteTime =now,
|
||||
};
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
|
@ -147,12 +152,12 @@ public class CacheHelperTests
|
|||
var cacheHelper = new CacheHelper(fileService);
|
||||
|
||||
var chapter = new ChapterBuilder("1")
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
.WithCreated(filesystemFile.LastWriteTime.DateTime)
|
||||
.WithLastModified(now.DateTime)
|
||||
.WithCreated(now.DateTime)
|
||||
.Build();
|
||||
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(now.DateTime)
|
||||
.Build();
|
||||
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||
}
|
||||
|
@ -160,9 +165,10 @@ public class CacheHelperTests
|
|||
[Fact]
|
||||
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified()
|
||||
{
|
||||
var now = DateTimeOffset.Now;
|
||||
var filesystemFile = new MockFileData("")
|
||||
{
|
||||
LastWriteTime = DateTimeOffset.Now
|
||||
LastWriteTime = now,
|
||||
};
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
|
@ -174,12 +180,12 @@ public class CacheHelperTests
|
|||
var cacheHelper = new CacheHelper(fileService);
|
||||
|
||||
var chapter = new ChapterBuilder("1")
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
.WithCreated(filesystemFile.LastWriteTime.DateTime)
|
||||
.WithLastModified(now.DateTime)
|
||||
.WithCreated(now.DateTime)
|
||||
.Build();
|
||||
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(now.DateTime)
|
||||
.Build();
|
||||
|
||||
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||
|
@ -188,9 +194,10 @@ public class CacheHelperTests
|
|||
[Fact]
|
||||
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified_ForceUpdate()
|
||||
{
|
||||
var now = DateTimeOffset.Now;
|
||||
var filesystemFile = new MockFileData("")
|
||||
{
|
||||
LastWriteTime = DateTimeOffset.Now
|
||||
LastWriteTime = now.DateTime,
|
||||
};
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
|
@ -202,12 +209,12 @@ public class CacheHelperTests
|
|||
var cacheHelper = new CacheHelper(fileService);
|
||||
|
||||
var chapter = new ChapterBuilder("1")
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
.WithCreated(filesystemFile.LastWriteTime.DateTime)
|
||||
.WithLastModified(now.DateTime)
|
||||
.WithCreated(now.DateTime)
|
||||
.Build();
|
||||
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(now.DateTime)
|
||||
.Build();
|
||||
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, true, file));
|
||||
}
|
||||
|
@ -215,10 +222,11 @@ public class CacheHelperTests
|
|||
[Fact]
|
||||
public void IsFileUnmodifiedSinceCreationOrLastScan_ModifiedSinceLastScan()
|
||||
{
|
||||
var now = DateTimeOffset.Now;
|
||||
var filesystemFile = new MockFileData("")
|
||||
{
|
||||
LastWriteTime = DateTimeOffset.Now,
|
||||
CreationTime = DateTimeOffset.Now
|
||||
LastWriteTime = now.DateTime,
|
||||
CreationTime = now.DateTime
|
||||
};
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
|
@ -234,8 +242,8 @@ public class CacheHelperTests
|
|||
.WithCreated(DateTime.Now.Subtract(TimeSpan.FromMinutes(10)))
|
||||
.Build();
|
||||
|
||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(now.DateTime)
|
||||
.Build();
|
||||
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||
}
|
||||
|
@ -243,9 +251,10 @@ public class CacheHelperTests
|
|||
[Fact]
|
||||
public void HasFileNotChangedSinceCreationOrLastScan_ModifiedSinceLastScan_ButLastModifiedSame()
|
||||
{
|
||||
var now = DateTimeOffset.Now;
|
||||
var filesystemFile = new MockFileData("")
|
||||
{
|
||||
LastWriteTime = DateTimeOffset.Now
|
||||
LastWriteTime =now.DateTime
|
||||
};
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
|
@ -262,7 +271,7 @@ public class CacheHelperTests
|
|||
.Build();
|
||||
|
||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
||||
.WithLastModified(now.DateTime)
|
||||
.Build();
|
||||
|
||||
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||
|
|
|
@ -1,118 +0,0 @@
|
|||
using System.Collections.Generic;
|
||||
using API.Data;
|
||||
using API.Entities;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class GenreHelperTests
|
||||
{
|
||||
[Fact]
|
||||
public void UpdateGenre_ShouldAddNewGenre()
|
||||
{
|
||||
var allGenres = new List<Genre>
|
||||
{
|
||||
new GenreBuilder("Action").Build(),
|
||||
new GenreBuilder("action").Build(),
|
||||
new GenreBuilder("Sci-fi").Build(),
|
||||
};
|
||||
var genreAdded = new List<Genre>();
|
||||
|
||||
GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Adventure"}, genre =>
|
||||
{
|
||||
genreAdded.Add(genre);
|
||||
});
|
||||
|
||||
Assert.Equal(2, genreAdded.Count);
|
||||
Assert.Equal(4, allGenres.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdateGenre_ShouldNotAddDuplicateGenre()
|
||||
{
|
||||
var allGenres = new List<Genre>
|
||||
{
|
||||
new GenreBuilder("Action").Build(),
|
||||
new GenreBuilder("action").Build(),
|
||||
new GenreBuilder("Sci-fi").Build(),
|
||||
|
||||
};
|
||||
var genreAdded = new List<Genre>();
|
||||
|
||||
GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Scifi"}, genre =>
|
||||
{
|
||||
genreAdded.Add(genre);
|
||||
});
|
||||
|
||||
Assert.Equal(3, allGenres.Count);
|
||||
Assert.Equal(2, genreAdded.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddGenre_ShouldAddOnlyNonExistingGenre()
|
||||
{
|
||||
var existingGenres = new List<Genre>
|
||||
{
|
||||
new GenreBuilder("Action").Build(),
|
||||
new GenreBuilder("action").Build(),
|
||||
new GenreBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
|
||||
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Action").Build());
|
||||
Assert.Equal(3, existingGenres.Count);
|
||||
|
||||
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("action").Build());
|
||||
Assert.Equal(3, existingGenres.Count);
|
||||
|
||||
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Shonen").Build());
|
||||
Assert.Equal(4, existingGenres.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void KeepOnlySamePeopleBetweenLists()
|
||||
{
|
||||
var existingGenres = new List<Genre>
|
||||
{
|
||||
new GenreBuilder("Action").Build(),
|
||||
new GenreBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
var peopleFromChapters = new List<Genre>
|
||||
{
|
||||
new GenreBuilder("Action").Build(),
|
||||
};
|
||||
|
||||
var genreRemoved = new List<Genre>();
|
||||
GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
|
||||
peopleFromChapters, genre =>
|
||||
{
|
||||
genreRemoved.Add(genre);
|
||||
});
|
||||
|
||||
Assert.Single(genreRemoved);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RemoveEveryoneIfNothingInRemoveAllExcept()
|
||||
{
|
||||
var existingGenres = new List<Genre>
|
||||
{
|
||||
new GenreBuilder("Action").Build(),
|
||||
new GenreBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
var peopleFromChapters = new List<Genre>();
|
||||
|
||||
var genreRemoved = new List<Genre>();
|
||||
GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
|
||||
peopleFromChapters, genre =>
|
||||
{
|
||||
genreRemoved.Add(genre);
|
||||
});
|
||||
|
||||
Assert.Equal(2, genreRemoved.Count);
|
||||
}
|
||||
}
|
|
@ -1,4 +1,5 @@
|
|||
using System.Collections.Generic;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using API.Entities;
|
||||
using API.Helpers;
|
||||
|
@ -49,17 +50,14 @@ public class OrderableHelperTests
|
|||
[Fact]
|
||||
public void ReorderItems_InvalidPosition_NoChange()
|
||||
{
|
||||
// Arrange
|
||||
var items = new List<AppUserSideNavStream>
|
||||
{
|
||||
new AppUserSideNavStream { Id = 1, Order = 0, Name = "A" },
|
||||
new AppUserSideNavStream { Id = 2, Order = 1, Name = "A" },
|
||||
};
|
||||
|
||||
// Act
|
||||
OrderableHelper.ReorderItems(items, 2, 3); // Position 3 is out of range
|
||||
|
||||
// Assert
|
||||
Assert.Equal(1, items[0].Id); // Item 1 should remain at position 0
|
||||
Assert.Equal(2, items[1].Id); // Item 2 should remain at position 1
|
||||
}
|
||||
|
@ -80,7 +78,6 @@ public class OrderableHelperTests
|
|||
[Fact]
|
||||
public void ReorderItems_DoubleMove()
|
||||
{
|
||||
// Arrange
|
||||
var items = new List<AppUserSideNavStream>
|
||||
{
|
||||
new AppUserSideNavStream { Id = 1, Order = 0, Name = "0" },
|
||||
|
@ -94,7 +91,6 @@ public class OrderableHelperTests
|
|||
// Move 4 -> 1
|
||||
OrderableHelper.ReorderItems(items, 5, 1);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(1, items[0].Id);
|
||||
Assert.Equal(0, items[0].Order);
|
||||
Assert.Equal(5, items[1].Id);
|
||||
|
@ -109,4 +105,98 @@ public class OrderableHelperTests
|
|||
|
||||
Assert.Equal("034125", string.Join("", items.Select(s => s.Name)));
|
||||
}
|
||||
|
||||
private static List<ReadingListItem> CreateTestReadingListItems(int count = 4)
|
||||
{
|
||||
var items = new List<ReadingListItem>();
|
||||
|
||||
for (var i = 0; i < count; i++)
|
||||
{
|
||||
items.Add(new ReadingListItem() { Id = i + 1, Order = count, ReadingListId = i + 1});
|
||||
}
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_MoveItemToBeginning_CorrectOrder()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
OrderableHelper.ReorderItems(items, 3, 0);
|
||||
|
||||
Assert.Equal(3, items[0].Id);
|
||||
Assert.Equal(1, items[1].Id);
|
||||
Assert.Equal(2, items[2].Id);
|
||||
Assert.Equal(4, items[3].Id);
|
||||
|
||||
for (var i = 0; i < items.Count; i++)
|
||||
{
|
||||
Assert.Equal(i, items[i].Order);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_MoveItemToEnd_CorrectOrder()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
OrderableHelper.ReorderItems(items, 1, 3);
|
||||
|
||||
Assert.Equal(2, items[0].Id);
|
||||
Assert.Equal(3, items[1].Id);
|
||||
Assert.Equal(4, items[2].Id);
|
||||
Assert.Equal(1, items[3].Id);
|
||||
|
||||
for (var i = 0; i < items.Count; i++)
|
||||
{
|
||||
Assert.Equal(i, items[i].Order);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_MoveItemToMiddle_CorrectOrder()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
OrderableHelper.ReorderItems(items, 4, 2);
|
||||
|
||||
Assert.Equal(1, items[0].Id);
|
||||
Assert.Equal(2, items[1].Id);
|
||||
Assert.Equal(4, items[2].Id);
|
||||
Assert.Equal(3, items[3].Id);
|
||||
|
||||
for (var i = 0; i < items.Count; i++)
|
||||
{
|
||||
Assert.Equal(i, items[i].Order);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_MoveItemToOutOfBoundsPosition_MovesToEnd()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
OrderableHelper.ReorderItems(items, 2, 10);
|
||||
|
||||
Assert.Equal(1, items[0].Id);
|
||||
Assert.Equal(3, items[1].Id);
|
||||
Assert.Equal(4, items[2].Id);
|
||||
Assert.Equal(2, items[3].Id);
|
||||
|
||||
for (var i = 0; i < items.Count; i++)
|
||||
{
|
||||
Assert.Equal(i, items[i].Order);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReorderItems_NegativePosition_ThrowsArgumentException()
|
||||
{
|
||||
var items = CreateTestReadingListItems();
|
||||
|
||||
Assert.Throws<ArgumentException>(() =>
|
||||
OrderableHelper.ReorderItems(items, 2, -1)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,5 @@
|
|||
using System.Collections.Generic;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services.Tasks.Scanner;
|
||||
|
|
|
@ -1,415 +1,133 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using API.Data;
|
||||
using API.DTOs;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using Xunit;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class PersonHelperTests
|
||||
public class PersonHelperTests : AbstractDbTest
|
||||
{
|
||||
#region UpdatePeople
|
||||
[Fact]
|
||||
public void UpdatePeople_ShouldAddNewPeople()
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
var allPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
};
|
||||
var peopleAdded = new List<Person>();
|
||||
|
||||
PersonHelper.UpdatePeople(allPeople, new[] {"Joseph Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
||||
{
|
||||
peopleAdded.Add(person);
|
||||
});
|
||||
|
||||
Assert.Equal(2, peopleAdded.Count);
|
||||
Assert.Equal(4, allPeople.Count);
|
||||
_context.Series.RemoveRange(_context.Series.ToList());
|
||||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeople_ShouldNotAddDuplicatePeople()
|
||||
{
|
||||
var allPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
new PersonBuilder("Sally Ann", PersonRole.CoverArtist).Build(),
|
||||
|
||||
};
|
||||
var peopleAdded = new List<Person>();
|
||||
|
||||
PersonHelper.UpdatePeople(allPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.CoverArtist, person =>
|
||||
{
|
||||
peopleAdded.Add(person);
|
||||
});
|
||||
|
||||
Assert.Equal(3, allPeople.Count);
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region UpdatePeopleList
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeopleList_NullTags_NoChanges()
|
||||
{
|
||||
// Arrange
|
||||
ICollection<PersonDto> tags = null;
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var allTags = new List<Person>();
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(PersonRole.Writer, tags, series, allTags, p => handleAddCalled = true, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.False(handleAddCalled);
|
||||
Assert.False(onModifiedCalled);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeopleList_AddNewTag_TagAddedAndOnModifiedCalled()
|
||||
{
|
||||
// Arrange
|
||||
const PersonRole role = PersonRole.Writer;
|
||||
var tags = new List<PersonDto>
|
||||
{
|
||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
||||
};
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var allTags = new List<Person>();
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
||||
{
|
||||
handleAddCalled = true;
|
||||
series.Metadata.People.Add(p);
|
||||
}, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.True(handleAddCalled);
|
||||
Assert.True(onModifiedCalled);
|
||||
Assert.Single(series.Metadata.People);
|
||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeopleList_RemoveExistingTag_TagRemovedAndOnModifiedCalled()
|
||||
{
|
||||
// Arrange
|
||||
const PersonRole role = PersonRole.Writer;
|
||||
var tags = new List<PersonDto>();
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var person = new PersonBuilder("John Doe", role).Build();
|
||||
person.Id = 1;
|
||||
series.Metadata.People.Add(person);
|
||||
var allTags = new List<Person>
|
||||
{
|
||||
person
|
||||
};
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
||||
{
|
||||
handleAddCalled = true;
|
||||
series.Metadata.People.Add(p);
|
||||
}, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.False(handleAddCalled);
|
||||
Assert.True(onModifiedCalled);
|
||||
Assert.Empty(series.Metadata.People);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeopleList_UpdateExistingTag_OnModifiedCalled()
|
||||
{
|
||||
// Arrange
|
||||
const PersonRole role = PersonRole.Writer;
|
||||
var tags = new List<PersonDto>
|
||||
{
|
||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
||||
};
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var person = new PersonBuilder("John Doe", role).Build();
|
||||
person.Id = 1;
|
||||
series.Metadata.People.Add(person);
|
||||
var allTags = new List<Person>
|
||||
{
|
||||
person
|
||||
};
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
||||
{
|
||||
handleAddCalled = true;
|
||||
series.Metadata.People.Add(p);
|
||||
}, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.False(handleAddCalled);
|
||||
Assert.False(onModifiedCalled);
|
||||
Assert.Single(series.Metadata.People);
|
||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdatePeopleList_NoChanges_HandleAddAndOnModifiedNotCalled()
|
||||
{
|
||||
// Arrange
|
||||
const PersonRole role = PersonRole.Writer;
|
||||
var tags = new List<PersonDto>
|
||||
{
|
||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
||||
};
|
||||
var series = new SeriesBuilder("Test Series").Build();
|
||||
var person = new PersonBuilder("John Doe", role).Build();
|
||||
person.Id = 1;
|
||||
series.Metadata.People.Add(person);
|
||||
var allTags = new List<Person>
|
||||
{
|
||||
new PersonBuilder("John Doe", role).Build()
|
||||
};
|
||||
var handleAddCalled = false;
|
||||
var onModifiedCalled = false;
|
||||
|
||||
// Act
|
||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
||||
{
|
||||
handleAddCalled = true;
|
||||
series.Metadata.People.Add(p);
|
||||
}, () => onModifiedCalled = true);
|
||||
|
||||
// Assert
|
||||
Assert.False(handleAddCalled);
|
||||
Assert.False(onModifiedCalled);
|
||||
Assert.Single(series.Metadata.People);
|
||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
||||
}
|
||||
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region RemovePeople
|
||||
[Fact]
|
||||
public void RemovePeople_ShouldRemovePeopleOfSameRole()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
};
|
||||
var peopleRemoved = new List<Person>();
|
||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
||||
Assert.Single(peopleRemoved);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RemovePeople_ShouldRemovePeopleFromBothRoles()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
};
|
||||
var peopleRemoved = new List<Person>();
|
||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
||||
Assert.Single(peopleRemoved);
|
||||
|
||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo"}, PersonRole.CoverArtist, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.Empty(existingPeople);
|
||||
Assert.Equal(2, peopleRemoved.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RemovePeople_ShouldRemovePeopleOfSameRole_WhenNothingPassed()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
};
|
||||
var peopleRemoved = new List<Person>();
|
||||
PersonHelper.RemovePeople(existingPeople, new List<string>(), PersonRole.Writer, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
||||
Assert.Equal(2, peopleRemoved.Count);
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region KeepOnlySamePeopleBetweenLists
|
||||
[Fact]
|
||||
public void KeepOnlySamePeopleBetweenLists()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
new PersonBuilder("Sally", PersonRole.Writer).Build(),
|
||||
};
|
||||
|
||||
var peopleFromChapters = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
};
|
||||
|
||||
var peopleRemoved = new List<Person>();
|
||||
PersonHelper.KeepOnlySamePeopleBetweenLists(existingPeople,
|
||||
peopleFromChapters, person =>
|
||||
{
|
||||
peopleRemoved.Add(person);
|
||||
});
|
||||
|
||||
Assert.Equal(2, peopleRemoved.Count);
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region AddPeople
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonDoesNotExist()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>();
|
||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
||||
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
||||
|
||||
// Assert
|
||||
Assert.Single(metadataPeople);
|
||||
Assert.Contains(person, metadataPeople);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonAlreadyExists()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("John Smith", PersonRole.Character)
|
||||
.WithId(1)
|
||||
.Build()
|
||||
};
|
||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
||||
|
||||
// Assert
|
||||
Assert.Single(metadataPeople);
|
||||
Assert.NotNull(metadataPeople.SingleOrDefault(p =>
|
||||
p.Name.Equals(person.Name) && p.Role == person.Role && p.NormalizedName == person.NormalizedName));
|
||||
Assert.Equal(1, metadataPeople.First().Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonNameIsNullOrEmpty()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>();
|
||||
var person2 = new PersonBuilder(string.Empty, PersonRole.Character).Build();
|
||||
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person2);
|
||||
|
||||
// Assert
|
||||
Assert.Empty(metadataPeople);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsDifferentButRoleIsSame()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("John Smith", PersonRole.Character).Build()
|
||||
};
|
||||
var person = new PersonBuilder("John Doe", PersonRole.Character).Build();
|
||||
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(2, metadataPeople.Count);
|
||||
Assert.Contains(person, metadataPeople);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsSameButRoleIsDifferent()
|
||||
{
|
||||
// Arrange
|
||||
var metadataPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("John Doe", PersonRole.Writer).Build()
|
||||
};
|
||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
||||
|
||||
// Act
|
||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(2, metadataPeople.Count);
|
||||
Assert.Contains(person, metadataPeople);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
[Fact]
|
||||
public void AddPeople_ShouldAddOnlyNonExistingPeople()
|
||||
{
|
||||
var existingPeople = new List<Person>
|
||||
{
|
||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
||||
new PersonBuilder("Sally", PersonRole.Writer).Build(),
|
||||
};
|
||||
|
||||
|
||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build());
|
||||
Assert.Equal(3, existingPeople.Count);
|
||||
|
||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.Writer).Build());
|
||||
Assert.Equal(3, existingPeople.Count);
|
||||
|
||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo Two", PersonRole.CoverArtist).Build());
|
||||
Assert.Equal(4, existingPeople.Count);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
//
|
||||
// // 1. Test adding new people and keeping existing ones
|
||||
// [Fact]
|
||||
// public async Task UpdateChapterPeopleAsync_AddNewPeople_ExistingPersonRetained()
|
||||
// {
|
||||
// var existingPerson = new PersonBuilder("Joe Shmo").Build();
|
||||
// var chapter = new ChapterBuilder("1").Build();
|
||||
//
|
||||
// // Create an existing person and assign them to the series with a role
|
||||
// var series = new SeriesBuilder("Test 1")
|
||||
// .WithFormat(MangaFormat.Archive)
|
||||
// .WithMetadata(new SeriesMetadataBuilder()
|
||||
// .WithPerson(existingPerson, PersonRole.Editor)
|
||||
// .Build())
|
||||
// .WithVolume(new VolumeBuilder("1").WithChapter(chapter).Build())
|
||||
// .Build();
|
||||
//
|
||||
// _unitOfWork.SeriesRepository.Add(series);
|
||||
// await _unitOfWork.CommitAsync();
|
||||
//
|
||||
// // Call UpdateChapterPeopleAsync with one existing and one new person
|
||||
// await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo", "New Person" }, PersonRole.Editor, _unitOfWork);
|
||||
//
|
||||
// // Assert existing person retained and new person added
|
||||
// var people = await _unitOfWork.PersonRepository.GetAllPeople();
|
||||
// Assert.Contains(people, p => p.Name == "Joe Shmo");
|
||||
// Assert.Contains(people, p => p.Name == "New Person");
|
||||
//
|
||||
// var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||
// Assert.Contains("Joe Shmo", chapterPeople);
|
||||
// Assert.Contains("New Person", chapterPeople);
|
||||
// }
|
||||
//
|
||||
// // 2. Test removing a person no longer in the list
|
||||
// [Fact]
|
||||
// public async Task UpdateChapterPeopleAsync_RemovePeople()
|
||||
// {
|
||||
// var existingPerson1 = new PersonBuilder("Joe Shmo").Build();
|
||||
// var existingPerson2 = new PersonBuilder("Jane Doe").Build();
|
||||
// var chapter = new ChapterBuilder("1").Build();
|
||||
//
|
||||
// var series = new SeriesBuilder("Test 1")
|
||||
// .WithVolume(new VolumeBuilder("1")
|
||||
// .WithChapter(new ChapterBuilder("1")
|
||||
// .WithPerson(existingPerson1, PersonRole.Editor)
|
||||
// .WithPerson(existingPerson2, PersonRole.Editor)
|
||||
// .Build())
|
||||
// .Build())
|
||||
// .Build();
|
||||
//
|
||||
// _unitOfWork.SeriesRepository.Add(series);
|
||||
// await _unitOfWork.CommitAsync();
|
||||
//
|
||||
// // Call UpdateChapterPeopleAsync with only one person
|
||||
// await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
|
||||
//
|
||||
// var people = await _unitOfWork.PersonRepository.GetAllPeople();
|
||||
// Assert.DoesNotContain(people, p => p.Name == "Jane Doe");
|
||||
//
|
||||
// var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||
// Assert.Contains("Joe Shmo", chapterPeople);
|
||||
// Assert.DoesNotContain("Jane Doe", chapterPeople);
|
||||
// }
|
||||
//
|
||||
// // 3. Test no changes when the list of people is the same
|
||||
// [Fact]
|
||||
// public async Task UpdateChapterPeopleAsync_NoChanges()
|
||||
// {
|
||||
// var existingPerson = new PersonBuilder("Joe Shmo").Build();
|
||||
// var chapter = new ChapterBuilder("1").Build();
|
||||
//
|
||||
// var series = new SeriesBuilder("Test 1")
|
||||
// .WithVolume(new VolumeBuilder("1")
|
||||
// .WithChapter(new ChapterBuilder("1")
|
||||
// .WithPerson(existingPerson, PersonRole.Editor)
|
||||
// .Build())
|
||||
// .Build())
|
||||
// .Build();
|
||||
//
|
||||
// _unitOfWork.SeriesRepository.Add(series);
|
||||
// await _unitOfWork.CommitAsync();
|
||||
//
|
||||
// // Call UpdateChapterPeopleAsync with the same list
|
||||
// await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
|
||||
//
|
||||
// var people = await _unitOfWork.PersonRepository.GetAllPeople();
|
||||
// Assert.Contains(people, p => p.Name == "Joe Shmo");
|
||||
//
|
||||
// var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||
// Assert.Contains("Joe Shmo", chapterPeople);
|
||||
// Assert.Single(chapter.People); // No duplicate entries
|
||||
// }
|
||||
//
|
||||
// // 4. Test multiple roles for a person
|
||||
// [Fact]
|
||||
// public async Task UpdateChapterPeopleAsync_MultipleRoles()
|
||||
// {
|
||||
// var person = new PersonBuilder("Joe Shmo").Build();
|
||||
// var chapter = new ChapterBuilder("1").Build();
|
||||
//
|
||||
// var series = new SeriesBuilder("Test 1")
|
||||
// .WithVolume(new VolumeBuilder("1")
|
||||
// .WithChapter(new ChapterBuilder("1")
|
||||
// .WithPerson(person, PersonRole.Writer) // Assign person as Writer
|
||||
// .Build())
|
||||
// .Build())
|
||||
// .Build();
|
||||
//
|
||||
// _unitOfWork.SeriesRepository.Add(series);
|
||||
// await _unitOfWork.CommitAsync();
|
||||
//
|
||||
// // Add same person as Editor
|
||||
// await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
|
||||
//
|
||||
// // Ensure that the same person is assigned with two roles
|
||||
// var chapterPeople = chapter.People.Where(cp => cp.Person.Name == "Joe Shmo").ToList();
|
||||
// Assert.Equal(2, chapterPeople.Count); // One for each role
|
||||
// Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Writer);
|
||||
// Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Editor);
|
||||
// }
|
||||
}
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
using System;
|
||||
using System.Threading.Tasks;
|
||||
using API.Helpers;
|
||||
using Xunit;
|
||||
|
||||
|
@ -33,7 +34,7 @@ public class RateLimiterTests
|
|||
}
|
||||
|
||||
[Fact]
|
||||
public void AcquireTokens_Refill()
|
||||
public async Task AcquireTokens_Refill()
|
||||
{
|
||||
// Arrange
|
||||
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));
|
||||
|
@ -43,14 +44,14 @@ public class RateLimiterTests
|
|||
limiter.TryAcquire("test_key");
|
||||
|
||||
// Wait for refill
|
||||
System.Threading.Thread.Sleep(1100);
|
||||
await Task.Delay(1100);
|
||||
|
||||
// Assert
|
||||
Assert.True(limiter.TryAcquire("test_key"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AcquireTokens_Refill_WithOff()
|
||||
public async Task AcquireTokens_Refill_WithOff()
|
||||
{
|
||||
// Arrange
|
||||
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);
|
||||
|
@ -60,7 +61,7 @@ public class RateLimiterTests
|
|||
limiter.TryAcquire("test_key");
|
||||
|
||||
// Wait for refill
|
||||
System.Threading.Thread.Sleep(2100);
|
||||
await Task.Delay(2100);
|
||||
|
||||
// Assert
|
||||
Assert.False(limiter.TryAcquire("test_key"));
|
||||
|
|
258
API.Tests/Helpers/ReviewHelperTests.cs
Normal file
|
@ -0,0 +1,258 @@
|
|||
using API.Helpers;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using Xunit;
|
||||
using API.DTOs.SeriesDetail;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class ReviewHelperTests
|
||||
{
|
||||
#region SelectSpectrumOfReviews Tests
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WhenLessThan10Reviews_ReturnsAllReviews()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = CreateReviewList(8);
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(8, result.Count);
|
||||
Assert.Equal(reviews, result.OrderByDescending(r => r.Score));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WhenMoreThan10Reviews_Returns10Reviews()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = CreateReviewList(20);
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(10, result.Count);
|
||||
Assert.Equal(reviews[0], result.First());
|
||||
Assert.Equal(reviews[19], result.Last());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WithExactly10Reviews_ReturnsAllReviews()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = CreateReviewList(10);
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(10, result.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WithLargeNumberOfReviews_ReturnsCorrectSpectrum()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = CreateReviewList(100);
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(10, result.Count);
|
||||
Assert.Contains(reviews[0], result);
|
||||
Assert.Contains(reviews[1], result);
|
||||
Assert.Contains(reviews[98], result);
|
||||
Assert.Contains(reviews[99], result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_WithEmptyList_ReturnsEmptyList()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = new List<UserReviewDto>();
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Empty(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SelectSpectrumOfReviews_ResultsOrderedByScoreDescending()
|
||||
{
|
||||
// Arrange
|
||||
var reviews = new List<UserReviewDto>
|
||||
{
|
||||
new UserReviewDto { Tagline = "1", Score = 3 },
|
||||
new UserReviewDto { Tagline = "2", Score = 5 },
|
||||
new UserReviewDto { Tagline = "3", Score = 1 },
|
||||
new UserReviewDto { Tagline = "4", Score = 4 },
|
||||
new UserReviewDto { Tagline = "5", Score = 2 }
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(5, result.Count);
|
||||
Assert.Equal(5, result[0].Score);
|
||||
Assert.Equal(4, result[1].Score);
|
||||
Assert.Equal(3, result[2].Score);
|
||||
Assert.Equal(2, result[3].Score);
|
||||
Assert.Equal(1, result[4].Score);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetCharacters Tests
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithNullBody_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
string body = null;
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithEmptyBody_ReturnsEmptyString()
|
||||
{
|
||||
// Arrange
|
||||
var body = string.Empty;
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithNoTextNodes_ReturnsEmptyString()
|
||||
{
|
||||
// Arrange
|
||||
const string body = "<div></div>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithLessCharactersThanLimit_ReturnsFullText()
|
||||
{
|
||||
// Arrange
|
||||
var body = "<p>This is a short review.</p>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("This is a short review.…", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_WithMoreCharactersThanLimit_TruncatesText()
|
||||
{
|
||||
// Arrange
|
||||
var body = "<p>" + new string('a', 200) + "</p>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(new string('a', 175) + "…", result);
|
||||
Assert.Equal(176, result.Length); // 175 characters + ellipsis
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_IgnoresScriptTags()
|
||||
{
|
||||
// Arrange
|
||||
const string body = "<p>Visible text</p><script>console.log('hidden');</script>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("Visible text…", result);
|
||||
Assert.DoesNotContain("hidden", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_RemovesMarkdownSymbols()
|
||||
{
|
||||
// Arrange
|
||||
const string body = "<p>This is **bold** and _italic_ text with [link](url).</p>";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("This is bold and italic text with link.…", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetCharacters_HandlesComplexMarkdownAndHtml()
|
||||
{
|
||||
// Arrange
|
||||
const string body = """
|
||||
|
||||
<div>
|
||||
<h1># Header</h1>
|
||||
<p>This is ~~strikethrough~~ and __underlined__ text</p>
|
||||
<p>~~~code block~~~</p>
|
||||
<p>+++highlighted+++</p>
|
||||
<p>img123(image.jpg)</p>
|
||||
</div>
|
||||
""";
|
||||
|
||||
// Act
|
||||
var result = ReviewHelper.GetCharacters(body);
|
||||
|
||||
// Assert
|
||||
Assert.DoesNotContain("~~", result);
|
||||
Assert.DoesNotContain("__", result);
|
||||
Assert.DoesNotContain("~~~", result);
|
||||
Assert.DoesNotContain("+++", result);
|
||||
Assert.DoesNotContain("img123(", result);
|
||||
Assert.Contains("Header", result);
|
||||
Assert.Contains("strikethrough", result);
|
||||
Assert.Contains("underlined", result);
|
||||
Assert.Contains("code block", result);
|
||||
Assert.Contains("highlighted", result);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static List<UserReviewDto> CreateReviewList(int count)
|
||||
{
|
||||
var reviews = new List<UserReviewDto>();
|
||||
for (var i = 0; i < count; i++)
|
||||
{
|
||||
reviews.Add(new UserReviewDto
|
||||
{
|
||||
Tagline = $"{i + 1}",
|
||||
Score = count - i // This makes them ordered by score descending initially
|
||||
});
|
||||
}
|
||||
return reviews;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
208
API.Tests/Helpers/ScannerHelper.cs
Normal file
|
@ -0,0 +1,208 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading.Tasks;
|
||||
using System.Xml;
|
||||
using System.Xml.Serialization;
|
||||
using API.Data;
|
||||
using API.Data.Metadata;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Plus;
|
||||
using API.Services.Tasks;
|
||||
using API.Services.Tasks.Metadata;
|
||||
using API.Services.Tasks.Scanner;
|
||||
using API.SignalR;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
#nullable enable
|
||||
|
||||
public class ScannerHelper
|
||||
{
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
private readonly ITestOutputHelper _testOutputHelper;
|
||||
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests");
|
||||
private readonly string _testcasesDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/TestCases");
|
||||
private readonly string _imagePath = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/1x1.png");
|
||||
private static readonly string[] ComicInfoExtensions = new[] { ".cbz", ".cbr", ".zip", ".rar" };
|
||||
|
||||
public ScannerHelper(IUnitOfWork unitOfWork, ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
_unitOfWork = unitOfWork;
|
||||
_testOutputHelper = testOutputHelper;
|
||||
}
|
||||
|
||||
public async Task<Library> GenerateScannerData(string testcase, Dictionary<string, ComicInfo> comicInfos = null)
|
||||
{
|
||||
var testDirectoryPath = await GenerateTestDirectory(Path.Join(_testcasesDirectory, testcase), comicInfos);
|
||||
|
||||
var (publisher, type) = SplitPublisherAndLibraryType(Path.GetFileNameWithoutExtension(testcase));
|
||||
|
||||
var library = new LibraryBuilder(publisher, type)
|
||||
.WithFolders([new FolderPath() {Path = testDirectoryPath}])
|
||||
.Build();
|
||||
|
||||
var admin = new AppUserBuilder("admin", "admin@kavita.com", Seed.DefaultThemes[0])
|
||||
.WithLibrary(library)
|
||||
.Build();
|
||||
|
||||
_unitOfWork.UserRepository.Add(admin); // Admin is needed for generating collections/reading lists
|
||||
_unitOfWork.LibraryRepository.Add(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
return library;
|
||||
}
|
||||
|
||||
public ScannerService CreateServices(DirectoryService ds = null, IFileSystem fs = null)
|
||||
{
|
||||
fs ??= new FileSystem();
|
||||
ds ??= new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var archiveService = new ArchiveService(Substitute.For<ILogger<ArchiveService>>(), ds,
|
||||
Substitute.For<IImageService>(), Substitute.For<IMediaErrorService>());
|
||||
var readingItemService = new ReadingItemService(archiveService, Substitute.For<IBookService>(),
|
||||
Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>());
|
||||
|
||||
|
||||
var processSeries = new ProcessSeries(_unitOfWork, Substitute.For<ILogger<ProcessSeries>>(),
|
||||
Substitute.For<IEventHub>(),
|
||||
ds, Substitute.For<ICacheHelper>(), readingItemService, new FileService(fs),
|
||||
Substitute.For<IMetadataService>(),
|
||||
Substitute.For<IWordCountAnalyzerService>(),
|
||||
Substitute.For<IReadingListService>(),
|
||||
Substitute.For<IExternalMetadataService>());
|
||||
|
||||
var scanner = new ScannerService(_unitOfWork, Substitute.For<ILogger<ScannerService>>(),
|
||||
Substitute.For<IMetadataService>(),
|
||||
Substitute.For<ICacheService>(), Substitute.For<IEventHub>(), ds,
|
||||
readingItemService, processSeries, Substitute.For<IWordCountAnalyzerService>());
|
||||
return scanner;
|
||||
}
|
||||
|
||||
private static (string Publisher, LibraryType Type) SplitPublisherAndLibraryType(string input)
|
||||
{
|
||||
// Split the input string based on " - "
|
||||
var parts = input.Split(" - ", StringSplitOptions.RemoveEmptyEntries);
|
||||
|
||||
if (parts.Length != 2)
|
||||
{
|
||||
throw new ArgumentException("Input must be in the format 'Publisher - LibraryType'");
|
||||
}
|
||||
|
||||
var publisher = parts[0].Trim();
|
||||
var libraryTypeString = parts[1].Trim();
|
||||
|
||||
// Try to parse the right-hand side as a LibraryType enum
|
||||
if (!Enum.TryParse<LibraryType>(libraryTypeString, out var libraryType))
|
||||
{
|
||||
throw new ArgumentException($"'{libraryTypeString}' is not a valid LibraryType");
|
||||
}
|
||||
|
||||
return (publisher, libraryType);
|
||||
}
|
||||
|
||||
|
||||
|
||||
private async Task<string> GenerateTestDirectory(string mapPath, Dictionary<string, ComicInfo> comicInfos = null)
|
||||
{
|
||||
// Read the map file
|
||||
var mapContent = await File.ReadAllTextAsync(mapPath);
|
||||
|
||||
// Deserialize the JSON content into a list of strings using System.Text.Json
|
||||
var filePaths = JsonSerializer.Deserialize<List<string>>(mapContent);
|
||||
|
||||
// Create a test directory
|
||||
var testDirectory = Path.Combine(_testDirectory, Path.GetFileNameWithoutExtension(mapPath));
|
||||
if (Directory.Exists(testDirectory))
|
||||
{
|
||||
Directory.Delete(testDirectory, true);
|
||||
}
|
||||
Directory.CreateDirectory(testDirectory);
|
||||
|
||||
// Generate the files and folders
|
||||
await Scaffold(testDirectory, filePaths, comicInfos);
|
||||
|
||||
_testOutputHelper.WriteLine($"Test Directory Path: {testDirectory}");
|
||||
|
||||
return Path.GetFullPath(testDirectory);
|
||||
}
|
||||
|
||||
|
||||
public async Task Scaffold(string testDirectory, List<string> filePaths, Dictionary<string, ComicInfo> comicInfos = null)
|
||||
{
|
||||
foreach (var relativePath in filePaths)
|
||||
{
|
||||
var fullPath = Path.Combine(testDirectory, relativePath);
|
||||
var fileDir = Path.GetDirectoryName(fullPath);
|
||||
|
||||
// Create the directory if it doesn't exist
|
||||
if (!Directory.Exists(fileDir))
|
||||
{
|
||||
Directory.CreateDirectory(fileDir);
|
||||
Console.WriteLine($"Created directory: {fileDir}");
|
||||
}
|
||||
|
||||
var ext = Path.GetExtension(fullPath).ToLower();
|
||||
if (ComicInfoExtensions.Contains(ext) && comicInfos != null && comicInfos.TryGetValue(Path.GetFileName(relativePath), out var info))
|
||||
{
|
||||
CreateMinimalCbz(fullPath, info);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Create an empty file
|
||||
await File.Create(fullPath).DisposeAsync();
|
||||
Console.WriteLine($"Created empty file: {fullPath}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void CreateMinimalCbz(string filePath, ComicInfo? comicInfo = null)
|
||||
{
|
||||
using (var archive = ZipFile.Open(filePath, ZipArchiveMode.Create))
|
||||
{
|
||||
// Add the 1x1 image to the archive
|
||||
archive.CreateEntryFromFile(_imagePath, "1x1.png");
|
||||
|
||||
if (comicInfo != null)
|
||||
{
|
||||
// Serialize ComicInfo object to XML
|
||||
var comicInfoXml = SerializeComicInfoToXml(comicInfo);
|
||||
|
||||
// Create an entry for ComicInfo.xml in the archive
|
||||
var entry = archive.CreateEntry("ComicInfo.xml");
|
||||
using var entryStream = entry.Open();
|
||||
using var writer = new StreamWriter(entryStream, Encoding.UTF8);
|
||||
|
||||
// Write the XML to the archive
|
||||
writer.Write(comicInfoXml);
|
||||
}
|
||||
|
||||
}
|
||||
Console.WriteLine($"Created minimal CBZ archive: {filePath} with{(comicInfo != null ? "" : "out")} metadata.");
|
||||
}
|
||||
|
||||
|
||||
private static string SerializeComicInfoToXml(ComicInfo comicInfo)
|
||||
{
|
||||
var xmlSerializer = new XmlSerializer(typeof(ComicInfo));
|
||||
using var stringWriter = new StringWriter();
|
||||
using (var xmlWriter = XmlWriter.Create(stringWriter, new XmlWriterSettings { Indent = true, Encoding = new UTF8Encoding(false), OmitXmlDeclaration = false}))
|
||||
{
|
||||
xmlSerializer.Serialize(xmlWriter, comicInfo);
|
||||
}
|
||||
|
||||
// For the love of god, I spent 2 hours trying to get utf-8 with no BOM
|
||||
return stringWriter.ToString().Replace("""<?xml version="1.0" encoding="utf-16"?>""",
|
||||
@"<?xml version='1.0' encoding='utf-8'?>");
|
||||
}
|
||||
}
|
|
@ -1,6 +1,5 @@
|
|||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using API.Data;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
|
|
46
API.Tests/Helpers/StringHelperTests.cs
Normal file
|
@ -0,0 +1,46 @@
|
|||
using API.Helpers;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class StringHelperTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData(
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> <br><br><br /> Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?</p>",
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?</p>"
|
||||
)]
|
||||
[InlineData(
|
||||
"<p><a href=\"https://blog.goo.ne.jp/tamakiya_web\">Blog</a> | <a href=\"https://twitter.com/tamakinozomu\">Twitter</a> | <a href=\"https://www.pixiv.net/member.php?id=68961\">Pixiv</a> | <a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>",
|
||||
"<p><a href=\"https://blog.goo.ne.jp/tamakiya_web\">Blog</a> | <a href=\"https://twitter.com/tamakinozomu\">Twitter</a> | <a href=\"https://www.pixiv.net/member.php?id=68961\">Pixiv</a> | <a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>"
|
||||
)]
|
||||
public void TestSquashBreaklines(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, StringHelper.SquashBreaklines(input));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> (Source: Anime News Network)</p>",
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>"
|
||||
)]
|
||||
[InlineData(
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>(Source: Anime News Network)",
|
||||
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>"
|
||||
)]
|
||||
public void TestRemoveSourceInDescription(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, StringHelper.RemoveSourceInDescription(input));
|
||||
}
|
||||
|
||||
|
||||
[Theory]
|
||||
[InlineData(
|
||||
"""<a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>""",
|
||||
"""<a href=\"https://pawoo.net/@tamakiya\">Pawoo</a></p>"""
|
||||
)]
|
||||
public void TestCorrectUrls(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, StringHelper.CorrectUrls(input));
|
||||
}
|
||||
}
|
|
@ -1,126 +0,0 @@
|
|||
using System.Collections.Generic;
|
||||
using API.Data;
|
||||
using API.Entities;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Helpers;
|
||||
|
||||
public class TagHelperTests
|
||||
{
|
||||
[Fact]
|
||||
public void UpdateTag_ShouldAddNewTag()
|
||||
{
|
||||
var allTags = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
new TagBuilder("action").Build(),
|
||||
new TagBuilder("Sci-fi").Build(),
|
||||
};
|
||||
var tagAdded = new List<Tag>();
|
||||
|
||||
TagHelper.UpdateTag(allTags, new[] {"Action", "Adventure"}, (tag, added) =>
|
||||
{
|
||||
if (added)
|
||||
{
|
||||
tagAdded.Add(tag);
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
Assert.Single(tagAdded);
|
||||
Assert.Equal(4, allTags.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UpdateTag_ShouldNotAddDuplicateTag()
|
||||
{
|
||||
var allTags = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
new TagBuilder("action").Build(),
|
||||
new TagBuilder("Sci-fi").Build(),
|
||||
|
||||
};
|
||||
var tagAdded = new List<Tag>();
|
||||
|
||||
TagHelper.UpdateTag(allTags, new[] {"Action", "Scifi"}, (tag, added) =>
|
||||
{
|
||||
if (added)
|
||||
{
|
||||
tagAdded.Add(tag);
|
||||
}
|
||||
TagHelper.AddTagIfNotExists(allTags, tag);
|
||||
});
|
||||
|
||||
Assert.Equal(3, allTags.Count);
|
||||
Assert.Empty(tagAdded);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddTag_ShouldAddOnlyNonExistingTag()
|
||||
{
|
||||
var existingTags = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
new TagBuilder("action").Build(),
|
||||
new TagBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
|
||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Action").Build());
|
||||
Assert.Equal(3, existingTags.Count);
|
||||
|
||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("action").Build());
|
||||
Assert.Equal(3, existingTags.Count);
|
||||
|
||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Shonen").Build());
|
||||
Assert.Equal(4, existingTags.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void KeepOnlySamePeopleBetweenLists()
|
||||
{
|
||||
var existingTags = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
new TagBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
var peopleFromChapters = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
};
|
||||
|
||||
var tagRemoved = new List<Tag>();
|
||||
TagHelper.KeepOnlySameTagBetweenLists(existingTags,
|
||||
peopleFromChapters, tag =>
|
||||
{
|
||||
tagRemoved.Add(tag);
|
||||
});
|
||||
|
||||
Assert.Single(tagRemoved);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RemoveEveryoneIfNothingInRemoveAllExcept()
|
||||
{
|
||||
var existingTags = new List<Tag>
|
||||
{
|
||||
new TagBuilder("Action").Build(),
|
||||
new TagBuilder("Sci-fi").Build(),
|
||||
};
|
||||
|
||||
var peopleFromChapters = new List<Tag>();
|
||||
|
||||
var tagRemoved = new List<Tag>();
|
||||
TagHelper.KeepOnlySameTagBetweenLists(existingTags,
|
||||
peopleFromChapters, tag =>
|
||||
{
|
||||
tagRemoved.Add(tag);
|
||||
});
|
||||
|
||||
Assert.Equal(2, tagRemoved.Count);
|
||||
}
|
||||
}
|
|
@ -1,43 +0,0 @@
|
|||
using Xunit;
|
||||
|
||||
namespace API.Tests.Parser;
|
||||
|
||||
public class BookParserTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")]
|
||||
[InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")]
|
||||
[InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")]
|
||||
public void ParseSeriesTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
|
||||
[InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")]
|
||||
public void ParseVolumeTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename));
|
||||
}
|
||||
|
||||
// [Theory]
|
||||
// [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA", "@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA")]
|
||||
// [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url('fonts/font.css')", "@font-face{font-family:'syyskuu_repaleinen';src:url('TEST/fonts/font.css')")]
|
||||
// public void ReplaceFontSrcUrl(string input, string expected)
|
||||
// {
|
||||
// var apiBase = "TEST/";
|
||||
// var actual = API.Parser.Parser.FontSrcUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
|
||||
// Assert.Equal(expected, actual);
|
||||
// }
|
||||
//
|
||||
// [Theory]
|
||||
// [InlineData("@import url('font.css');", "@import url('TEST/font.css');")]
|
||||
// public void ReplaceImportSrcUrl(string input, string expected)
|
||||
// {
|
||||
// var apiBase = "TEST/";
|
||||
// var actual = API.Parser.Parser.CssImportUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
|
||||
// Assert.Equal(expected, actual);
|
||||
// }
|
||||
|
||||
}
|
249
API.Tests/Parsers/BasicParserTests.cs
Normal file
|
@ -0,0 +1,249 @@
|
|||
using System.IO;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Parsers;
|
||||
|
||||
public class BasicParserTests : AbstractFsTest
|
||||
{
|
||||
private readonly BasicParser _parser;
|
||||
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||
private readonly string _rootDirectory;
|
||||
|
||||
public BasicParserTests()
|
||||
{
|
||||
var fileSystem = CreateFileSystem();
|
||||
_rootDirectory = Path.Join(DataDirectory, "Books/");
|
||||
fileSystem.AddDirectory(_rootDirectory);
|
||||
fileSystem.AddFile($"{_rootDirectory}Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
|
||||
|
||||
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1 Chapter 2.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Chapter 3.cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("$\"{RootDirectory}Accel World/Accel World Gaiden SP01.cbz", new MockFileData(""));
|
||||
|
||||
|
||||
fileSystem.AddFile($"{_rootDirectory}Accel World/cover.png", new MockFileData(""));
|
||||
|
||||
fileSystem.AddFile($"{_rootDirectory}Batman/Batman #1.cbz", new MockFileData(""));
|
||||
|
||||
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||
_parser = new BasicParser(ds, new ImageParser(ds));
|
||||
}
|
||||
|
||||
#region Parse_Manga
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is a loose-leaf cover in the manga library, that it is ignored
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_JustCover_ShouldReturnNull()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Accel World/cover.png", $"{_rootDirectory}Accel World/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is a loose-leaf cover in the manga library, that it is ignored
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_OtherImage_ShouldReturnNull()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Accel World/page 01.png", $"{_rootDirectory}Accel World/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is a volume and chapter in filename, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_VolumeAndChapterInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz", $"{_rootDirectory}Mujaki no Rakuen/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Mujaki no Rakuen", actual.Series);
|
||||
Assert.Equal("12", actual.Volumes);
|
||||
Assert.Equal("76", actual.Chapters);
|
||||
Assert.False(actual.IsSpecial);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is a volume in filename, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_JustVolumeInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz",
|
||||
$"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", actual.Series);
|
||||
Assert.Equal("1", actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.False(actual.IsSpecial);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is a chapter only in filename, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_JustChapterInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Beelzebub/Beelzebub_01_[Noodles].zip",
|
||||
$"{_rootDirectory}Beelzebub/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Beelzebub", actual.Series);
|
||||
Assert.Equal(Parser.LooseLeafVolume, actual.Volumes);
|
||||
Assert.Equal("1", actual.Chapters);
|
||||
Assert.False(actual.IsSpecial);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is a SP Marker in filename, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_SpecialMarkerInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr",
|
||||
$"{_rootDirectory}Summer Time Rendering/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Summer Time Rendering", actual.Series);
|
||||
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when the filename parses as a special, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_SpecialInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Volume SP01.cbr",
|
||||
$"{_rootDirectory}Summer Time Rendering/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Summer Time Rendering", actual.Series);
|
||||
Assert.Equal("Volume", actual.Title);
|
||||
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when the filename parses as a special, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_SpecialInFilename2()
|
||||
{
|
||||
var actual = _parser.Parse("M:/Kimi wa Midara na Boku no Joou/Specials/[Renzokusei] Special 1 SP02.zip",
|
||||
"M:/Kimi wa Midara na Boku no Joou/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Kimi wa Midara na Boku no Joou", actual.Series);
|
||||
Assert.Equal("[Renzokusei] Special 1", actual.Title);
|
||||
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when the filename parses as a special, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_SpecialInFilename_StrangeNaming()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}My Dress-Up Darling/SP01 1. Special Name.cbz",
|
||||
_rootDirectory,
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("My Dress-Up Darling", actual.Series);
|
||||
Assert.Equal("1. Special Name", actual.Title);
|
||||
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when there is an edition in filename, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_EditionInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz",
|
||||
$"{_rootDirectory}Air Gear/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Air Gear", actual.Series);
|
||||
Assert.Equal("1", actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.False(actual.IsSpecial);
|
||||
Assert.Equal("Omnibus", actual.Edition);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Parse_Books
|
||||
/// <summary>
|
||||
/// Tests that when there is a volume in filename, it appropriately parses
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_MangaBooks_JustVolumeInFilename()
|
||||
{
|
||||
var actual = _parser.Parse($"{_rootDirectory}Epubs/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub",
|
||||
$"{_rootDirectory}Epubs/",
|
||||
_rootDirectory, LibraryType.Manga);
|
||||
Assert.NotNull(actual);
|
||||
|
||||
Assert.Equal("Harrison, Kim - The Good, The Bad, and the Undead - Hollows", actual.Series);
|
||||
Assert.Equal("2.5", actual.Volumes);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region IsApplicable
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on images and Image library type
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||
{
|
||||
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
|
||||
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.ComicVine));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on images and Image library type
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||
{
|
||||
Assert.True(_parser.IsApplicable("something.png", LibraryType.Manga));
|
||||
Assert.True(_parser.IsApplicable("something.png", LibraryType.Comic));
|
||||
Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Book));
|
||||
Assert.True(_parser.IsApplicable("something.epub", LibraryType.LightNovel));
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
}
|
73
API.Tests/Parsers/BookParserTests.cs
Normal file
|
@ -0,0 +1,73 @@
|
|||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Parsers;
|
||||
|
||||
public class BookParserTests
|
||||
{
|
||||
private readonly BookParser _parser;
|
||||
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||
private const string RootDirectory = "C:/Books/";
|
||||
|
||||
public BookParserTests()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory("C:/Books/");
|
||||
fileSystem.AddFile("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Books/Adam Freeman - Pro ASP.NET Core 6.epub", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Books/My Fav Book SP01.epub", new MockFileData(""));
|
||||
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||
_parser = new BookParser(ds, Substitute.For<IBookService>(), new BasicParser(ds, new ImageParser(ds)));
|
||||
}
|
||||
|
||||
#region Parse
|
||||
|
||||
// TODO: I'm not sure how to actually test this as it relies on an epub parser to actually do anything
|
||||
|
||||
/// <summary>
|
||||
/// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
|
||||
/// </summary>
|
||||
// [Fact]
|
||||
// public void Parse_SeriesWithDirectoryName()
|
||||
// {
|
||||
// var actual = _parser.Parse("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", "C:/Books/Birds of Prey/",
|
||||
// RootDirectory, LibraryType.Book, new ComicInfo()
|
||||
// {
|
||||
// Series = "Harry Potter",
|
||||
// Volume = "1"
|
||||
// });
|
||||
//
|
||||
// Assert.NotNull(actual);
|
||||
// Assert.Equal("Harry Potter", actual.Series);
|
||||
// Assert.Equal("1", actual.Volumes);
|
||||
// }
|
||||
|
||||
#endregion
|
||||
|
||||
#region IsApplicable
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on images and Image library type
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||
{
|
||||
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
|
||||
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Book));
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on images and Image library type
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||
{
|
||||
Assert.True(_parser.IsApplicable("something.epub", LibraryType.Image));
|
||||
}
|
||||
#endregion
|
||||
}
|
115
API.Tests/Parsers/ComicVineParserTests.cs
Normal file
|
@ -0,0 +1,115 @@
|
|||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Data.Metadata;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Parsers;
|
||||
|
||||
public class ComicVineParserTests
|
||||
{
|
||||
private readonly ComicVineParser _parser;
|
||||
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||
private const string RootDirectory = "C:/Comics/";
|
||||
|
||||
public ComicVineParserTests()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory("C:/Comics/");
|
||||
fileSystem.AddDirectory("C:/Comics/Birds of Prey (2002)");
|
||||
fileSystem.AddFile("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", new MockFileData(""));
|
||||
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||
_parser = new ComicVineParser(ds);
|
||||
}
|
||||
|
||||
#region Parse
|
||||
|
||||
/// <summary>
|
||||
/// Tests that when Series and Volume are filled out, Kavita uses that for the Series Name
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_SeriesWithComicInfo()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
|
||||
RootDirectory, LibraryType.ComicVine, new ComicInfo()
|
||||
{
|
||||
Series = "Birds of Prey",
|
||||
Volume = "2002"
|
||||
});
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey (2002)", actual.Series);
|
||||
Assert.Equal("2002", actual.Volumes);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that no ComicInfo, take the Directory Name if it matches "Series (2002)" or "Series (2)"
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_SeriesWithDirectoryNameAsSeriesYear()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
|
||||
RootDirectory, LibraryType.ComicVine, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey (2002)", actual.Series);
|
||||
Assert.Equal("2002", actual.Volumes);
|
||||
Assert.Equal("1", actual.Chapters);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that no ComicInfo, take a directory name up to root if it matches "Series (2002)" or "Series (2)"
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_SeriesWithADirectoryNameAsSeriesYear()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", "C:/Comics/DC Comics/",
|
||||
RootDirectory, LibraryType.ComicVine, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey (1999)", actual.Series);
|
||||
Assert.Equal("1999", actual.Volumes);
|
||||
Assert.Equal("1", actual.Chapters);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that no ComicInfo and nothing matches Series (Volume), then just take the directory name as the Series
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_FallbackToDirectoryNameOnly()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", "C:/Comics/DC Comics/",
|
||||
RootDirectory, LibraryType.ComicVine, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Blood Syndicate", actual.Series);
|
||||
Assert.Equal(Parser.LooseLeafVolume, actual.Volumes);
|
||||
Assert.Equal("1", actual.Chapters);
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region IsApplicable
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on ComicVine type
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||
{
|
||||
Assert.False(_parser.IsApplicable("", LibraryType.Comic));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on ComicVine type
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||
{
|
||||
Assert.True(_parser.IsApplicable("", LibraryType.ComicVine));
|
||||
}
|
||||
#endregion
|
||||
}
|
|
@ -1,7 +1,5 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Generic;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
|
@ -10,7 +8,7 @@ using NSubstitute;
|
|||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Parser;
|
||||
namespace API.Tests.Parsers;
|
||||
|
||||
public class DefaultParserTests
|
||||
{
|
||||
|
@ -21,10 +19,12 @@ public class DefaultParserTests
|
|||
{
|
||||
_testOutputHelper = testOutputHelper;
|
||||
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||
_defaultParser = new DefaultParser(directoryService);
|
||||
_defaultParser = new BasicParser(directoryService, new ImageParser(directoryService));
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
#region ParseFromFallbackFolders
|
||||
[Theory]
|
||||
[InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")]
|
||||
|
@ -33,7 +33,7 @@ public class DefaultParserTests
|
|||
[InlineData("C:/", "C:/Something Random/Mujaki no Rakuen SP01.cbz", "Something Random")]
|
||||
public void ParseFromFallbackFolders_FallbackShouldParseSeries(string rootDir, string inputPath, string expectedSeries)
|
||||
{
|
||||
var actual = _defaultParser.Parse(inputPath, rootDir);
|
||||
var actual = _defaultParser.Parse(inputPath, rootDir, rootDir, LibraryType.Manga, null);
|
||||
if (actual == null)
|
||||
{
|
||||
Assert.NotNull(actual);
|
||||
|
@ -44,19 +44,18 @@ public class DefaultParserTests
|
|||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")]
|
||||
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")]
|
||||
[InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster~0~1")]
|
||||
[InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", "Hajime no Ippo~0~0")]
|
||||
public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string expectedParseInfo)
|
||||
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", new [] {"Btooom!", "1", "1"})]
|
||||
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", new [] {"Btooom!", "1", "2"})]
|
||||
[InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", new [] {"Monster", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, "1"})]
|
||||
[InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", new [] {"Hajime no Ippo", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter})]
|
||||
public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string[] expectedParseInfo)
|
||||
{
|
||||
const string rootDirectory = "/manga/";
|
||||
var tokens = expectedParseInfo.Split("~");
|
||||
var actual = new ParserInfo {Series = "", Chapters = "0", Volumes = "0"};
|
||||
var actual = new ParserInfo {Series = "", Chapters = Parser.DefaultChapter, Volumes = Parser.LooseLeafVolume};
|
||||
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
||||
Assert.Equal(tokens[0], actual.Series);
|
||||
Assert.Equal(tokens[1], actual.Volumes);
|
||||
Assert.Equal(tokens[2], actual.Chapters);
|
||||
Assert.Equal(expectedParseInfo[0], actual.Series);
|
||||
Assert.Equal(expectedParseInfo[1], actual.Volumes);
|
||||
Assert.Equal(expectedParseInfo[2], actual.Chapters);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
|
@ -74,8 +73,8 @@ public class DefaultParserTests
|
|||
fs.AddDirectory(rootDirectory);
|
||||
fs.AddFile(inputFile, new MockFileData(""));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var parser = new DefaultParser(ds);
|
||||
var actual = parser.Parse(inputFile, rootDirectory);
|
||||
var parser = new BasicParser(ds, new ImageParser(ds));
|
||||
var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, null);
|
||||
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
||||
Assert.Equal(expectedParseInfo, actual.Series);
|
||||
}
|
||||
|
@ -90,8 +89,8 @@ public class DefaultParserTests
|
|||
fs.AddDirectory(rootDirectory);
|
||||
fs.AddFile(inputFile, new MockFileData(""));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var parser = new DefaultParser(ds);
|
||||
var actual = parser.Parse(inputFile, rootDirectory);
|
||||
var parser = new BasicParser(ds, new ImageParser(ds));
|
||||
var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, null);
|
||||
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
||||
Assert.Equal(expectedParseInfo, actual.Series);
|
||||
}
|
||||
|
@ -101,13 +100,6 @@ public class DefaultParserTests
|
|||
|
||||
#region Parse
|
||||
|
||||
[Fact]
|
||||
public void Parse_MangaLibrary_JustCover_ShouldReturnNull()
|
||||
{
|
||||
const string rootPath = @"E:/Manga/";
|
||||
var actual = _defaultParser.Parse(@"E:/Manga/Accel World/cover.png", rootPath);
|
||||
Assert.Null(actual);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Parse_ParseInfo_Manga()
|
||||
|
@ -127,19 +119,20 @@ public class DefaultParserTests
|
|||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1",
|
||||
Chapters = "0", Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip";
|
||||
filepath = @"E:/Manga/Beelzebub/Beelzebub_01_[Noodles].zip";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Beelzebub", Volumes = "0",
|
||||
Series = "Beelzebub", Volumes = Parser.LooseLeafVolume,
|
||||
Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
|
||||
// Note: Lots of duplicates here. I think I can move them to the ParserTests itself
|
||||
filepath = @"E:/Manga/Ichinensei ni Nacchattara/Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Ichinensei ni Nacchattara", Volumes = "1",
|
||||
|
@ -147,71 +140,71 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
|
||||
filepath = @"E:/Manga/Tenjo Tenge (Color)/Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Tenjo Tenge {Full Contact Edition}", Volumes = "1", Edition = "",
|
||||
Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
|
||||
filepath = @"E:/Manga/Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)/Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
|
||||
Chapters = "0", Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
|
||||
filepath = @"E:/Manga/Dorohedoro/Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Dorohedoro", Volumes = "1", Edition = "",
|
||||
Chapters = "0", Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
|
||||
filepath = @"E:/Manga/APOSIMZ/APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "APOSIMZ", Volumes = "0", Edition = "",
|
||||
Series = "APOSIMZ", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
|
||||
filepath = @"E:/Manga/Corpse Party Musume/Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "",
|
||||
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
|
||||
filepath = @"E:/Manga/Goblin Slayer/Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "",
|
||||
Series = "Goblin Slayer - Brand New Day", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
|
||||
filepath = @"E:/Manga/Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Summer Time Rendering", Volumes = "0", Edition = "",
|
||||
Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
|
||||
Series = "Summer Time Rendering", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, Edition = "",
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath, IsSpecial = true
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
|
||||
filepath = @"E:/Manga/Seraph of the End/Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "",
|
||||
Series = "Seraph of the End - Vampire Reign", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz";
|
||||
filepath = @"E:/Manga/Kono Subarashii Sekai ni Bakuen wo!/Vol. 00 Ch. 000.cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
|
||||
|
@ -219,7 +212,7 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz";
|
||||
filepath = @"E:/Manga/Toukyou Akazukin/Vol. 01 Ch. 001.cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
|
||||
|
@ -228,37 +221,37 @@ public class DefaultParserTests
|
|||
});
|
||||
|
||||
// If an image is cover exclusively, ignore it
|
||||
filepath = @"E:\Manga\Seraph of the End\cover.png";
|
||||
filepath = @"E:/Manga/Seraph of the End/cover.png";
|
||||
expected.Add(filepath, null);
|
||||
|
||||
filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz";
|
||||
filepath = @"E:/Manga/The Beginning After the End/Chapter 001.cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "The Beginning After the End", Volumes = "0", Edition = "",
|
||||
Series = "The Beginning After the End", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Air Gear\Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
|
||||
filepath = @"E:/Manga/Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Air Gear", Volumes = "1", Edition = "Omnibus",
|
||||
Chapters = "0", Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
|
||||
filepath = @"E:/Manga/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
|
||||
Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
foreach (var file in expected.Keys)
|
||||
{
|
||||
var expectedInfo = expected[file];
|
||||
var actual = _defaultParser.Parse(file, rootPath);
|
||||
var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Manga, null);
|
||||
if (expectedInfo == null)
|
||||
{
|
||||
Assert.Null(actual);
|
||||
|
@ -283,20 +276,20 @@ public class DefaultParserTests
|
|||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
//[Fact]
|
||||
public void Parse_ParseInfo_Manga_ImageOnly()
|
||||
{
|
||||
// Images don't have root path as E:\Manga, but rather as the path of the folder
|
||||
// Images don't have root path as E:/Manga, but rather as the path of the folder
|
||||
|
||||
// Note: Fallback to folder will parse Monster #8 and get Monster
|
||||
var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
|
||||
var filepath = @"E:/Manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg";
|
||||
var expectedInfo2 = new ParserInfo
|
||||
{
|
||||
Series = "Monster #8", Volumes = "0", Edition = "",
|
||||
Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
var actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Monster #8");
|
||||
var actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Monster #8", "E:/Manga", LibraryType.Manga, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
@ -314,7 +307,7 @@ public class DefaultParserTests
|
|||
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||
|
||||
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
|
||||
filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Vol19/ch. 186/Vol. 19 p106.gif";
|
||||
expectedInfo2 = new ParserInfo
|
||||
{
|
||||
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||
|
@ -322,7 +315,7 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
|
||||
actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\");
|
||||
actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga",LibraryType.Manga, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
@ -340,7 +333,7 @@ public class DefaultParserTests
|
|||
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||
|
||||
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
|
||||
filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Blank Folder/Vol19/ch. 186/Vol. 19 p106.gif";
|
||||
expectedInfo2 = new ParserInfo
|
||||
{
|
||||
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||
|
@ -348,7 +341,7 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
|
||||
actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\");
|
||||
actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
|
@ -379,7 +372,7 @@ public class DefaultParserTests
|
|||
filesystem.AddFile(@"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz", new MockFileData(""));
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var parser = new DefaultParser(ds);
|
||||
var parser = new BasicParser(ds, new ImageParser(ds));
|
||||
|
||||
var filepath = @"E:/Manga/Foo 50/Foo 50 v1.cbz";
|
||||
// There is a bad parse for series like "Foo 50", so we have parsed chapter as 50
|
||||
|
@ -390,7 +383,7 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath
|
||||
};
|
||||
|
||||
var actual = parser.Parse(filepath, rootPath);
|
||||
var actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
|
@ -414,12 +407,12 @@ public class DefaultParserTests
|
|||
filepath = @"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz";
|
||||
expected = new ParserInfo
|
||||
{
|
||||
Series = "Foo 50", Volumes = "0", IsSpecial = true,
|
||||
Chapters = "50", Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
|
||||
Series = "Foo 50", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, IsSpecial = true,
|
||||
Chapters = Parser.DefaultChapter, Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
};
|
||||
|
||||
actual = parser.Parse(filepath, rootPath);
|
||||
actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, null);
|
||||
Assert.NotNull(actual);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expected.Format, actual.Format);
|
||||
|
@ -444,26 +437,26 @@ public class DefaultParserTests
|
|||
[Fact]
|
||||
public void Parse_ParseInfo_Comic()
|
||||
{
|
||||
const string rootPath = @"E:/Comics/";
|
||||
const string rootPath = "E:/Comics/";
|
||||
var expected = new Dictionary<string, ParserInfo>();
|
||||
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Teen Titans", Volumes = "0",
|
||||
Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
|
||||
Series = "Teen Titans", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume,
|
||||
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath
|
||||
});
|
||||
|
||||
// Fallback test with bad naming
|
||||
filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
|
||||
filepath = @"E:/Comics/Comics/Babe/Babe Vol.1 #1-4/Babe 01.cbr";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Babe", Volumes = "0", Edition = "",
|
||||
Series = "Babe", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr";
|
||||
filepath = @"E:/Comics/Comics/Publisher/Batman the Detective (2021)/Batman the Detective - v6 - 11 - (2021).cbr";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Batman the Detective", Volumes = "6", Edition = "",
|
||||
|
@ -471,10 +464,10 @@ public class DefaultParserTests
|
|||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr";
|
||||
filepath = @"E:/Comics/Comics/Batman - The Man Who Laughs #1 (2005)/Batman - The Man Who Laughs #1 (2005).cbr";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Batman - The Man Who Laughs", Volumes = "0", Edition = "",
|
||||
Series = "Batman - The Man Who Laughs", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
@ -482,7 +475,7 @@ public class DefaultParserTests
|
|||
foreach (var file in expected.Keys)
|
||||
{
|
||||
var expectedInfo = expected[file];
|
||||
var actual = _defaultParser.Parse(file, rootPath, LibraryType.Comic);
|
||||
var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Comic, null);
|
||||
if (expectedInfo == null)
|
||||
{
|
||||
Assert.Null(actual);
|
97
API.Tests/Parsers/ImageParserTests.cs
Normal file
|
@ -0,0 +1,97 @@
|
|||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Parsers;
|
||||
|
||||
public class ImageParserTests
|
||||
{
|
||||
private readonly ImageParser _parser;
|
||||
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||
private const string RootDirectory = "C:/Comics/";
|
||||
|
||||
public ImageParserTests()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory("C:/Comics/");
|
||||
fileSystem.AddDirectory("C:/Comics/Birds of Prey (2002)");
|
||||
fileSystem.AddFile("C:/Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
|
||||
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||
_parser = new ImageParser(ds);
|
||||
}
|
||||
|
||||
#region Parse
|
||||
|
||||
/// <summary>
|
||||
/// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_SeriesWithDirectoryName()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01/01.jpg", "C:/Comics/Birds of Prey/",
|
||||
RootDirectory, LibraryType.Image, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey", actual.Series);
|
||||
Assert.Equal("1", actual.Chapters);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that if there is a Series Folder only, the code appropriately identifies the Series name from folder
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_SeriesWithNoNestedChapter()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01 page 01.jpg", "C:/Comics/",
|
||||
RootDirectory, LibraryType.Image, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey", actual.Series);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that if there is a Series Folder only, the code appropriately identifies the Series name from folder and everything else as a
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_SeriesWithLooseImages()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Comics/Birds of Prey/page 01.jpg", "C:/Comics/",
|
||||
RootDirectory, LibraryType.Image, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("Birds of Prey", actual.Series);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region IsApplicable
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on images and Image library type
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||
{
|
||||
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
|
||||
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
|
||||
Assert.False(_parser.IsApplicable("something.epub", LibraryType.Image));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on images and Image library type
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||
{
|
||||
Assert.True(_parser.IsApplicable("something.png", LibraryType.Image));
|
||||
}
|
||||
#endregion
|
||||
}
|
71
API.Tests/Parsers/PdfParserTests.cs
Normal file
|
@ -0,0 +1,71 @@
|
|||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Parsers;
|
||||
|
||||
public class PdfParserTests
|
||||
{
|
||||
private readonly PdfParser _parser;
|
||||
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||
private const string RootDirectory = "C:/Books/";
|
||||
|
||||
public PdfParserTests()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory("C:/Books/");
|
||||
fileSystem.AddDirectory("C:/Books/Birds of Prey (2002)");
|
||||
fileSystem.AddFile("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf", new MockFileData(""));
|
||||
fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
|
||||
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||
_parser = new PdfParser(ds);
|
||||
}
|
||||
|
||||
#region Parse
|
||||
|
||||
/// <summary>
|
||||
/// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void Parse_Book_SeriesWithDirectoryName()
|
||||
{
|
||||
var actual = _parser.Parse("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf",
|
||||
"C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/",
|
||||
RootDirectory, LibraryType.Book, null);
|
||||
|
||||
Assert.NotNull(actual);
|
||||
Assert.Equal("A Dictionary of Japanese Food - Ingredients and Culture", actual.Series);
|
||||
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||
Assert.True(actual.IsSpecial);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region IsApplicable
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on pdfs
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||
{
|
||||
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
|
||||
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
|
||||
Assert.False(_parser.IsApplicable("something.epub", LibraryType.Image));
|
||||
Assert.False(_parser.IsApplicable("something.png", LibraryType.Book));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests that this Parser can only be used on pdfs
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||
{
|
||||
Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Book));
|
||||
Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Manga));
|
||||
}
|
||||
#endregion
|
||||
}
|
24
API.Tests/Parsing/BookParsingTests.cs
Normal file
|
@ -0,0 +1,24 @@
|
|||
using API.Entities.Enums;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Parsing;
|
||||
|
||||
public class BookParsingTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")]
|
||||
[InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")]
|
||||
[InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")]
|
||||
public void ParseSeriesTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Book));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
|
||||
[InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")]
|
||||
public void ParseVolumeTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Book));
|
||||
}
|
||||
}
|
|
@ -1,26 +1,11 @@
|
|||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Services;
|
||||
using API.Entities.Enums;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Parser;
|
||||
namespace API.Tests.Parsing;
|
||||
|
||||
public class ComicParserTests
|
||||
public class ComicParsingTests
|
||||
{
|
||||
private readonly ITestOutputHelper _testOutputHelper;
|
||||
private readonly DefaultParser _defaultParser;
|
||||
|
||||
public ComicParserTests(ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
_testOutputHelper = testOutputHelper;
|
||||
_defaultParser =
|
||||
new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
|
||||
new MockFileSystem()));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")]
|
||||
|
@ -66,56 +51,58 @@ public class ComicParserTests
|
|||
[InlineData("Demon 012 (Sep 1973) c2c", "Demon")]
|
||||
[InlineData("Dragon Age - Until We Sleep 01 (of 03)", "Dragon Age - Until We Sleep")]
|
||||
[InlineData("Green Lantern v2 017 - The Spy-Eye that doomed Green Lantern v2", "Green Lantern")]
|
||||
[InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire - Adam Strange")]
|
||||
[InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis - Rags Morales Sketches")]
|
||||
[InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire Special - Adam Strange")]
|
||||
[InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis Extra - Rags Morales Sketches")]
|
||||
[InlineData("Daredevil - t6 - 10 - (2019)", "Daredevil")]
|
||||
[InlineData("Batgirl T2000 #57", "Batgirl")]
|
||||
[InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")]
|
||||
[InlineData("Conquistador_-Tome_2", "Conquistador")]
|
||||
[InlineData("Max_l_explorateur-_Tome_0", "Max l explorateur")]
|
||||
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")]
|
||||
[InlineData("Bd Fr-Aldebaran-Antares-t6", "Aldebaran-Antares")]
|
||||
[InlineData("Bd Fr-Aldebaran-Antares-t6", "Bd Fr-Aldebaran-Antares")]
|
||||
[InlineData("Tintin - T22 Vol 714 pour Sydney", "Tintin")]
|
||||
[InlineData("Fables 2010 Vol. 1 Legends in Exile", "Fables 2010")]
|
||||
[InlineData("Kebab Том 1 Глава 1", "Kebab")]
|
||||
[InlineData("Манга Глава 1", "Манга")]
|
||||
[InlineData("ReZero รีเซทชีวิต ฝ่าวิกฤตต่างโลก เล่ม 1", "ReZero รีเซทชีวิต ฝ่าวิกฤตต่างโลก")]
|
||||
[InlineData("SKY WORLD สกายเวิลด์ เล่มที่ 1", "SKY WORLD สกายเวิลด์")]
|
||||
public void ParseComicSeriesTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(filename));
|
||||
Assert.Equal(expected, Parser.ParseComicSeries(filename));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("01 Spider-Man & Wolverine 01.cbr", "0")]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")]
|
||||
[InlineData("Batman & Catwoman - Trail of the Gun 01", "0")]
|
||||
[InlineData("Batman & Daredevil - King of New York", "0")]
|
||||
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "0")]
|
||||
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
|
||||
[InlineData("Batman & Wildcat (1 of 3)", "0")]
|
||||
[InlineData("Batman And Superman World's Finest #01", "0")]
|
||||
[InlineData("Babe 01", "0")]
|
||||
[InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "0")]
|
||||
[InlineData("01 Spider-Man & Wolverine 01.cbr", Parser.LooseLeafVolume)]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Catwoman - Trail of the Gun 01", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Daredevil - King of New York", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Robin the Teen Wonder #0", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman & Wildcat (1 of 3)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman And Superman World's Finest #01", Parser.LooseLeafVolume)]
|
||||
[InlineData("Babe 01", Parser.LooseLeafVolume)]
|
||||
[InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
|
||||
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "0")]
|
||||
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Superman v1 024 (09-10 1943)", "1")]
|
||||
[InlineData("Superman v1.5 024 (09-10 1943)", "1.5")]
|
||||
[InlineData("Amazing Man Comics chapter 25", "0")]
|
||||
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")]
|
||||
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")]
|
||||
[InlineData("spawn-123", "0")]
|
||||
[InlineData("spawn-chapter-123", "0")]
|
||||
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "0")]
|
||||
[InlineData("Batman Beyond 04 (of 6) (1999)", "0")]
|
||||
[InlineData("Batman Beyond 001 (2012)", "0")]
|
||||
[InlineData("Batman Beyond 2.0 001 (2013)", "0")]
|
||||
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "0")]
|
||||
[InlineData("Amazing Man Comics chapter 25", Parser.LooseLeafVolume)]
|
||||
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", Parser.LooseLeafVolume)]
|
||||
[InlineData("spawn-123", Parser.LooseLeafVolume)]
|
||||
[InlineData("spawn-chapter-123", Parser.LooseLeafVolume)]
|
||||
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 04 (of 6) (1999)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 001 (2012)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman Beyond 2.0 001 (2013)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")]
|
||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
|
||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.LooseLeafVolume)]
|
||||
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")]
|
||||
[InlineData("Batgirl V2000 #57", "2000")]
|
||||
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "0")]
|
||||
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "0")]
|
||||
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", Parser.LooseLeafVolume)]
|
||||
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Daredevil - v6 - 10 - (2019)", "6")]
|
||||
[InlineData("Daredevil - v6.5", "6.5")]
|
||||
// Tome Tests
|
||||
|
@ -125,22 +112,25 @@ public class ComicParserTests
|
|||
[InlineData("Conquistador_Tome_2", "2")]
|
||||
[InlineData("Max_l_explorateur-_Tome_0", "0")]
|
||||
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")]
|
||||
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "0")]
|
||||
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", Parser.LooseLeafVolume)]
|
||||
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")]
|
||||
// Russian Tests
|
||||
[InlineData("Kebab Том 1 Глава 3", "1")]
|
||||
[InlineData("Манга Глава 2", "0")]
|
||||
[InlineData("Манга Глава 2", Parser.LooseLeafVolume)]
|
||||
[InlineData("ย้อนเวลากลับมาร้าย เล่ม 1", "1")]
|
||||
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "1")]
|
||||
[InlineData("วิวาห์รัก เดิมพันชีวิต ตอนที่ 2", Parser.LooseLeafVolume)]
|
||||
public void ParseComicVolumeTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(filename));
|
||||
Assert.Equal(expected, Parser.ParseComicVolume(filename));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("01 Spider-Man & Wolverine 01.cbr", "1")]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")]
|
||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.DefaultChapter)]
|
||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.DefaultChapter)]
|
||||
[InlineData("Batman & Catwoman - Trail of the Gun 01", "1")]
|
||||
[InlineData("Batman & Daredevil - King of New York", "0")]
|
||||
[InlineData("Batman & Daredevil - King of New York", Parser.DefaultChapter)]
|
||||
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")]
|
||||
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
|
||||
[InlineData("Batman & Wildcat (1 of 3)", "1")]
|
||||
|
@ -164,8 +154,8 @@ public class ComicParserTests
|
|||
[InlineData("Batman Beyond 001 (2012)", "1")]
|
||||
[InlineData("Batman Beyond 2.0 001 (2013)", "1")]
|
||||
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")]
|
||||
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "0")]
|
||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
|
||||
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", Parser.DefaultChapter)]
|
||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.DefaultChapter)]
|
||||
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")]
|
||||
[InlineData("Batgirl V2000 #57", "57")]
|
||||
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")]
|
||||
|
@ -174,43 +164,47 @@ public class ComicParserTests
|
|||
[InlineData("Daredevil - v6 - 10 - (2019)", "10")]
|
||||
[InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")]
|
||||
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")]
|
||||
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "0")]
|
||||
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", Parser.DefaultChapter)]
|
||||
[InlineData("Kebab Том 1 Глава 3", "3")]
|
||||
[InlineData("Манга Глава 2", "2")]
|
||||
[InlineData("Манга 2 Глава", "2")]
|
||||
[InlineData("Манга Том 1 2 Глава", "2")]
|
||||
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
|
||||
[InlineData("Max Level Returner ตอนที่ 5", "5")]
|
||||
[InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
|
||||
public void ParseComicChapterTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(filename));
|
||||
Assert.Equal(expected, Parser.ParseChapter(filename, LibraryType.Comic));
|
||||
}
|
||||
|
||||
|
||||
[Theory]
|
||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)]
|
||||
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)]
|
||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", false)]
|
||||
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", false)]
|
||||
[InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)]
|
||||
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)]
|
||||
[InlineData("Boule et Bill - THS -Bill à disparu", true)]
|
||||
[InlineData("Asterix - HS - Les 12 travaux d'Astérix", true)]
|
||||
[InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", true)]
|
||||
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", false)]
|
||||
[InlineData("Boule et Bill - THS -Bill à disparu", false)]
|
||||
[InlineData("Asterix - HS - Les 12 travaux d'Astérix", false)]
|
||||
[InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", false)]
|
||||
[InlineData("laughs", false)]
|
||||
[InlineData("Annual Days of Summer", true)]
|
||||
[InlineData("Adventure Time 2013 Annual #001 (2013)", true)]
|
||||
[InlineData("Adventure Time 2013_Annual_#001 (2013)", true)]
|
||||
[InlineData("Adventure Time 2013_-_Annual #001 (2013)", true)]
|
||||
[InlineData("Annual Days of Summer", false)]
|
||||
[InlineData("Adventure Time 2013 Annual #001 (2013)", false)]
|
||||
[InlineData("Adventure Time 2013_Annual_#001 (2013)", false)]
|
||||
[InlineData("Adventure Time 2013_-_Annual #001 (2013)", false)]
|
||||
[InlineData("G.I. Joe - A Real American Hero Yearbook 004 Reprint (2021)", false)]
|
||||
[InlineData("Mazebook 001", false)]
|
||||
[InlineData("X-23 One Shot (2010)", true)]
|
||||
[InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", true)]
|
||||
[InlineData("Batman Beyond Annual", true)]
|
||||
[InlineData("Batman Beyond Bonus", true)]
|
||||
[InlineData("Batman Beyond OneShot", true)]
|
||||
[InlineData("Batman Beyond Specials", true)]
|
||||
[InlineData("Batman Beyond Omnibus (1999)", true)]
|
||||
[InlineData("Batman Beyond Omnibus", true)]
|
||||
[InlineData("01 Annual Batman Beyond", true)]
|
||||
[InlineData("X-23 One Shot (2010)", false)]
|
||||
[InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", false)]
|
||||
[InlineData("Batman Beyond Annual", false)]
|
||||
[InlineData("Batman Beyond Bonus", false)]
|
||||
[InlineData("Batman Beyond OneShot", false)]
|
||||
[InlineData("Batman Beyond Specials", false)]
|
||||
[InlineData("Batman Beyond Omnibus (1999)", false)]
|
||||
[InlineData("Batman Beyond Omnibus", false)]
|
||||
[InlineData("01 Annual Batman Beyond", false)]
|
||||
[InlineData("Blood Syndicate Annual #001", false)]
|
||||
public void IsComicSpecialTest(string input, bool expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsComicSpecial(input));
|
||||
Assert.Equal(expected, Parser.IsSpecial(input, LibraryType.Comic));
|
||||
}
|
||||
}
|
107
API.Tests/Parsing/ImageParsingTests.cs
Normal file
|
@ -0,0 +1,107 @@
|
|||
using System.IO.Abstractions.TestingHelpers;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Parsing;
|
||||
|
||||
public class ImageParsingTests
|
||||
{
|
||||
private readonly ITestOutputHelper _testOutputHelper;
|
||||
private readonly ImageParser _parser;
|
||||
|
||||
public ImageParsingTests(ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
_testOutputHelper = testOutputHelper;
|
||||
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||
_parser = new ImageParser(directoryService);
|
||||
}
|
||||
|
||||
//[Fact]
|
||||
public void Parse_ParseInfo_Manga_ImageOnly()
|
||||
{
|
||||
// Images don't have root path as E:\Manga, but rather as the path of the folder
|
||||
|
||||
// Note: Fallback to folder will parse Monster #8 and get Monster
|
||||
var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
|
||||
var expectedInfo2 = new ParserInfo
|
||||
{
|
||||
Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
var actual2 = _parser.Parse(filepath, @"E:\Manga\Monster #8", "E:/Manga", LibraryType.Image, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
_testOutputHelper.WriteLine("Format ✓");
|
||||
Assert.Equal(expectedInfo2.Series, actual2.Series);
|
||||
_testOutputHelper.WriteLine("Series ✓");
|
||||
Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
|
||||
_testOutputHelper.WriteLine("Chapters ✓");
|
||||
Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
|
||||
_testOutputHelper.WriteLine("Volumes ✓");
|
||||
Assert.Equal(expectedInfo2.Edition, actual2.Edition);
|
||||
_testOutputHelper.WriteLine("Edition ✓");
|
||||
Assert.Equal(expectedInfo2.Filename, actual2.Filename);
|
||||
_testOutputHelper.WriteLine("Filename ✓");
|
||||
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||
|
||||
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
|
||||
expectedInfo2 = new ParserInfo
|
||||
{
|
||||
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||
Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
|
||||
actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
_testOutputHelper.WriteLine("Format ✓");
|
||||
Assert.Equal(expectedInfo2.Series, actual2.Series);
|
||||
_testOutputHelper.WriteLine("Series ✓");
|
||||
Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
|
||||
_testOutputHelper.WriteLine("Chapters ✓");
|
||||
Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
|
||||
_testOutputHelper.WriteLine("Volumes ✓");
|
||||
Assert.Equal(expectedInfo2.Edition, actual2.Edition);
|
||||
_testOutputHelper.WriteLine("Edition ✓");
|
||||
Assert.Equal(expectedInfo2.Filename, actual2.Filename);
|
||||
_testOutputHelper.WriteLine("Filename ✓");
|
||||
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||
|
||||
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
|
||||
expectedInfo2 = new ParserInfo
|
||||
{
|
||||
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||
Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
};
|
||||
|
||||
actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, null);
|
||||
Assert.NotNull(actual2);
|
||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||
_testOutputHelper.WriteLine("Format ✓");
|
||||
Assert.Equal(expectedInfo2.Series, actual2.Series);
|
||||
_testOutputHelper.WriteLine("Series ✓");
|
||||
Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
|
||||
_testOutputHelper.WriteLine("Chapters ✓");
|
||||
Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
|
||||
_testOutputHelper.WriteLine("Volumes ✓");
|
||||
Assert.Equal(expectedInfo2.Edition, actual2.Edition);
|
||||
_testOutputHelper.WriteLine("Edition ✓");
|
||||
Assert.Equal(expectedInfo2.Filename, actual2.Filename);
|
||||
_testOutputHelper.WriteLine("Filename ✓");
|
||||
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||
}
|
||||
}
|
|
@ -1,18 +1,10 @@
|
|||
using API.Entities.Enums;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Parser;
|
||||
namespace API.Tests.Parsing;
|
||||
|
||||
public class MangaParserTests
|
||||
public class MangaParsingTests
|
||||
{
|
||||
private readonly ITestOutputHelper _testOutputHelper;
|
||||
|
||||
public MangaParserTests(ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
_testOutputHelper = testOutputHelper;
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
|
||||
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "1")]
|
||||
|
@ -25,7 +17,7 @@ public class MangaParserTests
|
|||
[InlineData("v001", "1")]
|
||||
[InlineData("Vol 1", "1")]
|
||||
[InlineData("vol_356-1", "356")] // Mangapy syntax
|
||||
[InlineData("No Volume", "0")]
|
||||
[InlineData("No Volume", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")]
|
||||
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1.1")]
|
||||
[InlineData("Tonikaku Cawaii [Volume 11].cbz", "11")]
|
||||
|
@ -40,18 +32,18 @@ public class MangaParserTests
|
|||
[InlineData("Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", "1")]
|
||||
[InlineData("Dorohedoro v11 (2013) (Digital) (LostNerevarine-Empire).cbz", "11")]
|
||||
[InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")]
|
||||
[InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "0")]
|
||||
[InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1")]
|
||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "0")]
|
||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("VanDread-v01-c001[MD].zip", "1")]
|
||||
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
|
||||
[InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
|
||||
[InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
|
||||
[InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
|
||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", "0")]
|
||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Vagabond_v03", "3")]
|
||||
[InlineData("Mujaki No Rakune Volume 10.cbz", "10")]
|
||||
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "0")]
|
||||
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")]
|
||||
[InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")]
|
||||
[InlineData("Gantz.V26.cbz", "26")]
|
||||
|
@ -60,7 +52,7 @@ public class MangaParserTests
|
|||
[InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")]
|
||||
[InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "1")]
|
||||
[InlineData("Sword Art Online Vol 10 - Alicization Running [Yen Press] [LuCaZ] {r2}.epub", "10")]
|
||||
[InlineData("Noblesse - Episode 406 (52 Pages).7z", "0")]
|
||||
[InlineData("Noblesse - Episode 406 (52 Pages).7z", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("X-Men v1 #201 (September 2007).cbz", "1")]
|
||||
[InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")]
|
||||
[InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")]
|
||||
|
@ -72,21 +64,23 @@ public class MangaParserTests
|
|||
[InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1-3巻", "1-3")]
|
||||
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "3.5")]
|
||||
[InlineData("Kebab Том 1 Глава 3", "1")]
|
||||
[InlineData("Манга Глава 2", "0")]
|
||||
[InlineData("Манга Глава 2", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||
[InlineData("Манга Тома 1-4", "1-4")]
|
||||
[InlineData("Манга Том 1-4", "1-4")]
|
||||
[InlineData("조선왕조실톡 106화", "106")]
|
||||
[InlineData("죽음 13회", "13")]
|
||||
[InlineData("동의보감 13장", "13")]
|
||||
[InlineData("몰?루 아카이브 7.5권", "7.5")]
|
||||
[InlineData("주술회전 1.5권", "1.5")]
|
||||
[InlineData("63권#200", "63")]
|
||||
[InlineData("시즌34삽화2", "34")]
|
||||
[InlineData("Accel World Chapter 001 Volume 002", "2")]
|
||||
[InlineData("Accel World Volume 2", "2")]
|
||||
[InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.31 Omake", "30")]
|
||||
[InlineData("Zom 100 - Bucket List of the Dead v01", "1")]
|
||||
public void ParseVolumeTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename));
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Manga));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
|
@ -139,7 +133,6 @@ public class MangaParserTests
|
|||
[InlineData("Vagabond_v03", "Vagabond")]
|
||||
[InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")]
|
||||
[InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")]
|
||||
[InlineData("[BAA]_Darker_than_Black_Omake-1.zip", "Darker than Black")]
|
||||
[InlineData("Baketeriya ch01-05.zip", "Baketeriya")]
|
||||
[InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")]
|
||||
[InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")]
|
||||
|
@ -206,21 +199,30 @@ public class MangaParserTests
|
|||
[InlineData("test 2 years 1권", "test 2 years")]
|
||||
[InlineData("test 2 years 1화", "test 2 years")]
|
||||
[InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.30 Omake", "Nagasarete Airantou")]
|
||||
[InlineData("Cynthia The Mission - c000 - c006 (v06)", "Cynthia The Mission")]
|
||||
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1", "เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท")]
|
||||
[InlineData("Max Level Returner เล่มที่ 5", "Max Level Returner")]
|
||||
[InlineData("หนึ่งความคิด นิจนิรันดร์ เล่ม 2", "หนึ่งความคิด นิจนิรันดร์")]
|
||||
[InlineData("不安の種\uff0b - 01", "不安の種\uff0b")]
|
||||
[InlineData("Giant Ojou-sama - Ch. 33.5 - Volume 04 Bonus Chapter", "Giant Ojou-sama")]
|
||||
[InlineData("[218565]-(C92) [BRIO (Puyocha)] Mika-nee no Tanryoku Shidou - Mika s Guide to Self-Confidence (THE IDOLM@STE", "")]
|
||||
[InlineData("Monster #8 Ch. 001", "Monster #8")]
|
||||
[InlineData("Zom 100 - Bucket List of the Dead v01", "Zom 100 - Bucket List of the Dead")]
|
||||
public void ParseSeriesTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename));
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Manga));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
|
||||
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")]
|
||||
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "90-98")]
|
||||
[InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "0")]
|
||||
[InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "0")]
|
||||
[InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1-8")]
|
||||
[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "0")]
|
||||
[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("c001", "1")]
|
||||
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", "0")]
|
||||
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")]
|
||||
[InlineData("Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz", "18")]
|
||||
[InlineData("Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip", "0-6")]
|
||||
|
@ -243,7 +245,7 @@ public class MangaParserTests
|
|||
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1-6")]
|
||||
[InlineData("APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", "40")]
|
||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "12")]
|
||||
[InlineData("Vol 1", "0")]
|
||||
[InlineData("Vol 1", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("VanDread-v01-c001[MD].zip", "1")]
|
||||
[InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")]
|
||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01", "1")]
|
||||
|
@ -255,10 +257,10 @@ public class MangaParserTests
|
|||
[InlineData("Fullmetal Alchemist chapters 101-108.cbz", "101-108")]
|
||||
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")]
|
||||
[InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")]
|
||||
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")]
|
||||
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Beelzebub_153b_RHS.zip", "153.5")]
|
||||
[InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")]
|
||||
[InlineData("Transferred to another world magical swordsman v1.1", "0")]
|
||||
[InlineData("Transferred to another world magical swordsman v1.1", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")]
|
||||
[InlineData("Kiss x Sis - Ch.12 - 1 , 2 , 3P!.cbz", "12")]
|
||||
[InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "1")]
|
||||
|
@ -277,26 +279,31 @@ public class MangaParserTests
|
|||
[InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")]
|
||||
[InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")]
|
||||
[InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")]
|
||||
[InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", "0")]
|
||||
[InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Kaiju No. 8 036 (2021) (Digital)", "36")]
|
||||
[InlineData("Samurai Jack Vol. 01 - The threads of Time", "0")]
|
||||
[InlineData("Samurai Jack Vol. 01 - The threads of Time", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("【TFO汉化&Petit汉化】迷你偶像漫画第25话", "25")]
|
||||
[InlineData("자유록 13회#2", "13")]
|
||||
[InlineData("이세계에서 고아원을 열었지만, 어째서인지 아무도 독립하려 하지 않는다 38-1화 ", "38")]
|
||||
[InlineData("[ハレム]ナナとカオル ~高校生のSMごっこ~ 第10話", "10")]
|
||||
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "0")]
|
||||
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Kebab Том 1 Глава 3", "3")]
|
||||
[InlineData("Манга Глава 2", "2")]
|
||||
[InlineData("Манга 2 Глава", "2")]
|
||||
[InlineData("Манга Том 1 2 Глава", "2")]
|
||||
[InlineData("Accel World Chapter 001 Volume 002", "1")]
|
||||
[InlineData("Bleach 001-003", "1-3")]
|
||||
[InlineData("Accel World Volume 2", "0")]
|
||||
[InlineData("Accel World Volume 2", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98", "90-98")]
|
||||
[InlineData("Historys Strongest Disciple Kenichi c01-c04", "1-4")]
|
||||
[InlineData("Adabana c00-02", "0-2")]
|
||||
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
|
||||
[InlineData("Max Level Returner ตอนที่ 5", "5")]
|
||||
[InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
|
||||
[InlineData("Monster #8 Ch. 001", "1")]
|
||||
public void ParseChaptersTest(string filename, string expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename));
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename, LibraryType.Manga));
|
||||
}
|
||||
|
||||
|
||||
|
@ -316,25 +323,25 @@ public class MangaParserTests
|
|||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input));
|
||||
}
|
||||
[Theory]
|
||||
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)]
|
||||
[InlineData("Beelzebub_Omake_June_2012_RHS", true)]
|
||||
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", false)]
|
||||
[InlineData("Beelzebub_Omake_June_2012_RHS", false)]
|
||||
[InlineData("Beelzebub_Side_Story_02_RHS.zip", false)]
|
||||
[InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)]
|
||||
[InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)]
|
||||
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)]
|
||||
[InlineData("Ani-Hina Art Collection.cbz", true)]
|
||||
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)]
|
||||
[InlineData("A Town Where You Live - Bonus Chapter.zip", true)]
|
||||
[InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", false)]
|
||||
[InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", false)]
|
||||
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", false)]
|
||||
[InlineData("Ani-Hina Art Collection.cbz", false)]
|
||||
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", false)]
|
||||
[InlineData("A Town Where You Live - Bonus Chapter.zip", false)]
|
||||
[InlineData("Yuki Merry - 4-Komga Anthology", false)]
|
||||
[InlineData("Beastars - SP01", false)]
|
||||
[InlineData("Beastars SP01", false)]
|
||||
[InlineData("Beastars - SP01", true)]
|
||||
[InlineData("Beastars SP01", true)]
|
||||
[InlineData("The League of Extraordinary Gentlemen", false)]
|
||||
[InlineData("The League of Extra-ordinary Gentlemen", false)]
|
||||
[InlineData("Dr. Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire)", false)]
|
||||
[InlineData("Hajime no Ippo - Artbook", false)]
|
||||
public void IsMangaSpecialTest(string input, bool expected)
|
||||
{
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsMangaSpecial(input));
|
||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsSpecial(input, LibraryType.Manga));
|
||||
}
|
||||
|
||||
[Theory]
|
|
@ -2,7 +2,7 @@
|
|||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Parser;
|
||||
namespace API.Tests.Parsing;
|
||||
|
||||
public class ParserInfoTests
|
||||
{
|
||||
|
@ -11,14 +11,14 @@ public class ParserInfoTests
|
|||
{
|
||||
var p1 = new ParserInfo()
|
||||
{
|
||||
Chapters = "0",
|
||||
Chapters = Parser.DefaultChapter,
|
||||
Edition = "",
|
||||
Format = MangaFormat.Archive,
|
||||
FullFilePath = "/manga/darker than black.cbz",
|
||||
IsSpecial = false,
|
||||
Series = "darker than black",
|
||||
Title = "darker than black",
|
||||
Volumes = "0"
|
||||
Volumes = Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var p2 = new ParserInfo()
|
||||
|
@ -30,7 +30,7 @@ public class ParserInfoTests
|
|||
IsSpecial = false,
|
||||
Series = "darker than black",
|
||||
Title = "Darker Than Black",
|
||||
Volumes = "0"
|
||||
Volumes = Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var expected = new ParserInfo()
|
||||
|
@ -42,7 +42,7 @@ public class ParserInfoTests
|
|||
IsSpecial = false,
|
||||
Series = "darker than black",
|
||||
Title = "darker than black",
|
||||
Volumes = "0"
|
||||
Volumes = Parser.LooseLeafVolume
|
||||
};
|
||||
p1.Merge(p2);
|
||||
|
||||
|
@ -62,12 +62,12 @@ public class ParserInfoTests
|
|||
IsSpecial = true,
|
||||
Series = "darker than black",
|
||||
Title = "darker than black",
|
||||
Volumes = "0"
|
||||
Volumes = Parser.LooseLeafVolume
|
||||
};
|
||||
|
||||
var p2 = new ParserInfo()
|
||||
{
|
||||
Chapters = "0",
|
||||
Chapters = Parser.DefaultChapter,
|
||||
Edition = "",
|
||||
Format = MangaFormat.Archive,
|
||||
FullFilePath = "/manga/darker than black.cbz",
|
|
@ -3,18 +3,32 @@ using System.Linq;
|
|||
using Xunit;
|
||||
using static API.Services.Tasks.Scanner.Parser.Parser;
|
||||
|
||||
namespace API.Tests.Parser;
|
||||
namespace API.Tests.Parsing;
|
||||
|
||||
public class ParserTests
|
||||
public class ParsingTests
|
||||
{
|
||||
[Fact]
|
||||
public void ShouldWork()
|
||||
{
|
||||
var s = 6.5f + "";
|
||||
var s = 6.5f.ToString(CultureInfo.InvariantCulture);
|
||||
var a = float.Parse(s, CultureInfo.InvariantCulture);
|
||||
Assert.Equal(6.5f, a);
|
||||
|
||||
s = 6.5f + "";
|
||||
a = float.Parse(s, CultureInfo.CurrentCulture);
|
||||
Assert.Equal(6.5f, a);
|
||||
}
|
||||
|
||||
// [Theory]
|
||||
// [InlineData("de-DE")]
|
||||
// [InlineData("en-US")]
|
||||
// public void ShouldParse(string culture)
|
||||
// {
|
||||
// var s = 6.5f + "";
|
||||
// var a = float.Parse(s, CultureInfo.CreateSpecificCulture(culture));
|
||||
// Assert.Equal(6.5f, a);
|
||||
// }
|
||||
|
||||
[Theory]
|
||||
[InlineData("Joe Shmo, Green Blue", "Joe Shmo, Green Blue")]
|
||||
[InlineData("Shmo, Joe", "Shmo, Joe")]
|
||||
|
@ -29,6 +43,7 @@ public class ParserTests
|
|||
[InlineData("DEAD Tube Prologue", "DEAD Tube Prologue")]
|
||||
[InlineData("DEAD Tube Prologue SP01", "DEAD Tube Prologue")]
|
||||
[InlineData("DEAD_Tube_Prologue SP01", "DEAD Tube Prologue")]
|
||||
[InlineData("SP01 1. DEAD Tube Prologue", "1. DEAD Tube Prologue")]
|
||||
public void CleanSpecialTitleTest(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, CleanSpecialTitle(input));
|
||||
|
@ -45,6 +60,18 @@ public class ParserTests
|
|||
Assert.Equal(expected, HasSpecialMarker(input));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Beastars - SP01", 1)]
|
||||
[InlineData("Beastars SP01", 1)]
|
||||
[InlineData("Beastars Special 01", 0)]
|
||||
[InlineData("Beastars Extra 01", 0)]
|
||||
[InlineData("Batman Beyond - Return of the Joker (2001) SP01", 1)]
|
||||
[InlineData("Batman Beyond - Return of the Joker (2001)", 0)]
|
||||
public void ParseSpecialIndexTest(string input, int expected)
|
||||
{
|
||||
Assert.Equal(expected, ParseSpecialIndex(input));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("0001", "1")]
|
||||
[InlineData("1", "1")]
|
||||
|
@ -71,7 +98,8 @@ public class ParserTests
|
|||
[InlineData("-The Title", false, "The Title")]
|
||||
[InlineData("- The Title", false, "The Title")]
|
||||
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")]
|
||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)", true, "Batman - Detective Comics - Rebirth Deluxe Edition")]
|
||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)",
|
||||
true, "Batman - Detective Comics - Rebirth Deluxe Edition Book 04")]
|
||||
[InlineData("Something - Full Color Edition", false, "Something - Full Color Edition")]
|
||||
[InlineData("Witchblade 089 (2005) (Bittertek-DCP) (Top Cow (Image Comics))", true, "Witchblade 089")]
|
||||
[InlineData("(C99) Kami-sama Hiroimashita. (SSSS.GRIDMAN)", false, "Kami-sama Hiroimashita.")]
|
||||
|
@ -155,6 +183,7 @@ public class ParserTests
|
|||
[InlineData("3.5", 3.5)]
|
||||
[InlineData("3.5-4.0", 3.5)]
|
||||
[InlineData("asdfasdf", 0.0)]
|
||||
[InlineData("-10", -10.0)]
|
||||
public void MinimumNumberFromRangeTest(string input, float expected)
|
||||
{
|
||||
Assert.Equal(expected, MinNumberFromRange(input));
|
||||
|
@ -171,6 +200,7 @@ public class ParserTests
|
|||
[InlineData("3.5", 3.5)]
|
||||
[InlineData("3.5-4.0", 4.0)]
|
||||
[InlineData("asdfasdf", 0.0)]
|
||||
[InlineData("-10", -10.0)]
|
||||
public void MaximumNumberFromRangeTest(string input, float expected)
|
||||
{
|
||||
Assert.Equal(expected, MaxNumberFromRange(input));
|
||||
|
@ -186,6 +216,7 @@ public class ParserTests
|
|||
[InlineData("카비타", "카비타")]
|
||||
[InlineData("06", "06")]
|
||||
[InlineData("", "")]
|
||||
[InlineData("不安の種+", "不安の種+")]
|
||||
public void NormalizeTest(string input, string expected)
|
||||
{
|
||||
Assert.Equal(expected, Normalize(input));
|
||||
|
@ -220,6 +251,7 @@ public class ParserTests
|
|||
[InlineData("ch1/backcover.png", false)]
|
||||
[InlineData("backcover.png", false)]
|
||||
[InlineData("back_cover.png", false)]
|
||||
[InlineData("LD Blacklands #1 35 (back cover).png", false)]
|
||||
public void IsCoverImageTest(string inputPath, bool expected)
|
||||
{
|
||||
Assert.Equal(expected, IsCoverImage(inputPath));
|
||||
|
@ -235,6 +267,7 @@ public class ParserTests
|
|||
[InlineData("@recycle/Love Hina/", true)]
|
||||
[InlineData("E:/Test/__MACOSX/Love Hina/", true)]
|
||||
[InlineData("E:/Test/.caltrash/Love Hina/", true)]
|
||||
[InlineData("E:/Test/.yacreaderlibrary/Love Hina/", true)]
|
||||
public void HasBlacklistedFolderInPathTest(string inputPath, bool expected)
|
||||
{
|
||||
Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
|
|
@ -15,7 +15,6 @@ using Microsoft.EntityFrameworkCore;
|
|||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Repository;
|
||||
|
||||
|
@ -114,65 +113,65 @@ public class CollectionTagRepositoryTests
|
|||
|
||||
#endregion
|
||||
|
||||
#region RemoveTagsWithoutSeries
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagsWithoutSeries_ShouldRemoveTags()
|
||||
{
|
||||
var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
|
||||
var series = new SeriesBuilder("Test 1").Build();
|
||||
var commonTag = new CollectionTagBuilder("Tag 1").Build();
|
||||
series.Metadata.CollectionTags.Add(commonTag);
|
||||
series.Metadata.CollectionTags.Add(new CollectionTagBuilder("Tag 2").Build());
|
||||
|
||||
var series2 = new SeriesBuilder("Test 1").Build();
|
||||
series2.Metadata.CollectionTags.Add(commonTag);
|
||||
library.Series.Add(series);
|
||||
library.Series.Add(series2);
|
||||
_unitOfWork.LibraryRepository.Add(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
Assert.Equal(2, series.Metadata.CollectionTags.Count);
|
||||
Assert.Single(series2.Metadata.CollectionTags);
|
||||
|
||||
// Delete both series
|
||||
_unitOfWork.SeriesRepository.Remove(series);
|
||||
_unitOfWork.SeriesRepository.Remove(series2);
|
||||
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Validate that both tags exist
|
||||
Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
|
||||
|
||||
await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
|
||||
|
||||
Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagsWithoutSeries_ShouldNotRemoveTags()
|
||||
{
|
||||
var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
|
||||
var series = new SeriesBuilder("Test 1").Build();
|
||||
var commonTag = new CollectionTagBuilder("Tag 1").Build();
|
||||
series.Metadata.CollectionTags.Add(commonTag);
|
||||
series.Metadata.CollectionTags.Add(new CollectionTagBuilder("Tag 2").Build());
|
||||
|
||||
var series2 = new SeriesBuilder("Test 1").Build();
|
||||
series2.Metadata.CollectionTags.Add(commonTag);
|
||||
library.Series.Add(series);
|
||||
library.Series.Add(series2);
|
||||
_unitOfWork.LibraryRepository.Add(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
Assert.Equal(2, series.Metadata.CollectionTags.Count);
|
||||
Assert.Single(series2.Metadata.CollectionTags);
|
||||
|
||||
await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
|
||||
|
||||
// Validate that both tags exist
|
||||
Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
|
||||
}
|
||||
|
||||
#endregion
|
||||
// #region RemoveTagsWithoutSeries
|
||||
//
|
||||
// [Fact]
|
||||
// public async Task RemoveTagsWithoutSeries_ShouldRemoveTags()
|
||||
// {
|
||||
// var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
|
||||
// var series = new SeriesBuilder("Test 1").Build();
|
||||
// var commonTag = new AppUserCollectionBuilder("Tag 1").Build();
|
||||
// series.Metadata.CollectionTags.Add(commonTag);
|
||||
// series.Metadata.CollectionTags.Add(new AppUserCollectionBuilder("Tag 2").Build());
|
||||
//
|
||||
// var series2 = new SeriesBuilder("Test 1").Build();
|
||||
// series2.Metadata.CollectionTags.Add(commonTag);
|
||||
// library.Series.Add(series);
|
||||
// library.Series.Add(series2);
|
||||
// _unitOfWork.LibraryRepository.Add(library);
|
||||
// await _unitOfWork.CommitAsync();
|
||||
//
|
||||
// Assert.Equal(2, series.Metadata.CollectionTags.Count);
|
||||
// Assert.Single(series2.Metadata.CollectionTags);
|
||||
//
|
||||
// // Delete both series
|
||||
// _unitOfWork.SeriesRepository.Remove(series);
|
||||
// _unitOfWork.SeriesRepository.Remove(series2);
|
||||
//
|
||||
// await _unitOfWork.CommitAsync();
|
||||
//
|
||||
// // Validate that both tags exist
|
||||
// Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
|
||||
//
|
||||
// await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
|
||||
//
|
||||
// Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
|
||||
// }
|
||||
//
|
||||
// [Fact]
|
||||
// public async Task RemoveTagsWithoutSeries_ShouldNotRemoveTags()
|
||||
// {
|
||||
// var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
|
||||
// var series = new SeriesBuilder("Test 1").Build();
|
||||
// var commonTag = new AppUserCollectionBuilder("Tag 1").Build();
|
||||
// series.Metadata.CollectionTags.Add(commonTag);
|
||||
// series.Metadata.CollectionTags.Add(new AppUserCollectionBuilder("Tag 2").Build());
|
||||
//
|
||||
// var series2 = new SeriesBuilder("Test 1").Build();
|
||||
// series2.Metadata.CollectionTags.Add(commonTag);
|
||||
// library.Series.Add(series);
|
||||
// library.Series.Add(series2);
|
||||
// _unitOfWork.LibraryRepository.Add(library);
|
||||
// await _unitOfWork.CommitAsync();
|
||||
//
|
||||
// Assert.Equal(2, series.Metadata.CollectionTags.Count);
|
||||
// Assert.Single(series2.Metadata.CollectionTags);
|
||||
//
|
||||
// await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
|
||||
//
|
||||
// // Validate that both tags exist
|
||||
// Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
|
||||
// }
|
||||
//
|
||||
// #endregion
|
||||
}
|
||||
|
|
|
@ -6,7 +6,6 @@ using System.Threading.Tasks;
|
|||
using API.Data;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
|
@ -159,4 +158,6 @@ public class SeriesRepositoryTests
|
|||
}
|
||||
}
|
||||
|
||||
// TODO: GetSeriesDtoForLibraryIdV2Async Tests (On Deck)
|
||||
|
||||
}
|
||||
|
|
|
@ -7,7 +7,6 @@ using System.Linq;
|
|||
using API.Archive;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using EasyCaching.Core;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NetVips;
|
||||
using NSubstitute;
|
||||
|
@ -29,7 +28,7 @@ public class ArchiveServiceTests
|
|||
{
|
||||
_testOutputHelper = testOutputHelper;
|
||||
_archiveService = new ArchiveService(_logger, _directoryService,
|
||||
new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService, Substitute.For<IEasyCachingProviderFactory>()),
|
||||
new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService),
|
||||
Substitute.For<IMediaErrorService>());
|
||||
}
|
||||
|
||||
|
@ -167,7 +166,7 @@ public class ArchiveServiceTests
|
|||
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
|
||||
{
|
||||
var ds = Substitute.For<DirectoryService>(_directoryServiceLogger, new FileSystem());
|
||||
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds, Substitute.For<IEasyCachingProviderFactory>());
|
||||
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds);
|
||||
var archiveService = Substitute.For<ArchiveService>(_logger, ds, imageService, Substitute.For<IMediaErrorService>());
|
||||
|
||||
var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"));
|
||||
|
@ -198,7 +197,7 @@ public class ArchiveServiceTests
|
|||
[InlineData("sorting.zip", "sorting.expected.png")]
|
||||
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
|
||||
{
|
||||
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService, Substitute.For<IEasyCachingProviderFactory>());
|
||||
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
|
||||
var archiveService = Substitute.For<ArchiveService>(_logger,
|
||||
new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService,
|
||||
Substitute.For<IMediaErrorService>());
|
||||
|
|
|
@ -1,10 +1,8 @@
|
|||
using System.Collections.Generic;
|
||||
using System.Data.Common;
|
||||
using System.Data.Common;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
|
@ -21,7 +19,7 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class BackupServiceTests
|
||||
public class BackupServiceTests: AbstractFsTest
|
||||
{
|
||||
private readonly ILogger<BackupService> _logger = Substitute.For<ILogger<BackupService>>();
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
|
@ -31,13 +29,6 @@ public class BackupServiceTests
|
|||
private readonly DbConnection _connection;
|
||||
private readonly DataContext _context;
|
||||
|
||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
||||
private const string LogDirectory = "C:/kavita/config/logs/";
|
||||
private const string ConfigDirectory = "C:/kavita/config/";
|
||||
private const string BookmarkDirectory = "C:/kavita/config/bookmarks";
|
||||
private const string ThemesDirectory = "C:/kavita/config/theme";
|
||||
|
||||
public BackupServiceTests()
|
||||
{
|
||||
|
@ -82,7 +73,7 @@ public class BackupServiceTests
|
|||
|
||||
_context.ServerSetting.Update(setting);
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
||||
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||
.Build());
|
||||
return await _context.SaveChangesAsync() > 0;
|
||||
}
|
||||
|
@ -94,22 +85,6 @@ public class BackupServiceTests
|
|||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static MockFileSystem CreateFileSystem()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
||||
fileSystem.AddDirectory("C:/kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(LogDirectory);
|
||||
fileSystem.AddDirectory(ThemesDirectory);
|
||||
fileSystem.AddDirectory(BookmarkDirectory);
|
||||
fileSystem.AddDirectory("C:/data/");
|
||||
|
||||
return fileSystem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
using System.IO;
|
||||
using System.IO.Abstractions;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using EasyCaching.Core;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
@ -17,7 +18,7 @@ public class BookServiceTests
|
|||
{
|
||||
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
|
||||
_bookService = new BookService(_logger, directoryService,
|
||||
new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService, Substitute.For<IEasyCachingProviderFactory>())
|
||||
new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService)
|
||||
, Substitute.For<IMediaErrorService>());
|
||||
}
|
||||
|
||||
|
@ -81,4 +82,64 @@ public class BookServiceTests
|
|||
Assert.Equal("Accel World", comicInfo.Series);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ShouldHaveComicInfoForPdf()
|
||||
{
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||
var document = Path.Join(testDirectory, "test.pdf");
|
||||
var comicInfo = _bookService.GetComicInfo(document);
|
||||
Assert.NotNull(comicInfo);
|
||||
Assert.Equal("Variations Chromatiques de concert", comicInfo.Title);
|
||||
Assert.Equal("Georges Bizet \\(1838-1875\\)", comicInfo.Writer);
|
||||
}
|
||||
|
||||
//[Fact]
|
||||
public void ShouldUsePdfInfoDict()
|
||||
{
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Library/Books/PDFs");
|
||||
var document = Path.Join(testDirectory, "Rollo at Work SP01.pdf");
|
||||
var comicInfo = _bookService.GetComicInfo(document);
|
||||
Assert.NotNull(comicInfo);
|
||||
Assert.Equal("Rollo at Work", comicInfo.Title);
|
||||
Assert.Equal("Jacob Abbott", comicInfo.Writer);
|
||||
Assert.Equal(2008, comicInfo.Year);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ShouldHandleIndirectPdfObjects()
|
||||
{
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||
var document = Path.Join(testDirectory, "indirect.pdf");
|
||||
var comicInfo = _bookService.GetComicInfo(document);
|
||||
Assert.NotNull(comicInfo);
|
||||
Assert.Equal(2018, comicInfo.Year);
|
||||
Assert.Equal(8, comicInfo.Month);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FailGracefullyWithEncryptedPdf()
|
||||
{
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||
var document = Path.Join(testDirectory, "encrypted.pdf");
|
||||
var comicInfo = _bookService.GetComicInfo(document);
|
||||
Assert.Null(comicInfo);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SeriesFallBackToMetadataTitle()
|
||||
{
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
|
||||
var pdfParser = new PdfParser(ds);
|
||||
|
||||
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||
var filePath = Path.Join(testDirectory, "Bizet-Variations_Chromatiques_de_concert_Theme_A4.pdf");
|
||||
|
||||
var comicInfo = _bookService.GetComicInfo(filePath);
|
||||
Assert.NotNull(comicInfo);
|
||||
|
||||
var parserInfo = pdfParser.Parse(filePath, testDirectory, ds.GetParentDirectoryName(testDirectory), LibraryType.Book, comicInfo);
|
||||
Assert.NotNull(parserInfo);
|
||||
Assert.Equal(parserInfo.Title, comicInfo.Title);
|
||||
Assert.Equal(parserInfo.Series, comicInfo.Title);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,12 +9,9 @@ using API.Data.Repositories;
|
|||
using API.DTOs.Reader;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.SignalR;
|
||||
using AutoMapper;
|
||||
using Microsoft.Data.Sqlite;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
@ -25,17 +22,12 @@ using Xunit;
|
|||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class BookmarkServiceTests
|
||||
public class BookmarkServiceTests: AbstractFsTest
|
||||
{
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
private readonly DbConnection _connection;
|
||||
private readonly DataContext _context;
|
||||
|
||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
||||
private const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
|
||||
|
||||
|
||||
public BookmarkServiceTests()
|
||||
{
|
||||
|
@ -88,7 +80,7 @@ Substitute.For<IMediaConversionService>());
|
|||
_context.ServerSetting.Update(setting);
|
||||
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
||||
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||
.Build());
|
||||
return await _context.SaveChangesAsync() > 0;
|
||||
}
|
||||
|
@ -102,20 +94,6 @@ Substitute.For<IMediaConversionService>());
|
|||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static MockFileSystem CreateFileSystem()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
||||
fileSystem.AddDirectory("C:/kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(BookmarkDirectory);
|
||||
fileSystem.AddDirectory("C:/data/");
|
||||
|
||||
return fileSystem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region BookmarkPage
|
||||
|
@ -132,7 +110,7 @@ Substitute.For<IMediaConversionService>());
|
|||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithFormat(MangaFormat.Epub)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.Build())
|
||||
.Build())
|
||||
|
@ -181,7 +159,7 @@ Substitute.For<IMediaConversionService>());
|
|||
.WithFormat(MangaFormat.Epub)
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithMinNumber(1)
|
||||
.WithChapter(new ChapterBuilder("0")
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
|
|
@ -1,12 +1,10 @@
|
|||
using System.Collections.Generic;
|
||||
using System.Data.Common;
|
||||
using System.Data.Common;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Data.Metadata;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
|
@ -52,17 +50,17 @@ internal class MockReadingItemServiceForCacheService : IReadingItemService
|
|||
throw new System.NotImplementedException();
|
||||
}
|
||||
|
||||
public ParserInfo Parse(string path, string rootPath, LibraryType type)
|
||||
public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type)
|
||||
{
|
||||
throw new System.NotImplementedException();
|
||||
}
|
||||
|
||||
public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
|
||||
public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
|
||||
{
|
||||
throw new System.NotImplementedException();
|
||||
}
|
||||
}
|
||||
public class CacheServiceTests
|
||||
public class CacheServiceTests: AbstractFsTest
|
||||
{
|
||||
private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
|
@ -71,11 +69,6 @@ public class CacheServiceTests
|
|||
private readonly DbConnection _connection;
|
||||
private readonly DataContext _context;
|
||||
|
||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
||||
private const string DataDirectory = "C:/data/";
|
||||
|
||||
public CacheServiceTests()
|
||||
{
|
||||
var contextOptions = new DbContextOptionsBuilder()
|
||||
|
@ -118,7 +111,7 @@ public class CacheServiceTests
|
|||
_context.ServerSetting.Update(setting);
|
||||
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
||||
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||
.Build());
|
||||
return await _context.SaveChangesAsync() > 0;
|
||||
}
|
||||
|
@ -130,19 +123,6 @@ public class CacheServiceTests
|
|||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static MockFileSystem CreateFileSystem()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
||||
fileSystem.AddDirectory("C:/kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(DataDirectory);
|
||||
|
||||
return fileSystem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Ensure
|
||||
|
@ -156,7 +136,9 @@ public class CacheServiceTests
|
|||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
|
||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
||||
Substitute.For<IBookService>(),
|
||||
Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||
Substitute.For<IBookmarkService>());
|
||||
|
||||
await ResetDB();
|
||||
var s = new SeriesBuilder("Test").Build();
|
||||
|
@ -231,7 +213,8 @@ public class CacheServiceTests
|
|||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
|
||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||
Substitute.For<IBookmarkService>());
|
||||
|
||||
cleanupService.CleanupChapters(new []{1, 3});
|
||||
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
|
||||
|
@ -252,14 +235,15 @@ public class CacheServiceTests
|
|||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||
Substitute.For<IBookmarkService>());
|
||||
|
||||
var c = new ChapterBuilder("1")
|
||||
.WithFile(new MangaFileBuilder($"{DataDirectory}1.epub", MangaFormat.Epub).Build())
|
||||
.WithFile(new MangaFileBuilder($"{DataDirectory}2.epub", MangaFormat.Epub).Build())
|
||||
.Build();
|
||||
cs.GetCachedFile(c);
|
||||
Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c));
|
||||
Assert.Equal($"{DataDirectory}1.epub", cs.GetCachedFile(c));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
@ -292,7 +276,8 @@ public class CacheServiceTests
|
|||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||
Substitute.For<IBookmarkService>());
|
||||
|
||||
// Flatten to prepare for how GetFullPath expects
|
||||
ds.Flatten($"{CacheDirectory}1/");
|
||||
|
@ -335,7 +320,8 @@ public class CacheServiceTests
|
|||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||
Substitute.For<IBookmarkService>());
|
||||
|
||||
// Flatten to prepare for how GetFullPath expects
|
||||
ds.Flatten($"{CacheDirectory}1/");
|
||||
|
@ -375,7 +361,8 @@ public class CacheServiceTests
|
|||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||
Substitute.For<IBookmarkService>());
|
||||
|
||||
// Flatten to prepare for how GetFullPath expects
|
||||
ds.Flatten($"{CacheDirectory}1/");
|
||||
|
@ -419,7 +406,8 @@ public class CacheServiceTests
|
|||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||
Substitute.For<IBookmarkService>());
|
||||
|
||||
// Flatten to prepare for how GetFullPath expects
|
||||
ds.Flatten($"{CacheDirectory}1/");
|
||||
|
|
|
@ -1,16 +1,13 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Data.Repositories;
|
||||
using API.DTOs.Filtering;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
|
@ -30,11 +27,10 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
|
||||
private readonly IReaderService _readerService;
|
||||
|
||||
|
||||
public CleanupServiceTests() : base()
|
||||
{
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
||||
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||
.Build());
|
||||
|
||||
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(), Substitute.For<IEventHub>(),
|
||||
|
@ -139,7 +135,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
// Add 2 series with cover images
|
||||
_context.Series.Add(new SeriesBuilder("Test 1")
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("0").WithCoverImage("v01_c01.jpg").Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c01.jpg").Build())
|
||||
.WithCoverImage("v01_c01.jpg")
|
||||
.Build())
|
||||
.WithCoverImage("series_01.jpg")
|
||||
|
@ -148,7 +144,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
|
||||
_context.Series.Add(new SeriesBuilder("Test 2")
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("0").WithCoverImage("v01_c03.jpg").Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c03.jpg").Build())
|
||||
.WithCoverImage("v01_c03.jpg")
|
||||
.Build())
|
||||
.WithCoverImage("series_03.jpg")
|
||||
|
@ -167,53 +163,53 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
}
|
||||
#endregion
|
||||
|
||||
#region DeleteTagCoverImages
|
||||
|
||||
[Fact]
|
||||
public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
|
||||
{
|
||||
var filesystem = CreateFileSystem();
|
||||
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1)}.jpg", new MockFileData(""));
|
||||
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(2)}.jpg", new MockFileData(""));
|
||||
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
|
||||
|
||||
// Delete all Series to reset state
|
||||
await ResetDb();
|
||||
|
||||
// Add 2 series with cover images
|
||||
|
||||
_context.Series.Add(new SeriesBuilder("Test 1")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithCollectionTag(new CollectionTagBuilder("Something")
|
||||
.WithCoverImage($"{ImageService.GetCollectionTagFormat(1)}.jpg")
|
||||
.Build())
|
||||
.Build())
|
||||
.WithCoverImage($"{ImageService.GetSeriesFormat(1)}.jpg")
|
||||
.WithLibraryId(1)
|
||||
.Build());
|
||||
|
||||
_context.Series.Add(new SeriesBuilder("Test 2")
|
||||
.WithMetadata(new SeriesMetadataBuilder()
|
||||
.WithCollectionTag(new CollectionTagBuilder("Something")
|
||||
.WithCoverImage($"{ImageService.GetCollectionTagFormat(2)}.jpg")
|
||||
.Build())
|
||||
.Build())
|
||||
.WithCoverImage($"{ImageService.GetSeriesFormat(3)}.jpg")
|
||||
.WithLibraryId(1)
|
||||
.Build());
|
||||
|
||||
|
||||
await _context.SaveChangesAsync();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
ds);
|
||||
|
||||
await cleanupService.DeleteTagCoverImages();
|
||||
|
||||
Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
|
||||
}
|
||||
|
||||
#endregion
|
||||
// #region DeleteTagCoverImages
|
||||
//
|
||||
// [Fact]
|
||||
// public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
|
||||
// {
|
||||
// var filesystem = CreateFileSystem();
|
||||
// filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1)}.jpg", new MockFileData(""));
|
||||
// filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(2)}.jpg", new MockFileData(""));
|
||||
// filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
|
||||
//
|
||||
// // Delete all Series to reset state
|
||||
// await ResetDb();
|
||||
//
|
||||
// // Add 2 series with cover images
|
||||
//
|
||||
// _context.Series.Add(new SeriesBuilder("Test 1")
|
||||
// .WithMetadata(new SeriesMetadataBuilder()
|
||||
// .WithCollectionTag(new AppUserCollectionBuilder("Something")
|
||||
// .WithCoverImage($"{ImageService.GetCollectionTagFormat(1)}.jpg")
|
||||
// .Build())
|
||||
// .Build())
|
||||
// .WithCoverImage($"{ImageService.GetSeriesFormat(1)}.jpg")
|
||||
// .WithLibraryId(1)
|
||||
// .Build());
|
||||
//
|
||||
// _context.Series.Add(new SeriesBuilder("Test 2")
|
||||
// .WithMetadata(new SeriesMetadataBuilder()
|
||||
// .WithCollectionTag(new AppUserCollectionBuilder("Something")
|
||||
// .WithCoverImage($"{ImageService.GetCollectionTagFormat(2)}.jpg")
|
||||
// .Build())
|
||||
// .Build())
|
||||
// .WithCoverImage($"{ImageService.GetSeriesFormat(3)}.jpg")
|
||||
// .WithLibraryId(1)
|
||||
// .Build());
|
||||
//
|
||||
//
|
||||
// await _context.SaveChangesAsync();
|
||||
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
// var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||
// ds);
|
||||
//
|
||||
// await cleanupService.DeleteTagCoverImages();
|
||||
//
|
||||
// Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
|
||||
// }
|
||||
//
|
||||
// #endregion
|
||||
|
||||
#region DeleteReadingListCoverImages
|
||||
[Fact]
|
||||
|
@ -389,13 +385,12 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
[Fact]
|
||||
public async Task CleanupDbEntries_CleanupAbandonedChapters()
|
||||
{
|
||||
var c = new ChapterBuilder("0")
|
||||
var c = new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithPages(1)
|
||||
.Build();
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithFormat(MangaFormat.Epub)
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithMinNumber(1)
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(c)
|
||||
.Build())
|
||||
.Build();
|
||||
|
@ -436,24 +431,26 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
[Fact]
|
||||
public async Task CleanupDbEntries_RemoveTagsWithoutSeries()
|
||||
{
|
||||
var c = new CollectionTag()
|
||||
var s = new SeriesBuilder("Test")
|
||||
.WithFormat(MangaFormat.Epub)
|
||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.Build();
|
||||
s.Library = new LibraryBuilder("Test LIb").Build();
|
||||
_context.Series.Add(s);
|
||||
|
||||
var c = new AppUserCollection()
|
||||
{
|
||||
Title = "Test Tag",
|
||||
NormalizedTitle = "Test Tag".ToNormalized(),
|
||||
AgeRating = AgeRating.Unknown,
|
||||
Items = new List<Series>() {s}
|
||||
};
|
||||
var s = new SeriesBuilder("Test")
|
||||
.WithFormat(MangaFormat.Epub)
|
||||
.WithMetadata(new SeriesMetadataBuilder().WithCollectionTag(c).Build())
|
||||
.Build();
|
||||
s.Library = new LibraryBuilder("Test LIb").Build();
|
||||
|
||||
_context.Series.Add(s);
|
||||
|
||||
_context.AppUser.Add(new AppUser()
|
||||
{
|
||||
UserName = "majora2007"
|
||||
UserName = "majora2007",
|
||||
Collections = new List<AppUserCollection>() {c}
|
||||
});
|
||||
|
||||
await _context.SaveChangesAsync();
|
||||
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
||||
|
@ -466,7 +463,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
|
||||
await cleanupService.CleanupDbEntries();
|
||||
|
||||
Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
|
||||
Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllCollectionsAsync());
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
@ -520,6 +517,71 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
}
|
||||
#endregion
|
||||
|
||||
#region ConsolidateProgress
|
||||
|
||||
[Fact]
|
||||
public async Task ConsolidateProgress_ShouldRemoveDuplicates()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
var s = new SeriesBuilder("Test ConsolidateProgress_ShouldRemoveDuplicates")
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithPages(3)
|
||||
.Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
s.Library = new LibraryBuilder("Test Lib").Build();
|
||||
_context.Series.Add(s);
|
||||
|
||||
var user = new AppUser()
|
||||
{
|
||||
UserName = "ConsolidateProgress_ShouldRemoveDuplicates",
|
||||
};
|
||||
_context.AppUser.Add(user);
|
||||
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Add 2 progress events
|
||||
user.Progresses ??= [];
|
||||
user.Progresses.Add(new AppUserProgress()
|
||||
{
|
||||
ChapterId = 1,
|
||||
VolumeId = 1,
|
||||
SeriesId = 1,
|
||||
LibraryId = s.LibraryId,
|
||||
PagesRead = 1,
|
||||
});
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Add a duplicate with higher page number
|
||||
user.Progresses.Add(new AppUserProgress()
|
||||
{
|
||||
ChapterId = 1,
|
||||
VolumeId = 1,
|
||||
SeriesId = 1,
|
||||
LibraryId = s.LibraryId,
|
||||
PagesRead = 3,
|
||||
});
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
Assert.Equal(2, (await _unitOfWork.AppUserProgressRepository.GetAllProgress()).Count());
|
||||
|
||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
||||
Substitute.For<IEventHub>(),
|
||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||
|
||||
|
||||
await cleanupService.ConsolidateProgress();
|
||||
|
||||
var progress = await _unitOfWork.AppUserProgressRepository.GetAllProgress();
|
||||
|
||||
Assert.Single(progress);
|
||||
Assert.True(progress.First().PagesRead == 3);
|
||||
}
|
||||
#endregion
|
||||
|
||||
|
||||
#region EnsureChapterProgressIsCapped
|
||||
|
||||
|
@ -537,7 +599,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
c.UserProgress = new List<AppUserProgress>();
|
||||
s.Volumes = new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0").WithChapter(c).Build()
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume).WithChapter(c).Build()
|
||||
};
|
||||
_context.Series.Add(s);
|
||||
|
||||
|
@ -586,7 +648,7 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
}
|
||||
#endregion
|
||||
|
||||
// #region CleanupBookmarks
|
||||
#region CleanupBookmarks
|
||||
//
|
||||
// [Fact]
|
||||
// public async Task CleanupBookmarks_LeaveAllFiles()
|
||||
|
@ -723,5 +785,5 @@ public class CleanupServiceTests : AbstractDbTest
|
|||
// Assert.Equal(1, ds.FileSystem.Directory.GetDirectories($"{BookmarkDirectory}1/1/").Length);
|
||||
// }
|
||||
//
|
||||
// #endregion
|
||||
#endregion
|
||||
}
|
||||
|
|
|
@ -1,15 +1,18 @@
|
|||
using System.Collections.Generic;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Constants;
|
||||
using API.Data;
|
||||
using API.Data.Repositories;
|
||||
using API.DTOs.CollectionTags;
|
||||
using API.DTOs.Collection;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Plus;
|
||||
using API.SignalR;
|
||||
using API.Tests.Helpers;
|
||||
using Kavita.Common;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
|
@ -25,7 +28,7 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
_context.CollectionTag.RemoveRange(_context.CollectionTag.ToList());
|
||||
_context.AppUserCollection.RemoveRange(_context.AppUserCollection.ToList());
|
||||
_context.Library.RemoveRange(_context.Library.ToList());
|
||||
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
@ -33,119 +36,494 @@ public class CollectionTagServiceTests : AbstractDbTest
|
|||
|
||||
private async Task SeedSeries()
|
||||
{
|
||||
if (_context.CollectionTag.Any()) return;
|
||||
if (_context.AppUserCollection.Any()) return;
|
||||
|
||||
var s1 = new SeriesBuilder("Series 1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Mature).Build()).Build();
|
||||
var s2 = new SeriesBuilder("Series 2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.G).Build()).Build();
|
||||
_context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
|
||||
.WithSeries(new SeriesBuilder("Series 1").Build())
|
||||
.WithSeries(new SeriesBuilder("Series 2").Build())
|
||||
.WithSeries(s1)
|
||||
.WithSeries(s2)
|
||||
.Build());
|
||||
|
||||
_context.CollectionTag.Add(new CollectionTagBuilder("Tag 1").Build());
|
||||
_context.CollectionTag.Add(new CollectionTagBuilder("Tag 2").WithIsPromoted(true).Build());
|
||||
var user = new AppUserBuilder("majora2007", "majora2007", Seed.DefaultThemes.First()).Build();
|
||||
user.Collections = new List<AppUserCollection>()
|
||||
{
|
||||
new AppUserCollectionBuilder("Tag 1").WithItems(new []{s1}).Build(),
|
||||
new AppUserCollectionBuilder("Tag 2").WithItems(new []{s1, s2}).WithIsPromoted(true).Build()
|
||||
};
|
||||
_unitOfWork.UserRepository.Add(user);
|
||||
|
||||
await _unitOfWork.CommitAsync();
|
||||
}
|
||||
|
||||
#region DeleteTag
|
||||
|
||||
[Fact]
|
||||
public async Task TagExistsByName_ShouldFindTag()
|
||||
public async Task DeleteTag_ShouldDeleteTag_WhenTagExists()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
Assert.True(await _service.TagExistsByName("Tag 1"));
|
||||
Assert.True(await _service.TagExistsByName("tag 1"));
|
||||
Assert.False(await _service.TagExistsByName("tag5"));
|
||||
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Act
|
||||
var result = await _service.DeleteTag(1, user);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
var deletedTag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.Null(deletedTag);
|
||||
Assert.Single(user.Collections); // Only one collection should remain
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DeleteTag_ShouldReturnTrue_WhenTagDoesNotExist()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Act - Try to delete a non-existent tag
|
||||
var result = await _service.DeleteTag(999, user);
|
||||
|
||||
// Assert
|
||||
Assert.True(result); // Should return true because the tag is already "deleted"
|
||||
Assert.Equal(2, user.Collections.Count); // Both collections should remain
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DeleteTag_ShouldNotAffectOtherTags()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Act
|
||||
var result = await _service.DeleteTag(1, user);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
var remainingTag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(remainingTag);
|
||||
Assert.Equal("Tag 2", remainingTag.Title);
|
||||
Assert.True(remainingTag.Promoted);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region UpdateTag
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldUpdateFields()
|
||||
{
|
||||
await SeedSeries();
|
||||
|
||||
_context.CollectionTag.Add(new CollectionTagBuilder("UpdateTag_ShouldUpdateFields").WithId(3).WithIsPromoted(true).Build());
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldUpdateFields").WithIsPromoted(true).Build());
|
||||
_unitOfWork.UserRepository.Update(user);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await _service.UpdateTag(new CollectionTagDto()
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "UpdateTag_ShouldUpdateFields",
|
||||
Id = 3,
|
||||
Promoted = true,
|
||||
Summary = "Test Summary",
|
||||
});
|
||||
AgeRating = AgeRating.Unknown
|
||||
}, 1);
|
||||
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetTagAsync(3);
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(3);
|
||||
Assert.NotNull(tag);
|
||||
Assert.True(tag.Promoted);
|
||||
Assert.True(!string.IsNullOrEmpty(tag.Summary));
|
||||
Assert.False(string.IsNullOrEmpty(tag.Summary));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// UpdateTag should not change any title if non-Kavita source
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource()
|
||||
{
|
||||
await SeedSeries();
|
||||
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource").WithSource(ScrobbleProvider.Mal).Build());
|
||||
_unitOfWork.UserRepository.Update(user);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "New Title",
|
||||
Id = 3,
|
||||
Promoted = true,
|
||||
Summary = "Test Summary",
|
||||
AgeRating = AgeRating.Unknown
|
||||
}, 1);
|
||||
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(3);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource", tag.Title);
|
||||
Assert.False(string.IsNullOrEmpty(tag.Summary));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task AddTagToSeries_ShouldAddTagToAllSeries()
|
||||
public async Task UpdateTag_ShouldThrowException_WhenTagDoesNotExist()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
var ids = new[] {1, 2};
|
||||
await _service.AddTagToSeries(await _unitOfWork.CollectionTagRepository.GetTagAsync(1, CollectionTagIncludes.SeriesMetadata), ids);
|
||||
|
||||
var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(ids);
|
||||
Assert.Contains(metadatas.ElementAt(0).CollectionTags, t => t.Title.Equals("Tag 1"));
|
||||
Assert.Contains(metadatas.ElementAt(1).CollectionTags, t => t.Title.Equals("Tag 1"));
|
||||
// Act & Assert
|
||||
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Non-existent Tag",
|
||||
Id = 999, // Non-existent ID
|
||||
Promoted = false
|
||||
}, 1));
|
||||
|
||||
Assert.Equal("collection-doesnt-exist", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldRemoveMultiple()
|
||||
public async Task UpdateTag_ShouldThrowException_WhenUserDoesNotOwnTag()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Create a second user
|
||||
var user2 = new AppUserBuilder("user2", "user2", Seed.DefaultThemes.First()).Build();
|
||||
_unitOfWork.UserRepository.Add(user2);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Act & Assert
|
||||
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1, // This belongs to user1
|
||||
Promoted = false
|
||||
}, 2)); // User with ID 2
|
||||
|
||||
Assert.Equal("access-denied", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldThrowException_WhenTitleIsEmpty()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Act & Assert
|
||||
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = " ", // Empty after trimming
|
||||
Id = 1,
|
||||
Promoted = false
|
||||
}, 1));
|
||||
|
||||
Assert.Equal("collection-tag-title-required", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldThrowException_WhenTitleAlreadyExists()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Act & Assert
|
||||
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 2", // Already exists
|
||||
Id = 1, // Trying to rename Tag 1 to Tag 2
|
||||
Promoted = false
|
||||
}, 1));
|
||||
|
||||
Assert.Equal("collection-tag-duplicate", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldUpdateCoverImageSettings()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Act
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
CoverImageLocked = true
|
||||
}, 1);
|
||||
|
||||
// Assert
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.True(tag.CoverImageLocked);
|
||||
|
||||
// Now test unlocking the cover image
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
CoverImageLocked = false
|
||||
}, 1);
|
||||
|
||||
tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.False(tag.CoverImageLocked);
|
||||
Assert.Equal(string.Empty, tag.CoverImage);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldAllowPromoteForAdminRole()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Setup a user with admin role
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
await AddUserWithRole(user.Id, PolicyConstants.AdminRole);
|
||||
|
||||
|
||||
// Act - Try to promote a tag that wasn't previously promoted
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
Promoted = true
|
||||
}, 1);
|
||||
|
||||
// Assert
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.True(tag.Promoted);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldAllowPromoteForPromoteRole()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Setup a user with promote role
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Mock to return promote role for the user
|
||||
await AddUserWithRole(user.Id, PolicyConstants.PromoteRole);
|
||||
|
||||
// Act - Try to promote a tag that wasn't previously promoted
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
Promoted = true
|
||||
}, 1);
|
||||
|
||||
// Assert
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.True(tag.Promoted);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateTag_ShouldNotChangePromotion_WhenUserHasNoPermission()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Setup a user with no special roles
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Act - Try to promote a tag without proper role
|
||||
await _service.UpdateTag(new AppUserCollectionDto()
|
||||
{
|
||||
Title = "Tag 1",
|
||||
Id = 1,
|
||||
Promoted = true
|
||||
}, 1);
|
||||
|
||||
// Assert
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.False(tag.Promoted); // Should remain unpromoted
|
||||
}
|
||||
#endregion
|
||||
|
||||
|
||||
#region RemoveTagFromSeries
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_RemoveSeriesFromTag()
|
||||
{
|
||||
await SeedSeries();
|
||||
var ids = new[] {1, 2};
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetTagAsync(2, CollectionTagIncludes.SeriesMetadata);
|
||||
await _service.AddTagToSeries(tag, ids);
|
||||
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Tag 2 has 2 series
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(tag);
|
||||
|
||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||
var userCollections = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.Equal(2, userCollections!.Collections.Count);
|
||||
Assert.Single(tag.Items);
|
||||
Assert.Equal(2, tag.Items.First().Id);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Ensure the rating of the tag updates after a series change
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_RemoveSeriesFromTag_UpdatesRating()
|
||||
{
|
||||
await SeedSeries();
|
||||
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Tag 2 has 2 series
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(tag);
|
||||
|
||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||
|
||||
var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {1});
|
||||
|
||||
Assert.Single(metadatas);
|
||||
Assert.Empty(metadatas.First().CollectionTags);
|
||||
Assert.NotEmpty(await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {2}));
|
||||
Assert.Equal(AgeRating.G, tag.AgeRating);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Should remove the tag when there are no items left on the tag
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public async Task GetTagOrCreate_ShouldReturnNewTag()
|
||||
public async Task RemoveTagFromSeries_RemoveSeriesFromTag_DeleteTagWhenNoSeriesLeft()
|
||||
{
|
||||
await SeedSeries();
|
||||
var tag = await _service.GetTagOrCreate(0, "GetTagOrCreate_ShouldReturnNewTag");
|
||||
|
||||
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||
Assert.NotNull(user);
|
||||
|
||||
// Tag 1 has 1 series
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal(0, tag.Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetTagOrCreate_ShouldReturnExistingTag()
|
||||
{
|
||||
await SeedSeries();
|
||||
var tag = await _service.GetTagOrCreate(1, "Some new tag");
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal(1, tag.Id);
|
||||
Assert.Equal("Tag 1", tag.Title);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagsWithoutSeries_ShouldRemoveAbandonedEntries()
|
||||
{
|
||||
await SeedSeries();
|
||||
// Setup a tag with one series
|
||||
var tag = await _service.GetTagOrCreate(0, "Tag with a series");
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {1});
|
||||
tag.SeriesMetadatas.Add(metadatas.First());
|
||||
var tagId = tag.Id;
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Validate it doesn't remove tags it shouldn't
|
||||
await _service.RemoveTagsWithoutSeries();
|
||||
Assert.NotNull(await _unitOfWork.CollectionTagRepository.GetTagAsync(tagId));
|
||||
|
||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||
|
||||
// Validate it does remove tags it should
|
||||
await _service.RemoveTagsWithoutSeries();
|
||||
Assert.Null(await _unitOfWork.CollectionTagRepository.GetTagAsync(tagId));
|
||||
var tag2 = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.Null(tag2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldReturnFalse_WhenTagIsNull()
|
||||
{
|
||||
// Act
|
||||
var result = await _service.RemoveTagFromSeries(null, [1]);
|
||||
|
||||
// Assert
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldHandleEmptySeriesIdsList()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
var initialItemCount = tag.Items.Count;
|
||||
|
||||
// Act
|
||||
var result = await _service.RemoveTagFromSeries(tag, Array.Empty<int>());
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldHandleNonExistentSeriesIds()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
var initialItemCount = tag.Items.Count;
|
||||
|
||||
// Act - Try to remove a series that doesn't exist in the tag
|
||||
var result = await _service.RemoveTagFromSeries(tag, [999]);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldHandleNullItemsList()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.NotNull(tag);
|
||||
|
||||
// Force null items list
|
||||
tag.Items = null;
|
||||
_unitOfWork.CollectionTagRepository.Update(tag);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Act
|
||||
var result = await _service.RemoveTagFromSeries(tag, [1]);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
// The tag should not be removed since the items list was null, not empty
|
||||
var tagAfter = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||
Assert.Null(tagAfter);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoveTagFromSeries_ShouldUpdateAgeRating_WhenMultipleSeriesRemain()
|
||||
{
|
||||
// Arrange
|
||||
await SeedSeries();
|
||||
|
||||
// Add a third series with a different age rating
|
||||
var s3 = new SeriesBuilder("Series 3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.PG).Build()).Build();
|
||||
_context.Library.First().Series.Add(s3);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Add series 3 to tag 2
|
||||
var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(tag);
|
||||
tag.Items.Add(s3);
|
||||
_unitOfWork.CollectionTagRepository.Update(tag);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Act - Remove the series with Mature rating
|
||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||
|
||||
// Assert
|
||||
tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||
Assert.NotNull(tag);
|
||||
Assert.Equal(2, tag.Items.Count);
|
||||
|
||||
// The age rating should be updated to the highest remaining rating (PG)
|
||||
Assert.Equal(AgeRating.PG, tag.AgeRating);
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
||||
|
|
|
@ -1,20 +1,30 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using API.Services;
|
||||
using Kavita.Common.Helpers;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class DirectoryServiceTests
|
||||
public class DirectoryServiceTests: AbstractFsTest
|
||||
{
|
||||
private readonly ILogger<DirectoryService> _logger = Substitute.For<ILogger<DirectoryService>>();
|
||||
private readonly ITestOutputHelper _testOutputHelper;
|
||||
|
||||
public DirectoryServiceTests(ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
_testOutputHelper = testOutputHelper;
|
||||
}
|
||||
|
||||
|
||||
#region TraverseTreeParallelForEach
|
||||
|
@ -372,9 +382,16 @@ public class DirectoryServiceTests
|
|||
#endregion
|
||||
|
||||
#region IsDriveMounted
|
||||
// The root directory (/) is always mounted on non windows
|
||||
[Fact]
|
||||
public void IsDriveMounted_DriveIsNotMounted()
|
||||
{
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
|
||||
{
|
||||
_testOutputHelper.WriteLine("Skipping test on non Windows platform");
|
||||
return;
|
||||
}
|
||||
|
||||
const string testDirectory = "c:/manga/";
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
|
||||
|
@ -386,6 +403,12 @@ public class DirectoryServiceTests
|
|||
[Fact]
|
||||
public void IsDriveMounted_DriveIsMounted()
|
||||
{
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
|
||||
{
|
||||
_testOutputHelper.WriteLine("Skipping test on non Windows platform");
|
||||
return;
|
||||
}
|
||||
|
||||
const string testDirectory = "c:/manga/";
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
|
||||
|
@ -721,6 +744,54 @@ public class DirectoryServiceTests
|
|||
|
||||
#endregion
|
||||
|
||||
#region FindLowestDirectoriesFromFiles
|
||||
|
||||
[Theory]
|
||||
[InlineData(new [] {"C:/Manga/"},
|
||||
new [] {"C:/Manga/Love Hina/Vol. 01.cbz"},
|
||||
"C:/Manga/Love Hina")]
|
||||
[InlineData(new [] {"C:/Manga/"},
|
||||
new [] {"C:/Manga/Romance/Love Hina/Vol. 01.cbz"},
|
||||
"C:/Manga/Romance/Love Hina")]
|
||||
[InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"},
|
||||
new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"},
|
||||
"C:/Manga/Dir 1/Love Hina")]
|
||||
[InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"},
|
||||
new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 01.cbz"},
|
||||
null)]
|
||||
[InlineData(new [] {@"C:\mount\drive\Library\Test Library\Comics\"},
|
||||
new [] {@"C:\mount\drive\Library\Test Library\Comics\Bruce Lee (1994)\Bruce Lee #001 (1994).cbz"},
|
||||
@"C:/mount/drive/Library/Test Library/Comics/Bruce Lee (1994)")]
|
||||
[InlineData(new [] {"C:/Manga/"},
|
||||
new [] {"C:/Manga/Love Hina/Vol. 01.cbz", "C:/Manga/Love Hina/Specials/Sp01.cbz"},
|
||||
"C:/Manga/Love Hina")]
|
||||
[InlineData(new [] {"/manga"},
|
||||
new [] {"/manga/Love Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
|
||||
"/manga/Love Hina")]
|
||||
[InlineData(new [] {"/manga"},
|
||||
new [] {"/manga/Love Hina/Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
|
||||
"/manga/Love Hina")]
|
||||
[InlineData(new [] {"/manga"},
|
||||
new [] {"/manga/Dress Up Darling/Dress Up Darling Ch 01.cbz", "/manga/Dress Up Darling/Dress Up Darling/Dress Up Darling Vol 01.cbz"},
|
||||
"/manga/Dress Up Darling")]
|
||||
public void FindLowestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory)
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
foreach (var directory in rootDirectories)
|
||||
{
|
||||
fileSystem.AddDirectory(directory);
|
||||
}
|
||||
foreach (var f in files)
|
||||
{
|
||||
fileSystem.AddFile(f, new MockFileData(""));
|
||||
}
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
|
||||
var actual = ds.FindLowestDirectoriesFromFiles(rootDirectories, files);
|
||||
Assert.Equal(expectedDirectory, actual);
|
||||
}
|
||||
|
||||
#endregion
|
||||
#region GetFoldersTillRoot
|
||||
|
||||
[Theory]
|
||||
|
@ -851,12 +922,14 @@ public class DirectoryServiceTests
|
|||
#region GetHumanReadableBytes
|
||||
|
||||
[Theory]
|
||||
[InlineData(1200, "1.17 KB")]
|
||||
[InlineData(1, "1 B")]
|
||||
[InlineData(10000000, "9.54 MB")]
|
||||
[InlineData(10000000000, "9.31 GB")]
|
||||
public void GetHumanReadableBytesTest(long bytes, string expected)
|
||||
[InlineData(1200, 1.17, " KB")]
|
||||
[InlineData(1, 1, " B")]
|
||||
[InlineData(10000000, 9.54, " MB")]
|
||||
[InlineData(10000000000, 9.31, " GB")]
|
||||
public void GetHumanReadableBytesTest(long bytes, float number, string suffix)
|
||||
{
|
||||
// GetHumanReadableBytes is user facing, should be in CultureInfo.CurrentCulture
|
||||
var expected = number.ToString(CultureInfo.CurrentCulture) + suffix;
|
||||
Assert.Equal(expected, DirectoryService.GetHumanReadableBytes(bytes));
|
||||
}
|
||||
#endregion
|
||||
|
@ -878,8 +951,9 @@ public class DirectoryServiceTests
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
|
||||
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
||||
var globMatcher = new GlobMatcher();
|
||||
globMatcher.AddExclude("*.*");
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||
|
||||
Assert.Empty(allFiles);
|
||||
|
||||
|
@ -903,7 +977,9 @@ public class DirectoryServiceTests
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
||||
var globMatcher = new GlobMatcher();
|
||||
globMatcher.AddExclude("**/Accel World/*");
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||
|
||||
Assert.Single(allFiles); // Ignore files are not counted in files, only valid extensions
|
||||
|
||||
|
@ -932,7 +1008,10 @@ public class DirectoryServiceTests
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
||||
var globMatcher = new GlobMatcher();
|
||||
globMatcher.AddExclude("**/Accel World/*");
|
||||
globMatcher.AddExclude("**/ArtBooks/*");
|
||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||
|
||||
Assert.Equal(2, allFiles.Count); // Ignore files are not counted in files, only valid extensions
|
||||
|
||||
|
@ -986,11 +1065,14 @@ public class DirectoryServiceTests
|
|||
#region GetParentDirectory
|
||||
|
||||
[Theory]
|
||||
[InlineData(@"C:/file.txt", "C:/")]
|
||||
[InlineData(@"C:/folder/file.txt", "C:/folder")]
|
||||
[InlineData(@"C:/folder/subfolder/file.txt", "C:/folder/subfolder")]
|
||||
[InlineData(@"file.txt", "")]
|
||||
[InlineData(@"folder/file.txt", "folder")]
|
||||
[InlineData(@"folder/subfolder/file.txt", "folder/subfolder")]
|
||||
public void GetParentDirectoryName_ShouldFindParentOfFiles(string path, string expected)
|
||||
{
|
||||
path = Root + path;
|
||||
expected = Root + expected;
|
||||
|
||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||
{
|
||||
{ path, new MockFileData(string.Empty)}
|
||||
|
@ -1000,11 +1082,14 @@ public class DirectoryServiceTests
|
|||
Assert.Equal(expected, ds.GetParentDirectoryName(path));
|
||||
}
|
||||
[Theory]
|
||||
[InlineData(@"C:/folder", "C:/")]
|
||||
[InlineData(@"C:/folder/subfolder", "C:/folder")]
|
||||
[InlineData(@"C:/folder/subfolder/another", "C:/folder/subfolder")]
|
||||
[InlineData(@"folder", "")]
|
||||
[InlineData(@"folder/subfolder", "folder")]
|
||||
[InlineData(@"folder/subfolder/another", "folder/subfolder")]
|
||||
public void GetParentDirectoryName_ShouldFindParentOfDirectories(string path, string expected)
|
||||
{
|
||||
path = Root + path;
|
||||
expected = Root + expected;
|
||||
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory(path);
|
||||
|
||||
|
|
2860
API.Tests/Services/ExternalMetadataServiceTests.cs
Normal file
221
API.Tests/Services/ImageServiceTests.cs
Normal file
|
@ -0,0 +1,221 @@
|
|||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using API.Entities.Enums;
|
||||
using API.Services;
|
||||
using NetVips;
|
||||
using Xunit;
|
||||
using Image = NetVips.Image;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class ImageServiceTests
|
||||
{
|
||||
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageService/Covers");
|
||||
private readonly string _testDirectoryColorScapes = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageService/ColorScapes");
|
||||
private const string OutputPattern = "_output";
|
||||
private const string BaselinePattern = "_baseline";
|
||||
|
||||
/// <summary>
|
||||
/// Run this once to get the baseline generation
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void GenerateBaseline()
|
||||
{
|
||||
GenerateFiles(BaselinePattern);
|
||||
Assert.True(true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Change the Scaling/Crop code then run this continuously
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public void TestScaling()
|
||||
{
|
||||
GenerateFiles(OutputPattern);
|
||||
GenerateHtmlFile();
|
||||
Assert.True(true);
|
||||
}
|
||||
|
||||
private void GenerateFiles(string outputExtension)
|
||||
{
|
||||
// Step 1: Delete any images that have _output in the name
|
||||
var outputFiles = Directory.GetFiles(_testDirectory, "*_output.*");
|
||||
foreach (var file in outputFiles)
|
||||
{
|
||||
File.Delete(file);
|
||||
}
|
||||
|
||||
// Step 2: Scan the _testDirectory for images
|
||||
var imageFiles = Directory.GetFiles(_testDirectory, "*.*")
|
||||
.Where(file => !file.EndsWith("html"))
|
||||
.Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
|
||||
.ToList();
|
||||
|
||||
// Step 3: Process each image
|
||||
foreach (var imagePath in imageFiles)
|
||||
{
|
||||
var fileName = Path.GetFileNameWithoutExtension(imagePath);
|
||||
var dims = CoverImageSize.Default.GetDimensions();
|
||||
using var sourceImage = Image.NewFromFile(imagePath, false, Enums.Access.SequentialUnbuffered);
|
||||
|
||||
var size = ImageService.GetSizeForDimensions(sourceImage, dims.Width, dims.Height);
|
||||
var crop = ImageService.GetCropForDimensions(sourceImage, dims.Width, dims.Height);
|
||||
|
||||
using var thumbnail = Image.Thumbnail(imagePath, dims.Width, dims.Height,
|
||||
size: size,
|
||||
crop: crop);
|
||||
|
||||
var outputFileName = fileName + outputExtension + ".png";
|
||||
thumbnail.WriteToFile(Path.Join(_testDirectory, outputFileName));
|
||||
}
|
||||
}
|
||||
|
||||
private void GenerateHtmlFile()
|
||||
{
|
||||
var imageFiles = Directory.GetFiles(_testDirectory, "*.*")
|
||||
.Where(file => !file.EndsWith("html"))
|
||||
.Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
|
||||
.ToList();
|
||||
|
||||
var htmlBuilder = new StringBuilder();
|
||||
htmlBuilder.AppendLine("<!DOCTYPE html>");
|
||||
htmlBuilder.AppendLine("<html lang=\"en\">");
|
||||
htmlBuilder.AppendLine("<head>");
|
||||
htmlBuilder.AppendLine("<meta charset=\"UTF-8\">");
|
||||
htmlBuilder.AppendLine("<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">");
|
||||
htmlBuilder.AppendLine("<title>Image Comparison</title>");
|
||||
htmlBuilder.AppendLine("<style>");
|
||||
htmlBuilder.AppendLine("body { font-family: Arial, sans-serif; }");
|
||||
htmlBuilder.AppendLine(".container { display: flex; flex-wrap: wrap; }");
|
||||
htmlBuilder.AppendLine(".image-row { display: flex; align-items: center; margin-bottom: 20px; width: 100% }");
|
||||
htmlBuilder.AppendLine(".image-row img { margin-right: 10px; max-width: 200px; height: auto; }");
|
||||
htmlBuilder.AppendLine("</style>");
|
||||
htmlBuilder.AppendLine("</head>");
|
||||
htmlBuilder.AppendLine("<body>");
|
||||
htmlBuilder.AppendLine("<div class=\"container\">");
|
||||
|
||||
foreach (var imagePath in imageFiles)
|
||||
{
|
||||
var fileName = Path.GetFileNameWithoutExtension(imagePath);
|
||||
var baselinePath = Path.Combine(_testDirectory, fileName + "_baseline.png");
|
||||
var outputPath = Path.Combine(_testDirectory, fileName + "_output.png");
|
||||
var dims = CoverImageSize.Default.GetDimensions();
|
||||
|
||||
using var sourceImage = Image.NewFromFile(imagePath, false, Enums.Access.SequentialUnbuffered);
|
||||
htmlBuilder.AppendLine("<div class=\"image-row\">");
|
||||
htmlBuilder.AppendLine($"<p>{fileName} ({((double) sourceImage.Width / sourceImage.Height).ToString("F2")}) - {ImageService.WillScaleWell(sourceImage, dims.Width, dims.Height)}</p>");
|
||||
htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(imagePath)}\" alt=\"{fileName}\">");
|
||||
if (File.Exists(baselinePath))
|
||||
{
|
||||
htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(baselinePath)}\" alt=\"{fileName} baseline\">");
|
||||
}
|
||||
if (File.Exists(outputPath))
|
||||
{
|
||||
htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(outputPath)}\" alt=\"{fileName} output\">");
|
||||
}
|
||||
htmlBuilder.AppendLine("</div>");
|
||||
}
|
||||
|
||||
htmlBuilder.AppendLine("</div>");
|
||||
htmlBuilder.AppendLine("</body>");
|
||||
htmlBuilder.AppendLine("</html>");
|
||||
|
||||
File.WriteAllText(Path.Combine(_testDirectory, "index.html"), htmlBuilder.ToString());
|
||||
}
|
||||
|
||||
|
||||
[Fact]
|
||||
public void TestColorScapes()
|
||||
{
|
||||
// Step 1: Delete any images that have _output in the name
|
||||
var outputFiles = Directory.GetFiles(_testDirectoryColorScapes, "*_output.*");
|
||||
foreach (var file in outputFiles)
|
||||
{
|
||||
File.Delete(file);
|
||||
}
|
||||
|
||||
// Step 2: Scan the _testDirectory for images
|
||||
var imageFiles = Directory.GetFiles(_testDirectoryColorScapes, "*.*")
|
||||
.Where(file => !file.EndsWith("html"))
|
||||
.Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
|
||||
.ToList();
|
||||
|
||||
// Step 3: Process each image
|
||||
foreach (var imagePath in imageFiles)
|
||||
{
|
||||
var fileName = Path.GetFileNameWithoutExtension(imagePath);
|
||||
var colors = ImageService.CalculateColorScape(imagePath);
|
||||
|
||||
// Generate primary color image
|
||||
GenerateColorImage(colors.Primary, Path.Combine(_testDirectoryColorScapes, $"{fileName}_primary_output.png"));
|
||||
|
||||
// Generate secondary color image
|
||||
GenerateColorImage(colors.Secondary, Path.Combine(_testDirectoryColorScapes, $"{fileName}_secondary_output.png"));
|
||||
}
|
||||
|
||||
// Step 4: Generate HTML file
|
||||
GenerateHtmlFileForColorScape();
|
||||
Assert.True(true);
|
||||
}
|
||||
|
||||
private static void GenerateColorImage(string hexColor, string outputPath)
|
||||
{
|
||||
var color = ImageService.HexToRgb(hexColor);
|
||||
using var colorImage = Image.Black(200, 100);
|
||||
using var output = colorImage + new[] { color.R / 255.0, color.G / 255.0, color.B / 255.0 };
|
||||
output.WriteToFile(outputPath);
|
||||
}
|
||||
|
||||
private void GenerateHtmlFileForColorScape()
|
||||
{
|
||||
var imageFiles = Directory.GetFiles(_testDirectoryColorScapes, "*.*")
|
||||
.Where(file => !file.EndsWith("html"))
|
||||
.Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
|
||||
.ToList();
|
||||
|
||||
var htmlBuilder = new StringBuilder();
|
||||
htmlBuilder.AppendLine("<!DOCTYPE html>");
|
||||
htmlBuilder.AppendLine("<html lang=\"en\">");
|
||||
htmlBuilder.AppendLine("<head>");
|
||||
htmlBuilder.AppendLine("<meta charset=\"UTF-8\">");
|
||||
htmlBuilder.AppendLine("<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">");
|
||||
htmlBuilder.AppendLine("<title>Color Scape Comparison</title>");
|
||||
htmlBuilder.AppendLine("<style>");
|
||||
htmlBuilder.AppendLine("body { font-family: Arial, sans-serif; }");
|
||||
htmlBuilder.AppendLine(".container { display: flex; flex-wrap: wrap; }");
|
||||
htmlBuilder.AppendLine(".image-row { display: flex; align-items: center; margin-bottom: 20px; width: 100% }");
|
||||
htmlBuilder.AppendLine(".image-row img { margin-right: 10px; max-width: 200px; height: auto; }");
|
||||
htmlBuilder.AppendLine(".color-square { width: 100px; height: 100px; margin-right: 10px; }");
|
||||
htmlBuilder.AppendLine("</style>");
|
||||
htmlBuilder.AppendLine("</head>");
|
||||
htmlBuilder.AppendLine("<body>");
|
||||
htmlBuilder.AppendLine("<div class=\"container\">");
|
||||
|
||||
foreach (var imagePath in imageFiles)
|
||||
{
|
||||
var fileName = Path.GetFileNameWithoutExtension(imagePath);
|
||||
var primaryPath = Path.Combine(_testDirectoryColorScapes, $"{fileName}_primary_output.png");
|
||||
var secondaryPath = Path.Combine(_testDirectoryColorScapes, $"{fileName}_secondary_output.png");
|
||||
|
||||
htmlBuilder.AppendLine("<div class=\"image-row\">");
|
||||
htmlBuilder.AppendLine($"<p>{fileName}</p>");
|
||||
htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(imagePath)}\" alt=\"{fileName}\">");
|
||||
if (File.Exists(primaryPath))
|
||||
{
|
||||
htmlBuilder.AppendLine($"<img class=\"color-square\" src=\"./{Path.GetFileName(primaryPath)}\" alt=\"{fileName} primary color\">");
|
||||
}
|
||||
if (File.Exists(secondaryPath))
|
||||
{
|
||||
htmlBuilder.AppendLine($"<img class=\"color-square\" src=\"./{Path.GetFileName(secondaryPath)}\" alt=\"{fileName} secondary color\">");
|
||||
}
|
||||
htmlBuilder.AppendLine("</div>");
|
||||
}
|
||||
|
||||
htmlBuilder.AppendLine("</div>");
|
||||
htmlBuilder.AppendLine("</body>");
|
||||
htmlBuilder.AppendLine("</html>");
|
||||
|
||||
File.WriteAllText(Path.Combine(_testDirectoryColorScapes, "colorscape_index.html"), htmlBuilder.ToString());
|
||||
}
|
||||
}
|
|
@ -1,37 +1,41 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Data.Common;
|
||||
using System.IO;
|
||||
using System.IO.Abstractions;
|
||||
using System.IO.Abstractions.TestingHelpers;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Data.Metadata;
|
||||
using API.Data.Repositories;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using API.SignalR;
|
||||
using AutoMapper;
|
||||
using Microsoft.Data.Sqlite;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using API.Tests.Helpers;
|
||||
using Hangfire;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
internal class MockReadingItemService : IReadingItemService
|
||||
public class MockReadingItemService : IReadingItemService
|
||||
{
|
||||
private readonly IDefaultParser _defaultParser;
|
||||
private readonly BasicParser _basicParser;
|
||||
private readonly ComicVineParser _comicVineParser;
|
||||
private readonly ImageParser _imageParser;
|
||||
private readonly BookParser _bookParser;
|
||||
private readonly PdfParser _pdfParser;
|
||||
|
||||
public MockReadingItemService(IDefaultParser defaultParser)
|
||||
public MockReadingItemService(IDirectoryService directoryService, IBookService bookService)
|
||||
{
|
||||
_defaultParser = defaultParser;
|
||||
_imageParser = new ImageParser(directoryService);
|
||||
_basicParser = new BasicParser(directoryService, _imageParser);
|
||||
_bookParser = new BookParser(directoryService, bookService, _basicParser);
|
||||
_comicVineParser = new ComicVineParser(directoryService);
|
||||
_pdfParser = new PdfParser(directoryService);
|
||||
}
|
||||
|
||||
public ComicInfo GetComicInfo(string filePath)
|
||||
|
@ -54,99 +58,57 @@ internal class MockReadingItemService : IReadingItemService
|
|||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public ParserInfo Parse(string path, string rootPath, LibraryType type)
|
||||
public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type)
|
||||
{
|
||||
return _defaultParser.Parse(path, rootPath, type);
|
||||
if (_comicVineParser.IsApplicable(path, type))
|
||||
{
|
||||
return _comicVineParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
|
||||
}
|
||||
if (_imageParser.IsApplicable(path, type))
|
||||
{
|
||||
return _imageParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
|
||||
}
|
||||
if (_bookParser.IsApplicable(path, type))
|
||||
{
|
||||
return _bookParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
|
||||
}
|
||||
if (_pdfParser.IsApplicable(path, type))
|
||||
{
|
||||
return _pdfParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
|
||||
}
|
||||
if (_basicParser.IsApplicable(path, type))
|
||||
{
|
||||
return _basicParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
|
||||
public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
|
||||
{
|
||||
return _defaultParser.Parse(path, rootPath, type);
|
||||
return Parse(path, rootPath, libraryRoot, type);
|
||||
}
|
||||
}
|
||||
|
||||
public class ParseScannedFilesTests
|
||||
public class ParseScannedFilesTests : AbstractDbTest
|
||||
{
|
||||
private readonly ILogger<ParseScannedFiles> _logger = Substitute.For<ILogger<ParseScannedFiles>>();
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
private readonly ScannerHelper _scannerHelper;
|
||||
|
||||
private readonly DbConnection _connection;
|
||||
private readonly DataContext _context;
|
||||
|
||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
||||
private const string DataDirectory = "C:/data/";
|
||||
|
||||
public ParseScannedFilesTests()
|
||||
public ParseScannedFilesTests(ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
var contextOptions = new DbContextOptionsBuilder()
|
||||
.UseSqlite(CreateInMemoryDatabase())
|
||||
.Options;
|
||||
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
|
||||
|
||||
_context = new DataContext(contextOptions);
|
||||
Task.Run(SeedDb).GetAwaiter().GetResult();
|
||||
|
||||
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
|
||||
|
||||
// Since ProcessFile relies on _readingItemService, we can implement our own versions of _readingItemService so we have control over how the calls work
|
||||
GlobalConfiguration.Configuration.UseInMemoryStorage();
|
||||
_scannerHelper = new ScannerHelper(_unitOfWork, testOutputHelper);
|
||||
}
|
||||
|
||||
#region Setup
|
||||
|
||||
private static DbConnection CreateInMemoryDatabase()
|
||||
{
|
||||
var connection = new SqliteConnection("Filename=:memory:");
|
||||
|
||||
connection.Open();
|
||||
|
||||
return connection;
|
||||
}
|
||||
|
||||
private async Task<bool> SeedDb()
|
||||
{
|
||||
await _context.Database.MigrateAsync();
|
||||
var filesystem = CreateFileSystem();
|
||||
|
||||
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
|
||||
|
||||
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
|
||||
setting.Value = CacheDirectory;
|
||||
|
||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
|
||||
setting.Value = BackupDirectory;
|
||||
|
||||
_context.ServerSetting.Update(setting);
|
||||
|
||||
_context.Library.Add(new LibraryBuilder("Manga")
|
||||
.WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
|
||||
.Build());
|
||||
return await _context.SaveChangesAsync() > 0;
|
||||
}
|
||||
|
||||
private async Task ResetDB()
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
_context.Series.RemoveRange(_context.Series.ToList());
|
||||
|
||||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
private static MockFileSystem CreateFileSystem()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
||||
fileSystem.AddDirectory("C:/kavita/config/");
|
||||
fileSystem.AddDirectory(CacheDirectory);
|
||||
fileSystem.AddDirectory(CoverImageDirectory);
|
||||
fileSystem.AddDirectory(BackupDirectory);
|
||||
fileSystem.AddDirectory(DataDirectory);
|
||||
|
||||
return fileSystem;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region MergeName
|
||||
|
||||
// NOTE: I don't think I can test MergeName as it relies on Tracking Files, which is more complicated than I need
|
||||
|
@ -219,48 +181,45 @@ public class ParseScannedFilesTests
|
|||
|
||||
#region ScanLibrariesForSeries
|
||||
|
||||
/// <summary>
|
||||
/// Test that when a folder has 2 series with a localizedSeries, they combine into one final series
|
||||
/// </summary>
|
||||
// [Fact]
|
||||
// public async Task ScanLibrariesForSeries_ShouldCombineSeries()
|
||||
// {
|
||||
// // TODO: Implement these unit tests
|
||||
// }
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrariesForSeries_ShouldFindFiles()
|
||||
{
|
||||
var fileSystem = new MockFileSystem();
|
||||
fileSystem.AddDirectory("C:/Data/");
|
||||
fileSystem.AddFile("C:/Data/Accel World v1.cbz", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile("C:/Data/Accel World v2.cbz", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile("C:/Data/Accel World v2.pdf", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile("C:/Data/Nothing.pdf", new MockFileData(string.Empty));
|
||||
fileSystem.AddDirectory(Root + "Data/");
|
||||
fileSystem.AddFile(Root + "Data/Accel World v1.cbz", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile(Root + "Data/Accel World v2.cbz", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile(Root + "Data/Accel World v2.pdf", new MockFileData(string.Empty));
|
||||
fileSystem.AddFile(Root + "Data/Nothing.pdf", new MockFileData(string.Empty));
|
||||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
|
||||
|
||||
Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
|
||||
{
|
||||
var skippedScan = parsedInfo.Item1;
|
||||
var parsedFiles = parsedInfo.Item2;
|
||||
if (parsedFiles.Count == 0) return Task.CompletedTask;
|
||||
|
||||
var foundParsedSeries = new ParsedSeries()
|
||||
{
|
||||
Name = parsedFiles.First().Series,
|
||||
NormalizedName = parsedFiles.First().Series.ToNormalized(),
|
||||
Format = parsedFiles.First().Format
|
||||
};
|
||||
|
||||
parsedSeries.Add(foundParsedSeries, parsedFiles);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
var library =
|
||||
await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
Assert.NotNull(library);
|
||||
|
||||
library.Type = LibraryType.Manga;
|
||||
await psf.ScanLibrariesForSeries(library, new List<string>() {"C:/Data/"}, false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), TrackFiles);
|
||||
var parsedSeries = await psf.ScanLibrariesForSeries(library, new List<string>() {Root + "Data/"}, false,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(1));
|
||||
|
||||
|
||||
Assert.Equal(3, parsedSeries.Values.Count);
|
||||
Assert.NotEmpty(parsedSeries.Keys.Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
|
||||
// Assert.Equal(3, parsedSeries.Values.Count);
|
||||
// Assert.NotEmpty(parsedSeries.Keys.Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
|
||||
|
||||
Assert.Equal(3, parsedSeries.Count);
|
||||
Assert.NotEmpty(parsedSeries.Select(p => p.ParsedSeries).Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
@ -289,18 +248,16 @@ public class ParseScannedFilesTests
|
|||
var fileSystem = CreateTestFilesystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var directoriesSeen = new HashSet<string>();
|
||||
var library =
|
||||
await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
await psf.ProcessFiles("C:/Data/", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),
|
||||
(files, directoryPath) =>
|
||||
var scanResults = await psf.ScanFiles("C:/Data/", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
foreach (var scanResult in scanResults)
|
||||
{
|
||||
directoriesSeen.Add(directoryPath);
|
||||
return Task.CompletedTask;
|
||||
}, library);
|
||||
directoriesSeen.Add(scanResult.Folder);
|
||||
}
|
||||
|
||||
Assert.Equal(2, directoriesSeen.Count);
|
||||
}
|
||||
|
@ -311,16 +268,20 @@ public class ParseScannedFilesTests
|
|||
var fileSystem = CreateTestFilesystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
Assert.NotNull(library);
|
||||
|
||||
var directoriesSeen = new HashSet<string>();
|
||||
await psf.ProcessFiles("C:/Data/", false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),
|
||||
(files, directoryPath) =>
|
||||
var scanResults = await psf.ScanFiles("C:/Data/", false,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
|
||||
foreach (var scanResult in scanResults)
|
||||
{
|
||||
directoriesSeen.Add(directoryPath);
|
||||
return Task.CompletedTask;
|
||||
}, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes));
|
||||
directoriesSeen.Add(scanResult.Folder);
|
||||
}
|
||||
|
||||
Assert.Single(directoriesSeen);
|
||||
directoriesSeen.TryGetValue("C:/Data/", out var actual);
|
||||
|
@ -342,18 +303,14 @@ public class ParseScannedFilesTests
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var callCount = 0;
|
||||
await psf.ProcessFiles("C:/Data", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),(files, folderPath) =>
|
||||
{
|
||||
callCount++;
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
Assert.NotNull(library);
|
||||
var scanResults = await psf.ScanFiles("C:/Data", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
|
||||
return Task.CompletedTask;
|
||||
}, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes));
|
||||
|
||||
Assert.Equal(2, callCount);
|
||||
Assert.Equal(2, scanResults.Count);
|
||||
}
|
||||
|
||||
|
||||
|
@ -375,18 +332,235 @@ public class ParseScannedFilesTests
|
|||
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var callCount = 0;
|
||||
await psf.ProcessFiles("C:/Data", false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),(files, folderPath) =>
|
||||
{
|
||||
callCount++;
|
||||
return Task.CompletedTask;
|
||||
}, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes));
|
||||
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||
Assert.NotNull(library);
|
||||
var scanResults = await psf.ScanFiles("C:/Data", false,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||
|
||||
Assert.Equal(1, callCount);
|
||||
Assert.Single(scanResults);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
// TODO: Add back in (removed for Hotfix v0.8.5.x)
|
||||
//[Fact]
|
||||
public async Task HasSeriesFolderNotChangedSinceLastScan_AllSeriesFoldersHaveChanges()
|
||||
{
|
||||
const string testcase = "Subfolders always scanning all series changes - Manga.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
_unitOfWork.LibraryRepository.Update(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Equal(4, postLib.Series.Count);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(2, spiceAndWolf.Volumes.Count);
|
||||
|
||||
var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
|
||||
Assert.Single(frieren.Volumes);
|
||||
|
||||
var executionerAndHerWayOfLife = postLib.Series.First(x => x.Name == "The Executioner and Her Way of Life");
|
||||
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
|
||||
|
||||
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||
|
||||
// Add a new chapter to a volume of the series, and scan. Validate that only, and all directories of this
|
||||
// series are marked as HasChanged
|
||||
var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "The Executioner and Her Way of Life"),
|
||||
"The Executioner and Her Way of Life Vol. 1");
|
||||
File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0001.cbz"),
|
||||
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0002.cbz"));
|
||||
|
||||
// 4 series, of which 2 have volumes as directories
|
||||
var folderMap = await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id);
|
||||
Assert.Equal(6, folderMap.Count);
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true, folderMap, postLib);
|
||||
var changes = res.Where(sc => sc.HasChanged).ToList();
|
||||
Assert.Equal(2, changes.Count);
|
||||
// Only volumes of The Executioner and Her Way of Life should be marked as HasChanged (Spice and Wolf also has 2 volumes dirs)
|
||||
Assert.Equal(2, changes.Count(sc => sc.Folder.Contains("The Executioner and Her Way of Life")));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HasSeriesFolderNotChangedSinceLastScan_PublisherLayout()
|
||||
{
|
||||
const string testcase = "Subfolder always scanning fix publisher layout - Comic.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
_unitOfWork.LibraryRepository.Update(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Equal(4, postLib.Series.Count);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(2, spiceAndWolf.Volumes.Count);
|
||||
|
||||
var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
|
||||
Assert.Equal(2, frieren.Volumes.Count);
|
||||
|
||||
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||
|
||||
// Add a volume to a series, and scan. Ensure only this series is marked as HasChanged
|
||||
var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "YenPress"), "The Executioner and Her Way of Life");
|
||||
File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1.cbz"),
|
||||
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 2.cbz"));
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||
var changes = res.Count(sc => sc.HasChanged);
|
||||
Assert.Equal(1, changes);
|
||||
}
|
||||
|
||||
// TODO: Add back in (removed for Hotfix v0.8.5.x)
|
||||
//[Fact]
|
||||
public async Task SubFoldersNoSubFolders_SkipAll()
|
||||
{
|
||||
const string testcase = "Subfolders and files at root - Manga.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
_unitOfWork.LibraryRepository.Update(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||
|
||||
// Needs to be actual time as the write time is now, so if we set LastFolderChecked in the past
|
||||
// it'll always a scan as it was changed since the last scan.
|
||||
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||
Assert.DoesNotContain(res, sc => sc.HasChanged);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInRoot()
|
||||
{
|
||||
const string testcase = "Subfolders and files at root - Manga.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
_unitOfWork.LibraryRepository.Update(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||
|
||||
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
|
||||
_context.Series.Update(spiceAndWolf);
|
||||
await _context.SaveChangesAsync();
|
||||
|
||||
// Add file at series root
|
||||
var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
|
||||
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
|
||||
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||
var changes = res.Count(sc => sc.HasChanged);
|
||||
Assert.Equal(2, changes);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInSubFolder()
|
||||
{
|
||||
const string testcase = "Subfolders and files at root - Manga.json";
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
var testDirectoryPath = library.Folders.First().Path;
|
||||
|
||||
_unitOfWork.LibraryRepository.Update(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var fs = new FileSystem();
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||
|
||||
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
|
||||
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||
|
||||
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
|
||||
_context.Series.Update(spiceAndWolf);
|
||||
await _context.SaveChangesAsync();
|
||||
|
||||
// Add file in subfolder
|
||||
var spiceAndWolfDir = Path.Join(Path.Join(testDirectoryPath, "Spice and Wolf"), "Spice and Wolf Vol. 3");
|
||||
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0011.cbz"),
|
||||
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
|
||||
|
||||
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||
await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||
var changes = res.Count(sc => sc.HasChanged);
|
||||
Assert.Equal(2, changes);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,23 +1,8 @@
|
|||
using System.IO;
|
||||
using API.Data;
|
||||
using API.Data.Metadata;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Metadata;
|
||||
using API.Services.Tasks.Scanner;
|
||||
using API.SignalR;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class ProcessSeriesTests
|
||||
{
|
||||
|
||||
// TODO: Implement
|
||||
|
||||
#region UpdateSeriesMetadata
|
||||
|
||||
|
|
|
@ -11,15 +11,11 @@ using API.DTOs.ReadingLists;
|
|||
using API.DTOs.ReadingLists.CBL;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Plus;
|
||||
using API.Services.Tasks;
|
||||
using API.SignalR;
|
||||
using API.Tests.Helpers;
|
||||
using AutoMapper;
|
||||
using Microsoft.Data.Sqlite;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
@ -52,7 +48,9 @@ public class ReadingListServiceTests
|
|||
var mapper = config.CreateMapper();
|
||||
_unitOfWork = new UnitOfWork(_context, mapper, null!);
|
||||
|
||||
_readingListService = new ReadingListService(_unitOfWork, Substitute.For<ILogger<ReadingListService>>(), Substitute.For<IEventHub>());
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||
_readingListService = new ReadingListService(_unitOfWork, Substitute.For<ILogger<ReadingListService>>(),
|
||||
Substitute.For<IEventHub>(), Substitute.For<IImageService>(), ds);
|
||||
|
||||
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(),
|
||||
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
|
||||
|
@ -128,7 +126,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithAgeRating(AgeRating.Everyone)
|
||||
.Build()
|
||||
|
@ -177,7 +175,7 @@ public class ReadingListServiceTests
|
|||
.WithSeries(new SeriesBuilder("Test")
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithAgeRating(AgeRating.Everyone)
|
||||
.Build()
|
||||
|
@ -236,7 +234,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithAgeRating(AgeRating.Everyone)
|
||||
.Build()
|
||||
|
@ -296,7 +294,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithAgeRating(AgeRating.Everyone)
|
||||
.Build()
|
||||
|
@ -375,7 +373,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithAgeRating(AgeRating.Everyone)
|
||||
.Build()
|
||||
|
@ -432,7 +430,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithAgeRating(AgeRating.Everyone)
|
||||
.Build()
|
||||
|
@ -497,7 +495,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.Build()
|
||||
)
|
||||
|
@ -538,7 +536,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.Build()
|
||||
)
|
||||
|
@ -581,6 +579,93 @@ public class ReadingListServiceTests
|
|||
Assert.Equal(AgeRating.G, readingList.AgeRating);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateReadingListAgeRatingForSeries()
|
||||
{
|
||||
await ResetDb();
|
||||
var spiceAndWolf = new SeriesBuilder("Spice and Wolf")
|
||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes([
|
||||
new VolumeBuilder("1")
|
||||
.WithChapters([
|
||||
new ChapterBuilder("1").Build(),
|
||||
new ChapterBuilder("2").Build(),
|
||||
]).Build()
|
||||
]).Build();
|
||||
spiceAndWolf.Metadata.AgeRating = AgeRating.Everyone;
|
||||
|
||||
var othersidePicnic = new SeriesBuilder("Otherside Picnic ")
|
||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes([
|
||||
new VolumeBuilder("1")
|
||||
.WithChapters([
|
||||
new ChapterBuilder("1").Build(),
|
||||
new ChapterBuilder("2").Build(),
|
||||
]).Build()
|
||||
]).Build();
|
||||
othersidePicnic.Metadata.AgeRating = AgeRating.Everyone;
|
||||
|
||||
_context.AppUser.Add(new AppUser()
|
||||
{
|
||||
UserName = "Amelia",
|
||||
ReadingLists = new List<ReadingList>(),
|
||||
Libraries = new List<Library>
|
||||
{
|
||||
new LibraryBuilder("Test Library", LibraryType.LightNovel)
|
||||
.WithSeries(spiceAndWolf)
|
||||
.WithSeries(othersidePicnic)
|
||||
.Build(),
|
||||
},
|
||||
});
|
||||
|
||||
await _context.SaveChangesAsync();
|
||||
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("Amelia", AppUserIncludes.ReadingLists);
|
||||
Assert.NotNull(user);
|
||||
|
||||
var myTestReadingList = new ReadingListBuilder("MyReadingList").Build();
|
||||
var mySecondTestReadingList = new ReadingListBuilder("MySecondReadingList").Build();
|
||||
var myThirdTestReadingList = new ReadingListBuilder("MyThirdReadingList").Build();
|
||||
user.ReadingLists = new List<ReadingList>()
|
||||
{
|
||||
myTestReadingList,
|
||||
mySecondTestReadingList,
|
||||
myThirdTestReadingList,
|
||||
};
|
||||
|
||||
|
||||
await _readingListService.AddChaptersToReadingList(spiceAndWolf.Id, new List<int> {1, 2}, myTestReadingList);
|
||||
await _readingListService.AddChaptersToReadingList(othersidePicnic.Id, new List<int> {3, 4}, myTestReadingList);
|
||||
await _readingListService.AddChaptersToReadingList(spiceAndWolf.Id, new List<int> {1, 2}, myThirdTestReadingList);
|
||||
await _readingListService.AddChaptersToReadingList(othersidePicnic.Id, new List<int> {3, 4}, mySecondTestReadingList);
|
||||
|
||||
|
||||
_unitOfWork.UserRepository.Update(user);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
await _readingListService.CalculateReadingListAgeRating(myTestReadingList);
|
||||
await _readingListService.CalculateReadingListAgeRating(mySecondTestReadingList);
|
||||
Assert.Equal(AgeRating.Everyone, myTestReadingList.AgeRating);
|
||||
Assert.Equal(AgeRating.Everyone, mySecondTestReadingList.AgeRating);
|
||||
Assert.Equal(AgeRating.Everyone, myThirdTestReadingList.AgeRating);
|
||||
|
||||
await _readingListService.UpdateReadingListAgeRatingForSeries(othersidePicnic.Id, AgeRating.Mature);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
// Reading lists containing Otherside Picnic are updated
|
||||
myTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(1);
|
||||
Assert.NotNull(myTestReadingList);
|
||||
Assert.Equal(AgeRating.Mature, myTestReadingList.AgeRating);
|
||||
|
||||
mySecondTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(2);
|
||||
Assert.NotNull(mySecondTestReadingList);
|
||||
Assert.Equal(AgeRating.Mature, mySecondTestReadingList.AgeRating);
|
||||
|
||||
// Unrelated reading list is not updated
|
||||
myThirdTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(3);
|
||||
Assert.NotNull(myThirdTestReadingList);
|
||||
Assert.Equal(AgeRating.Everyone, myThirdTestReadingList.AgeRating);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CalculateStartAndEndDates
|
||||
|
@ -593,7 +678,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.Build()
|
||||
)
|
||||
|
@ -645,7 +730,7 @@ public class ReadingListServiceTests
|
|||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||
.WithVolumes(new List<Volume>()
|
||||
{
|
||||
new VolumeBuilder("0")
|
||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1")
|
||||
.WithReleaseDate(new DateTime(2005, 03, 01))
|
||||
.Build()
|
||||
|
@ -711,6 +796,9 @@ public class ReadingListServiceTests
|
|||
Assert.Equal("Issue #1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", "1", "The Title")));
|
||||
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", chapterTitleName: "The Title")));
|
||||
Assert.Equal("The Title", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterTitleName: "The Title")));
|
||||
var dto = CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterNumber: "The Special Title");
|
||||
dto.IsSpecial = true;
|
||||
Assert.Equal("The Special Title", ReadingListService.FormatTitle(dto));
|
||||
|
||||
// Book Library & Archive
|
||||
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Book, "1")));
|
||||
|
@ -736,8 +824,8 @@ public class ReadingListServiceTests
|
|||
}
|
||||
|
||||
private static ReadingListItemDto CreateListItemDto(MangaFormat seriesFormat, LibraryType libraryType,
|
||||
string volumeNumber = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
|
||||
string chapterNumber = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
|
||||
string volumeNumber = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
|
||||
string chapterNumber =API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
|
||||
string chapterTitleName = "")
|
||||
{
|
||||
return new ReadingListItemDto()
|
||||
|
@ -1205,6 +1293,65 @@ public class ReadingListServiceTests
|
|||
Assert.Equal(2, createdList.Items.First(item => item.Order == 2).ChapterId);
|
||||
Assert.Equal(4, createdList.Items.First(item => item.Order == 3).ChapterId);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// This test is about ensuring Annuals that are a separate series can be linked up properly (ComicVine)
|
||||
/// </summary>
|
||||
//[Fact]
|
||||
public async Task CreateReadingListFromCBL_ShouldCreateList_WithAnnuals()
|
||||
{
|
||||
// TODO: Implement this correctly
|
||||
await ResetDb();
|
||||
var cblReadingList = LoadCblFromPath("Annual.cbl");
|
||||
|
||||
// Mock up our series
|
||||
var fablesSeries = new SeriesBuilder("Fables")
|
||||
.WithVolume(new VolumeBuilder("2002")
|
||||
.WithMinNumber(1)
|
||||
.WithChapter(new ChapterBuilder("1").Build())
|
||||
.WithChapter(new ChapterBuilder("2").Build())
|
||||
.WithChapter(new ChapterBuilder("3").Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
var fables2Series = new SeriesBuilder("Fables Annual")
|
||||
.WithVolume(new VolumeBuilder("2003")
|
||||
.WithMinNumber(1)
|
||||
.WithChapter(new ChapterBuilder("1").Build())
|
||||
.Build())
|
||||
.Build();
|
||||
|
||||
_context.AppUser.Add(new AppUser()
|
||||
{
|
||||
UserName = "majora2007",
|
||||
ReadingLists = new List<ReadingList>(),
|
||||
Libraries = new List<Library>()
|
||||
{
|
||||
new LibraryBuilder("Test LIb 2", LibraryType.Book)
|
||||
.WithSeries(fablesSeries)
|
||||
.WithSeries(fables2Series)
|
||||
.Build()
|
||||
},
|
||||
});
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
var importSummary = await _readingListService.CreateReadingListFromCbl(1, cblReadingList);
|
||||
|
||||
Assert.Equal(CblImportResult.Success, importSummary.Success);
|
||||
Assert.NotEmpty(importSummary.Results);
|
||||
|
||||
var createdList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(1);
|
||||
|
||||
Assert.NotNull(createdList);
|
||||
Assert.Equal("Annual", createdList.Title);
|
||||
|
||||
Assert.Equal(4, createdList.Items.Count);
|
||||
Assert.Equal(1, createdList.Items.First(item => item.Order == 0).ChapterId);
|
||||
Assert.Equal(2, createdList.Items.First(item => item.Order == 1).ChapterId);
|
||||
Assert.Equal(4, createdList.Items.First(item => item.Order == 2).ChapterId);
|
||||
Assert.Equal(3, createdList.Items.First(item => item.Order == 3).ChapterId);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CreateReadingListsFromSeries
|
||||
|
@ -1239,7 +1386,7 @@ public class ReadingListServiceTests
|
|||
|
||||
var series2 = new SeriesBuilder("Series 2")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("1").Build())
|
||||
.WithChapter(new ChapterBuilder("2").Build())
|
||||
.Build())
|
||||
|
|
|
@ -1,71 +1,941 @@
|
|||
using System.Collections.Generic;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data.Metadata;
|
||||
using API.Data.Repositories;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.Metadata;
|
||||
using API.Extensions;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services.Tasks;
|
||||
using API.Services.Tasks.Scanner;
|
||||
using API.Services.Tasks.Scanner.Parser;
|
||||
using API.Tests.Helpers;
|
||||
using Hangfire;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class ScannerServiceTests
|
||||
public class ScannerServiceTests : AbstractDbTest
|
||||
{
|
||||
[Fact]
|
||||
public void FindSeriesNotOnDisk_Should_Remove1()
|
||||
private readonly ITestOutputHelper _testOutputHelper;
|
||||
private readonly ScannerHelper _scannerHelper;
|
||||
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests");
|
||||
|
||||
public ScannerServiceTests(ITestOutputHelper testOutputHelper)
|
||||
{
|
||||
var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
|
||||
_testOutputHelper = testOutputHelper;
|
||||
|
||||
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive});
|
||||
//AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub});
|
||||
// Set up Hangfire to use in-memory storage for testing
|
||||
GlobalConfiguration.Configuration.UseInMemoryStorage();
|
||||
_scannerHelper = new ScannerHelper(_unitOfWork, testOutputHelper);
|
||||
}
|
||||
|
||||
var existingSeries = new List<Series>
|
||||
protected override async Task ResetDb()
|
||||
{
|
||||
_context.Library.RemoveRange(_context.Library);
|
||||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
|
||||
protected async Task SetAllSeriesLastScannedInThePast(Library library, TimeSpan? duration = null)
|
||||
{
|
||||
foreach (var series in library.Series)
|
||||
{
|
||||
new SeriesBuilder("Darker Than Black")
|
||||
.WithFormat(MangaFormat.Epub)
|
||||
await SetLastScannedInThePast(series, duration, false);
|
||||
}
|
||||
await _context.SaveChangesAsync();
|
||||
}
|
||||
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithName("1")
|
||||
.Build())
|
||||
.WithLocalizedName("Darker Than Black")
|
||||
.Build()
|
||||
};
|
||||
protected async Task SetLastScannedInThePast(Series series, TimeSpan? duration = null, bool save = true)
|
||||
{
|
||||
duration ??= TimeSpan.FromMinutes(2);
|
||||
series.LastFolderScanned = DateTime.Now.Subtract(duration.Value);
|
||||
_context.Series.Update(series);
|
||||
|
||||
Assert.Single(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
|
||||
if (save)
|
||||
{
|
||||
await _context.SaveChangesAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
|
||||
public async Task ScanLibrary_ComicVine_PublisherFolder()
|
||||
{
|
||||
var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
|
||||
var testcase = "Publisher - ComicVine.json";
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive});
|
||||
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive});
|
||||
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive});
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Equal(4, postLib.Series.Count);
|
||||
}
|
||||
|
||||
var existingSeries = new List<Series>
|
||||
[Fact]
|
||||
public async Task ScanLibrary_ShouldCombineNestedFolder()
|
||||
{
|
||||
var testcase = "Series and Series-Series Combined - Manga.json";
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
Assert.Equal(2, postLib.Series.First().Volumes.Count);
|
||||
}
|
||||
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_FlatSeries()
|
||||
{
|
||||
const string testcase = "Flat Series - Manga.json";
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
Assert.Equal(3, postLib.Series.First().Volumes.Count);
|
||||
|
||||
// TODO: Trigger a deletion of ch 10
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_FlatSeriesWithSpecialFolder()
|
||||
{
|
||||
const string testcase = "Flat Series with Specials Folder Alt Naming - Manga.json";
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
Assert.Equal(4, postLib.Series.First().Volumes.Count);
|
||||
Assert.NotNull(postLib.Series.First().Volumes.FirstOrDefault(v => v.Chapters.FirstOrDefault(c => c.IsSpecial) != null));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_FlatSeriesWithSpecialFolder_AlternativeNaming()
|
||||
{
|
||||
const string testcase = "Flat Series with Specials Folder Alt Naming - Manga.json";
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
Assert.Equal(4, postLib.Series.First().Volumes.Count);
|
||||
Assert.NotNull(postLib.Series.First().Volumes.FirstOrDefault(v => v.Chapters.FirstOrDefault(c => c.IsSpecial) != null));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_FlatSeriesWithSpecial()
|
||||
{
|
||||
const string testcase = "Flat Special - Manga.json";
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
Assert.Equal(3, postLib.Series.First().Volumes.Count);
|
||||
Assert.NotNull(postLib.Series.First().Volumes.FirstOrDefault(v => v.Chapters.FirstOrDefault(c => c.IsSpecial) != null));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_SeriesWithUnbalancedParenthesis()
|
||||
{
|
||||
const string testcase = "Scan Library Parses as ( - Manga.json";
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
|
||||
var series = postLib.Series.First();
|
||||
|
||||
Assert.Equal("Mika-nee no Tanryoku Shidou - Mika s Guide to Self-Confidence (THE IDOLM@STE", series.Name);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// This is testing that if the first file is named A and has a localized name of B if all other files are named B, it should still group and name the series A
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public async Task ScanLibrary_LocalizedSeries()
|
||||
{
|
||||
const string testcase = "Series with Localized - Manga.json";
|
||||
|
||||
// Get the first file and generate a ComicInfo
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
infos.Add("My Dress-Up Darling v01.cbz", new ComicInfo()
|
||||
{
|
||||
new SeriesBuilder("Cage of Eden")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
Series = "My Dress-Up Darling",
|
||||
LocalizedSeries = "Sono Bisque Doll wa Koi wo Suru"
|
||||
});
|
||||
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithName("1")
|
||||
.Build())
|
||||
.WithLocalizedName("Darker Than Black")
|
||||
.Build(),
|
||||
new SeriesBuilder("Darker Than Black")
|
||||
.WithFormat(MangaFormat.Archive)
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithName("1")
|
||||
.Build())
|
||||
.WithLocalizedName("Darker Than Black")
|
||||
.Build(),
|
||||
};
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
Assert.Equal(3, postLib.Series.First().Volumes.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_LocalizedSeries2()
|
||||
{
|
||||
const string testcase = "Series with Localized 2 - Manga.json";
|
||||
|
||||
// Get the first file and generate a ComicInfo
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
infos.Add("Immoral Guild v01.cbz", new ComicInfo()
|
||||
{
|
||||
Series = "Immoral Guild",
|
||||
LocalizedSeries = "Futoku no Guild" // Filename has a capital N and localizedSeries has lowercase
|
||||
});
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var s = postLib.Series.First();
|
||||
Assert.Equal("Immoral Guild", s.Name);
|
||||
Assert.Equal("Futoku no Guild", s.LocalizedName);
|
||||
Assert.Equal(3, s.Volumes.Count);
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Special Keywords shouldn't be removed from the series name and thus these 2 should group
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public async Task ScanLibrary_ExtraShouldNotAffect()
|
||||
{
|
||||
const string testcase = "Series with Extra - Manga.json";
|
||||
|
||||
// Get the first file and generate a ComicInfo
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
infos.Add("Vol.01.cbz", new ComicInfo()
|
||||
{
|
||||
Series = "The Novel's Extra",
|
||||
});
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var s = postLib.Series.First();
|
||||
Assert.Equal("The Novel's Extra", s.Name);
|
||||
Assert.Equal(2, s.Volumes.Count);
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Files under a folder with a SP marker should group into one issue
|
||||
/// </summary>
|
||||
/// <remarks>https://github.com/Kareadita/Kavita/issues/3299</remarks>
|
||||
[Fact]
|
||||
public async Task ScanLibrary_ImageSeries_SpecialGrouping()
|
||||
{
|
||||
const string testcase = "Image Series with SP Folder - Manga.json";
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
Assert.Equal(3, postLib.Series.First().Volumes.Count);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// This test is currently disabled because the Image parser is unable to support multiple files mapping into one single Special.
|
||||
/// https://github.com/Kareadita/Kavita/issues/3299
|
||||
/// </summary>
|
||||
public async Task ScanLibrary_ImageSeries_SpecialGrouping_NonEnglish()
|
||||
{
|
||||
const string testcase = "Image Series with SP Folder (Non English) - Image.json";
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var series = postLib.Series.First();
|
||||
Assert.Equal(3, series.Volumes.Count);
|
||||
var specialVolume = series.Volumes.FirstOrDefault(v => v.Name == Parser.SpecialVolume);
|
||||
Assert.NotNull(specialVolume);
|
||||
Assert.Single(specialVolume.Chapters);
|
||||
Assert.True(specialVolume.Chapters.First().IsSpecial);
|
||||
//Assert.Equal("葬送のフリーレン 公式ファンブック SP01", specialVolume.Chapters.First().Title);
|
||||
}
|
||||
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_PublishersInheritFromChapters()
|
||||
{
|
||||
const string testcase = "Flat Special - Manga.json";
|
||||
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
infos.Add("Uzaki-chan Wants to Hang Out! v01 (2019) (Digital) (danke-Empire).cbz", new ComicInfo()
|
||||
{
|
||||
Publisher = "Correct Publisher"
|
||||
});
|
||||
infos.Add("Uzaki-chan Wants to Hang Out! - 2022 New Years Special SP01.cbz", new ComicInfo()
|
||||
{
|
||||
Publisher = "Special Publisher"
|
||||
});
|
||||
infos.Add("Uzaki-chan Wants to Hang Out! - Ch. 103 - Kouhai and Control.cbz", new ComicInfo()
|
||||
{
|
||||
Publisher = "Chapter Publisher"
|
||||
});
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var publishers = postLib.Series.First().Metadata.People
|
||||
.Where(p => p.Role == PersonRole.Publisher);
|
||||
Assert.Equal(3, publishers.Count());
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Tests that pdf parser handles the loose chapters correctly
|
||||
/// https://github.com/Kareadita/Kavita/issues/3148
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public async Task ScanLibrary_LooseChapters_Pdf()
|
||||
{
|
||||
const string testcase = "PDF Comic Chapters - Comic.json";
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var series = postLib.Series.First();
|
||||
Assert.Single(series.Volumes);
|
||||
Assert.Equal(4, series.Volumes.First().Chapters.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_LooseChapters_Pdf_LN()
|
||||
{
|
||||
const string testcase = "PDF Comic Chapters - LightNovel.json";
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var series = postLib.Series.First();
|
||||
Assert.Single(series.Volumes);
|
||||
Assert.Equal(4, series.Volumes.First().Chapters.Count);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// This is the same as doing ScanFolder as the case where it can find the series is just ScanSeries
|
||||
/// </summary>
|
||||
[Fact]
|
||||
public async Task ScanSeries_NewChapterInNestedFolder()
|
||||
{
|
||||
const string testcase = "Series with Localized - Manga.json";
|
||||
|
||||
// Get the first file and generate a ComicInfo
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
infos.Add("My Dress-Up Darling v01.cbz", new ComicInfo()
|
||||
{
|
||||
Series = "My Dress-Up Darling",
|
||||
LocalizedSeries = "Sono Bisque Doll wa Koi wo Suru"
|
||||
});
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
|
||||
var series = postLib.Series.First();
|
||||
Assert.Equal(3, series.Volumes.Count);
|
||||
|
||||
// Bootstrap a new file in the nested "Sono Bisque Doll wa Koi wo Suru" directory and perform a series scan
|
||||
var testDirectory = Path.Combine(_testDirectory, Path.GetFileNameWithoutExtension(testcase));
|
||||
await _scannerHelper.Scaffold(testDirectory, ["My Dress-Up Darling/Sono Bisque Doll wa Koi wo Suru ch 11.cbz"]);
|
||||
|
||||
// Now that a new file exists in the subdirectory, scan again
|
||||
await scanner.ScanSeries(series.Id);
|
||||
Assert.Single(postLib.Series);
|
||||
Assert.Equal(3, series.Volumes.Count);
|
||||
Assert.Equal(2, series.Volumes.First(v => v.MinNumber.Is(Parser.LooseLeafVolumeNumber)).Chapters.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_LocalizedSeries_MatchesFilename()
|
||||
{
|
||||
const string testcase = "Localized Name matches Filename - Manga.json";
|
||||
|
||||
// Get the first file and generate a ComicInfo
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
infos.Add("Futoku no Guild v01.cbz", new ComicInfo()
|
||||
{
|
||||
Series = "Immoral Guild",
|
||||
LocalizedSeries = "Futoku no Guild"
|
||||
});
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var s = postLib.Series.First();
|
||||
Assert.Equal("Immoral Guild", s.Name);
|
||||
Assert.Equal("Futoku no Guild", s.LocalizedName);
|
||||
Assert.Single(s.Volumes);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_LocalizedSeries_MatchesFilename_SameNames()
|
||||
{
|
||||
const string testcase = "Localized Name matches Filename - Manga.json";
|
||||
|
||||
// Get the first file and generate a ComicInfo
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
infos.Add("Futoku no Guild v01.cbz", new ComicInfo()
|
||||
{
|
||||
Series = "Futoku no Guild",
|
||||
LocalizedSeries = "Futoku no Guild"
|
||||
});
|
||||
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var s = postLib.Series.First();
|
||||
Assert.Equal("Futoku no Guild", s.Name);
|
||||
Assert.Equal("Futoku no Guild", s.LocalizedName);
|
||||
Assert.Single(s.Volumes);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_ExcludePattern_Works()
|
||||
{
|
||||
const string testcase = "Exclude Pattern 1 - Manga.json";
|
||||
|
||||
// Get the first file and generate a ComicInfo
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
library.LibraryExcludePatterns = [new LibraryExcludePattern() {Pattern = "**/Extra/*"}];
|
||||
_unitOfWork.LibraryRepository.Update(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var s = postLib.Series.First();
|
||||
Assert.Equal(2, s.Volumes.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanLibrary_ExcludePattern_FlippedSlashes_Works()
|
||||
{
|
||||
const string testcase = "Exclude Pattern 1 - Manga.json";
|
||||
|
||||
// Get the first file and generate a ComicInfo
|
||||
var infos = new Dictionary<string, ComicInfo>();
|
||||
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||
|
||||
library.LibraryExcludePatterns = [new LibraryExcludePattern() {Pattern = "**\\Extra\\*"}];
|
||||
_unitOfWork.LibraryRepository.Update(library);
|
||||
await _unitOfWork.CommitAsync();
|
||||
|
||||
|
||||
var scanner = _scannerHelper.CreateServices();
|
||||
await scanner.ScanLibrary(library.Id);
|
||||
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||
|
||||
Assert.NotNull(postLib);
|
||||
Assert.Single(postLib.Series);
|
||||
var s = postLib.Series.First();
|
||||
Assert.Equal(2, s.Volumes.Count);
|
||||
}
|
||||
|
||||
/// <summary>
/// Validates that with two library roots, a forced rescan picks up a file added to one root
/// without deleting the series that live in the other root.
/// </summary>
[Fact]
public async Task ScanLibrary_MultipleRoots_MultipleScans_DataPersists_Forced()
{
    const string testcase = "Multiple Roots - Manga.json";

    // Get the first file and generate a ComicInfo
    var infos = new Dictionary<string, ComicInfo>();
    var library = await _scannerHelper.GenerateScannerData(testcase, infos);

    // Point the library at two separate roots inside the generated test directory
    var testDirectoryPath =
        Path.Join(
            Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests"),
            testcase.Replace(".json", string.Empty));
    library.Folders =
    [
        new FolderPath() {Path = Path.Join(testDirectoryPath, "Root 1")},
        new FolderPath() {Path = Path.Join(testDirectoryPath, "Root 2")}
    ];

    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    // First scan: both roots should contribute their series
    var scanner = _scannerHelper.CreateServices();
    await scanner.ScanLibrary(library.Id);
    var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);

    Assert.NotNull(postLib);
    Assert.Equal(2, postLib.Series.Count);
    var s = postLib.Series.First(s => s.Name == "Plush");
    Assert.Equal(2, s.Volumes.Count);
    var s2 = postLib.Series.First(s => s.Name == "Accel");
    Assert.Single(s2.Volumes);

    // Make a change (copy a file into only 1 root)
    var root1PlushFolder = Path.Join(testDirectoryPath, "Root 1/Antarctic Press/Plush");
    File.Copy(Path.Join(root1PlushFolder, "Plush v02.cbz"), Path.Join(root1PlushFolder, "Plush v03.cbz"));

    // Forced rescan: the new Plush volume must be picked up and the series in the other
    // root (Accel) must persist untouched
    await scanner.ScanLibrary(library.Id, true);

    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.Equal(2, postLib.Series.Count);
    s = postLib.Series.First(s => s.Name == "Plush");
    Assert.Equal(3, s.Volumes.Count);
    s2 = postLib.Series.First(s => s.Name == "Accel");
    Assert.Single(s2.Volumes);
}
|
||||
|
||||
/// <summary>
/// Regression bug appeared where multi-root and one root gets a new file, on next scan of library,
/// the series in the other root are deleted. (This is actually failing because the file in Root 1 isn't being detected)
/// </summary>
[Fact]
public async Task ScanLibrary_MultipleRoots_MultipleScans_DataPersists_NonForced()
{
    const string testcase = "Multiple Roots - Manga.json";

    // Get the first file and generate a ComicInfo
    var infos = new Dictionary<string, ComicInfo>();
    var library = await _scannerHelper.GenerateScannerData(testcase, infos);

    // Point the library at two separate roots inside the generated test directory
    var testDirectoryPath =
        Path.Join(
            Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests"),
            testcase.Replace(".json", string.Empty));
    library.Folders =
    [
        new FolderPath() {Path = Path.Join(testDirectoryPath, "Root 1")},
        new FolderPath() {Path = Path.Join(testDirectoryPath, "Root 2")}
    ];

    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    // First scan: both roots should contribute their series
    var scanner = _scannerHelper.CreateServices();
    await scanner.ScanLibrary(library.Id);
    var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);

    Assert.NotNull(postLib);
    Assert.Equal(2, postLib.Series.Count);
    var s = postLib.Series.First(s => s.Name == "Plush");
    Assert.Equal(2, s.Volumes.Count);
    var s2 = postLib.Series.First(s => s.Name == "Accel");
    Assert.Single(s2.Volumes);

    // Make a change (copy a file into only 1 root)
    var root1PlushFolder = Path.Join(testDirectoryPath, "Root 1/Antarctic Press/Plush");
    File.Copy(Path.Join(root1PlushFolder, "Plush v02.cbz"), Path.Join(root1PlushFolder, "Plush v03.cbz"));

    // Emulate time passage by updating lastFolderScan to be a min in the past
    await SetLastScannedInThePast(s);

    // Non-forced rescan: the change detection should still find the new Plush volume
    // and must not drop the Accel series from the other root
    await scanner.ScanLibrary(library.Id, false);

    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.Equal(2, postLib.Series.Count);
    s = postLib.Series.First(s => s.Name == "Plush");
    Assert.Equal(3, s.Volumes.Count);
    s2 = postLib.Series.First(s => s.Name == "Accel");
    Assert.Single(s2.Volumes);
}
|
||||
|
||||
/// <summary>
/// Replicates alternating removal/re-add of a library root: removing Root 2 must remove its
/// series, re-adding it must bring the series back, and a subsequent scan must be stable.
/// </summary>
[Fact]
public async Task ScanLibrary_AlternatingRemoval_IssueReplication()
{
    // https://github.com/Kareadita/Kavita/issues/3476#issuecomment-2661635558
    const string testcase = "Alternating Removal - Manga.json";

    // Setup: Generate test library
    var infos = new Dictionary<string, ComicInfo>();
    var library = await _scannerHelper.GenerateScannerData(testcase, infos);

    var testDirectoryPath = Path.Combine(Directory.GetCurrentDirectory(),
        "../../../Services/Test Data/ScannerService/ScanTests",
        testcase.Replace(".json", string.Empty));

    library.Folders =
    [
        new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 1") },
        new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 2") }
    ];

    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    var scanner = _scannerHelper.CreateServices();

    // First Scan: Everything should be added
    await scanner.ScanLibrary(library.Id);
    var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);

    Assert.NotNull(postLib);
    Assert.Contains(postLib.Series, s => s.Name == "Accel");
    Assert.Contains(postLib.Series, s => s.Name == "Plush");

    // Second Scan: Remove Root 2, expect Accel to be removed
    library.Folders = [new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 1") }];
    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    // Emulate time passage by updating lastFolderScan to be a min in the past
    // NOTE(review): this loop duplicates what SetAllSeriesLastScannedInThePast (used below)
    // appears to do — consider calling the helper instead; confirm the helper's semantics first
    foreach (var s in postLib.Series)
    {
        s.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(1));
        _context.Series.Update(s);
    }
    await _context.SaveChangesAsync();

    await scanner.ScanLibrary(library.Id);
    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);

    Assert.DoesNotContain(postLib.Series, s => s.Name == "Accel"); // Ensure Accel is gone
    Assert.Contains(postLib.Series, s => s.Name == "Plush");

    // Third Scan: Re-add Root 2, Accel should come back
    library.Folders =
    [
        new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 1") },
        new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 2") }
    ];
    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    // Emulate time passage by updating lastFolderScan to be a min in the past
    foreach (var s in postLib.Series)
    {
        s.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(1));
        _context.Series.Update(s);
    }
    await _context.SaveChangesAsync();

    await scanner.ScanLibrary(library.Id);
    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);

    Assert.Contains(postLib.Series, s => s.Name == "Accel"); // Accel should be back
    Assert.Contains(postLib.Series, s => s.Name == "Plush");

    // Emulate time passage by updating lastFolderScan to be a min in the past
    await SetAllSeriesLastScannedInThePast(postLib);

    // Fourth Scan: Run again to check stability (should not remove Accel)
    await scanner.ScanLibrary(library.Id);
    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);

    Assert.Contains(postLib.Series, s => s.Name == "Accel");
    Assert.Contains(postLib.Series, s => s.Name == "Plush");
}
|
||||
|
||||
/// <summary>
/// Validates that series deleted from the database (as the UI would do) are re-created
/// by the next library scan because their files still exist on disk.
/// </summary>
[Fact]
public async Task ScanLibrary_DeleteSeriesInUI_ComeBack()
{
    const string testcase = "Delete Series In UI - Manga.json";

    // Setup: Generate test library
    var infos = new Dictionary<string, ComicInfo>();
    var library = await _scannerHelper.GenerateScannerData(testcase, infos);

    var testDirectoryPath = Path.Combine(Directory.GetCurrentDirectory(),
        "../../../Services/Test Data/ScannerService/ScanTests",
        testcase.Replace(".json", string.Empty));

    library.Folders =
    [
        new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 1") },
        new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 2") }
    ];

    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    var scanner = _scannerHelper.CreateServices();

    // First Scan: Everything should be added
    await scanner.ScanLibrary(library.Id);
    var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);

    Assert.NotNull(postLib);
    Assert.Contains(postLib.Series, s => s.Name == "Accel");
    Assert.Contains(postLib.Series, s => s.Name == "Plush");

    // Second Scan: Delete the Series (emulates a user deleting them from the UI)
    library.Series = [];
    await _unitOfWork.CommitAsync();

    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);
    Assert.Empty(postLib.Series);

    await scanner.ScanLibrary(library.Id);
    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);

    // Both series should be re-created by the scan since their files are still on disk
    Assert.Contains(postLib.Series, s => s.Name == "Accel");
    Assert.Contains(postLib.Series, s => s.Name == "Plush");
}
|
||||
|
||||
/// <summary>
/// Validates that adding one chapter file inside a series subfolder is picked up by the next
/// scan and that no other series or chapters in the library are lost in the process.
/// </summary>
[Fact]
public async Task SubFolders_NoRemovals_ChangesFound()
{
    const string testcase = "Subfolders always scanning all series changes - Manga.json";
    var infos = new Dictionary<string, ComicInfo>();
    var library = await _scannerHelper.GenerateScannerData(testcase, infos);
    var testDirectoryPath = library.Folders.First().Path;

    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    var scanner = _scannerHelper.CreateServices();
    await scanner.ScanLibrary(library.Id);

    var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);
    Assert.Equal(4, postLib.Series.Count);

    // Baseline volume/chapter counts for three of the four series
    var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
    Assert.Equal(2, spiceAndWolf.Volumes.Count);
    Assert.Equal(3, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));

    var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
    Assert.Single(frieren.Volumes);
    Assert.Equal(2, frieren.Volumes.Sum(v => v.Chapters.Count));

    var executionerAndHerWayOfLife = postLib.Series.First(x => x.Name == "The Executioner and Her Way of Life");
    Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
    Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Sum(v => v.Chapters.Count));

    await SetAllSeriesLastScannedInThePast(postLib);

    // Add a new chapter to a volume of the series, and scan. Validate that no chapters were lost, and the new
    // chapter was added
    var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "The Executioner and Her Way of Life"),
        "The Executioner and Her Way of Life Vol. 1");
    File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0001.cbz"),
        Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0002.cbz"));

    await scanner.ScanLibrary(library.Id);
    await _unitOfWork.CommitAsync();

    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);
    Assert.Equal(4, postLib.Series.Count);

    // Untouched series keep their exact counts
    spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
    Assert.Equal(2, spiceAndWolf.Volumes.Count);
    Assert.Equal(3, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));

    frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
    Assert.Single(frieren.Volumes);
    Assert.Equal(2, frieren.Volumes.Sum(v => v.Chapters.Count));

    executionerAndHerWayOfLife = postLib.Series.First(x => x.Name == "The Executioner and Her Way of Life");
    Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
    Assert.Equal(3, executionerAndHerWayOfLife.Volumes.Sum(v => v.Chapters.Count)); // Incremented by 1
}
|
||||
|
||||
/// <summary>
/// Validates that deleting a series directory on disk — with no other filesystem changes —
/// is detected by the scan and the series is removed, while the remaining series is kept intact.
/// </summary>
[Fact]
public async Task RemovalPickedUp_NoOtherChanges()
{
    const string testcase = "Series removed when no other changes are made - Manga.json";
    var infos = new Dictionary<string, ComicInfo>();
    var library = await _scannerHelper.GenerateScannerData(testcase, infos);
    var testDirectoryPath = library.Folders.First().Path;

    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    // First scan: both series should be present
    var scanner = _scannerHelper.CreateServices();
    await scanner.ScanLibrary(library.Id);

    var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);
    Assert.Equal(2, postLib.Series.Count);

    // Remove one series' folder from disk entirely
    var executionerCopyDir = Path.Join(testDirectoryPath, "The Executioner and Her Way of Life");
    Directory.Delete(executionerCopyDir, true);

    await scanner.ScanLibrary(library.Id);
    await _unitOfWork.CommitAsync();

    // Only Spice and Wolf should survive, with its volumes untouched
    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);
    Assert.Single(postLib.Series);
    Assert.Single(postLib.Series, s => s.Name == "Spice and Wolf");
    Assert.Equal(2, postLib.Series.First().Volumes.Count);
}
|
||||
|
||||
/// <summary>
/// Validates pickup of new files for a series that mixes loose files at the series root with
/// chapter files in subfolders: a file added at either level must be detected on rescan.
/// </summary>
[Fact]
public async Task SubFoldersNoSubFolders_CorrectPickupAfterAdd()
{
    // This test case is used in multiple tests and can result in conflict if not separated
    const string testcase = "Subfolders and files at root (2) - Manga.json";
    var infos = new Dictionary<string, ComicInfo>();
    var library = await _scannerHelper.GenerateScannerData(testcase, infos);
    var testDirectoryPath = library.Folders.First().Path;

    _unitOfWork.LibraryRepository.Update(library);
    await _unitOfWork.CommitAsync();

    var scanner = _scannerHelper.CreateServices();
    await scanner.ScanLibrary(library.Id);

    // Baseline: one series with 3 volumes / 4 chapters
    var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);
    Assert.Single(postLib.Series);

    var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
    Assert.Equal(3, spiceAndWolf.Volumes.Count);
    Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));

    await SetLastScannedInThePast(spiceAndWolf);

    // Add volume to Spice and Wolf series directory
    var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
    File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
        Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));

    await scanner.ScanLibrary(library.Id);

    // The root-level file must produce a new volume (4 volumes / 5 chapters)
    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);
    Assert.Single(postLib.Series);

    spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
    Assert.Equal(4, spiceAndWolf.Volumes.Count);
    Assert.Equal(5, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));

    await SetLastScannedInThePast(spiceAndWolf);

    // Add file in subfolder
    spiceAndWolfDir = Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3");
    File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0012.cbz"),
        Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));

    await scanner.ScanLibrary(library.Id);

    // The subfolder file must add one chapter to the existing volume (still 4 volumes, 6 chapters)
    postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);
    Assert.Single(postLib.Series);

    spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
    Assert.Equal(4, spiceAndWolf.Volumes.Count);
    Assert.Equal(6, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));

}
|
||||
|
||||
|
||||
/// <summary>
/// Ensure when Kavita scans, the sort order of chapters is correct
/// </summary>
[Fact]
public async Task ScanLibrary_SortOrderWorks()
{
    const string testcase = "Sort Order - Manga.json";

    var library = await _scannerHelper.GenerateScannerData(testcase);

    var scanner = _scannerHelper.CreateServices();
    await scanner.ScanLibrary(library.Id);
    var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
    Assert.NotNull(postLib);

    // Get the loose leaf volume and confirm each chapter aligns with expectation of Sort Order
    var series = postLib.Series.First();
    Assert.NotNull(series);

    var volume = series.Volumes.FirstOrDefault();
    Assert.NotNull(volume);

    // Chapters ordered by SortOrder are expected at 1, 4, 5 (Is() is a float comparison helper)
    var sortedChapters = volume.Chapters.OrderBy(c => c.SortOrder).ToList();
    Assert.True(sortedChapters[0].SortOrder.Is(1f));
    Assert.True(sortedChapters[1].SortOrder.Is(4f));
    Assert.True(sortedChapters[2].SortOrder.Is(5f));
}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,208 @@
|
|||
using API.Services.Plus;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.DTOs.Scrobbling;
|
||||
using API.Entities.Enums;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services;
|
||||
using API.Services.Plus;
|
||||
using API.SignalR;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
#nullable enable
|
||||
|
||||
public class ScrobblingServiceTests
|
||||
public class ScrobblingServiceTests : AbstractDbTest
|
||||
{
|
||||
// System under test
private readonly ScrobblingService _service;
// Mocked collaborators (NSubstitute); the unit of work/context come from AbstractDbTest
private readonly ILicenseService _licenseService;
private readonly ILocalizationService _localizationService;
private readonly ILogger<ScrobblingService> _logger;
private readonly IEmailService _emailService;

/// <summary>
/// Builds the ScrobblingService against the real test database (via AbstractDbTest)
/// with all external collaborators substituted.
/// </summary>
public ScrobblingServiceTests()
{
    _licenseService = Substitute.For<ILicenseService>();
    _localizationService = Substitute.For<ILocalizationService>();
    _logger = Substitute.For<ILogger<ScrobblingService>>();
    _emailService = Substitute.For<IEmailService>();

    _service = new ScrobblingService(_unitOfWork, Substitute.For<IEventHub>(), _logger, _licenseService, _localizationService, _emailService);
}
|
||||
|
||||
/// <summary>
/// Clears all scrobbling-related state between tests.
/// Removal order (events → series → libraries → users) respects entity relationships.
/// </summary>
protected override async Task ResetDb()
{
    _context.ScrobbleEvent.RemoveRange(_context.ScrobbleEvent.ToList());
    _context.Series.RemoveRange(_context.Series.ToList());
    _context.Library.RemoveRange(_context.Library.ToList());
    _context.AppUser.RemoveRange(_context.AppUser.ToList());

    await _unitOfWork.CommitAsync();
}
|
||||
|
||||
/// <summary>
/// Seeds a scrobbling-enabled library containing one series and a user with
/// AniList scrobbling enabled. Both entities will receive Id 1 on a fresh database,
/// which the tests below rely on.
/// </summary>
private async Task SeedData()
{
    var series = new SeriesBuilder("Test Series")
        .WithFormat(MangaFormat.Archive)
        .WithMetadata(new SeriesMetadataBuilder().Build())
        .Build();

    var library = new LibraryBuilder("Test Library", LibraryType.Manga)
        .WithAllowScrobbling(true)
        .WithSeries(series)
        .Build();

    _context.Library.Add(library);

    var user = new AppUserBuilder("testuser", "testuser")
        //.WithPreferences(new UserPreferencesBuilder().WithAniListScrobblingEnabled(true).Build())
        .Build();

    // Builder does not expose this preference (see commented line above), so set it directly
    user.UserPreferences.AniListScrobblingEnabled = true;

    _unitOfWork.UserRepository.Add(user);

    await _unitOfWork.CommitAsync();
}
|
||||
|
||||
#region ScrobbleWantToReadUpdate Tests
|
||||
|
||||
/// <summary>
/// With no prior events, adding a series to want-to-read must record exactly one
/// AddWantToRead scrobble event for the acting user.
/// </summary>
[Fact]
public async Task ScrobbleWantToReadUpdate_NoExistingEvents_WantToRead_ShouldCreateNewEvent()
{
    // Arrange
    const int userId = 1;
    const int seriesId = 1;

    await SeedData();
    _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

    // Act
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

    // Assert: exactly one event, of the add type, attributed to the user
    var scrobbleEvents = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
    var createdEvent = Assert.Single(scrobbleEvents);
    Assert.Equal(ScrobbleEventType.AddWantToRead, createdEvent.ScrobbleEventType);
    Assert.Equal(userId, createdEvent.AppUserId);
}
|
||||
|
||||
/// <summary>
/// With no prior events, removing a series from want-to-read must record exactly one
/// RemoveWantToRead scrobble event for the acting user.
/// </summary>
[Fact]
public async Task ScrobbleWantToReadUpdate_NoExistingEvents_RemoveWantToRead_ShouldCreateNewEvent()
{
    // Arrange
    const int userId = 1;
    const int seriesId = 1;

    await SeedData();
    _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

    // Act
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

    // Assert: exactly one event, of the remove type, attributed to the user
    var scrobbleEvents = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
    var createdEvent = Assert.Single(scrobbleEvents);
    Assert.Equal(ScrobbleEventType.RemoveWantToRead, createdEvent.ScrobbleEventType);
    Assert.Equal(userId, createdEvent.AppUserId);
}
|
||||
|
||||
/// <summary>
/// Adding a series to want-to-read twice must be idempotent: only the first call
/// records an AddWantToRead event.
/// </summary>
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_WantToRead_ShouldNotCreateNewEvent()
{
    // Arrange
    await SeedData();
    _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

    const int userId = 1;
    const int seriesId = 1;

    // First, let's create an event through the service
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

    // Act - Try to create the same event again
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

    // Assert
    var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);

    Assert.Single(events);
    Assert.All(events, e => Assert.Equal(ScrobbleEventType.AddWantToRead, e.ScrobbleEventType));
}
|
||||
|
||||
/// <summary>
/// Removing a series from want-to-read after an add must leave a single event of the
/// RemoveWantToRead type (the opposing add event does not linger).
/// </summary>
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_RemoveWantToRead_ShouldAddRemoveEvent()
{
    // Arrange
    await SeedData();
    _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

    const int userId = 1;
    const int seriesId = 1;

    // First, let's create a want-to-read event through the service
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

    // Act - Now remove from want-to-read
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

    // Assert
    var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);

    Assert.Single(events);
    Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.RemoveWantToRead);
}
|
||||
|
||||
/// <summary>
/// Removing a series from want-to-read twice must be idempotent: only the first call
/// records a RemoveWantToRead event.
/// </summary>
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_RemoveWantToRead_ShouldNotCreateNewEvent()
{
    // Arrange
    await SeedData();
    _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

    const int userId = 1;
    const int seriesId = 1;

    // First, let's create a remove-from-want-to-read event through the service
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

    // Act - Try to create the same event again
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

    // Assert
    var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);

    Assert.Single(events);
    Assert.All(events, e => Assert.Equal(ScrobbleEventType.RemoveWantToRead, e.ScrobbleEventType));
}
|
||||
|
||||
/// <summary>
/// Adding a series to want-to-read after a remove must leave a single event of the
/// AddWantToRead type (the opposing remove event does not linger).
/// </summary>
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_WantToRead_ShouldAddWantToReadEvent()
{
    // Arrange
    await SeedData();
    _licenseService.HasActiveLicense().Returns(Task.FromResult(true));

    const int userId = 1;
    const int seriesId = 1;

    // First, let's create a remove-from-want-to-read event through the service
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);

    // Act - Now add to want-to-read
    await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);

    // Assert
    var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);

    Assert.Single(events);
    Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.AddWantToRead);
}
|
||||
|
||||
#endregion
|
||||
|
||||
[Theory]
|
||||
[InlineData("https://anilist.co/manga/35851/Byeontaega-Doeja/", 35851)]
|
||||
[InlineData("https://anilist.co/manga/30105", 30105)]
|
||||
|
|
292
API.Tests/Services/SettingsServiceTests.cs
Normal file
|
@ -0,0 +1,292 @@
|
|||
using System.Collections.Generic;
|
||||
using System.IO.Abstractions;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Data.Repositories;
|
||||
using API.DTOs.KavitaPlus.Metadata;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Entities.MetadataMatching;
|
||||
using API.Services;
|
||||
using API.Services.Tasks.Scanner;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
|
||||
public class SettingsServiceTests
|
||||
{
|
||||
// System under test
private readonly ISettingsService _settingsService;
// Fully mocked unit of work — these tests do not use a real database
private readonly IUnitOfWork _mockUnitOfWork;

/// <summary>
/// Builds the SettingsService with a real DirectoryService (backed by System.IO.Abstractions)
/// and all other collaborators substituted.
/// </summary>
public SettingsServiceTests()
{
    var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());

    _mockUnitOfWork = Substitute.For<IUnitOfWork>();
    _settingsService = new SettingsService(_mockUnitOfWork, ds,
        Substitute.For<ILibraryWatcher>(), Substitute.For<ITaskScheduler>(),
        Substitute.For<ILogger<SettingsService>>());
}
|
||||
|
||||
#region UpdateMetadataSettings
|
||||
|
||||
/// <summary>
/// Validates that UpdateMetadataSettings copies every scalar flag and every collection from
/// the incoming DTO onto the existing MetadataSettings entity and commits once.
/// </summary>
[Fact]
public async Task UpdateMetadataSettings_ShouldUpdateExistingSettings()
{
    // Arrange: existing settings with everything disabled/empty so each update is observable
    var existingSettings = new MetadataSettings
    {
        Id = 1,
        Enabled = false,
        EnableSummary = false,
        EnableLocalizedName = false,
        EnablePublicationStatus = false,
        EnableRelationships = false,
        EnablePeople = false,
        EnableStartDate = false,
        EnableGenres = false,
        EnableTags = false,
        FirstLastPeopleNaming = false,
        EnableCoverImage = false,
        AgeRatingMappings = new Dictionary<string, AgeRating>(),
        Blacklist = [],
        Whitelist = [],
        Overrides = [],
        PersonRoles = [],
        FieldMappings = []
    };

    var settingsRepo = Substitute.For<ISettingsRepository>();
    settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
    settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
    _mockUnitOfWork.SettingsRepository.Returns(settingsRepo);

    // DTO with every flag flipped on and one entry in each collection
    var updateDto = new MetadataSettingsDto
    {
        Enabled = true,
        EnableSummary = true,
        EnableLocalizedName = true,
        EnablePublicationStatus = true,
        EnableRelationships = true,
        EnablePeople = true,
        EnableStartDate = true,
        EnableGenres = true,
        EnableTags = true,
        FirstLastPeopleNaming = true,
        EnableCoverImage = true,
        AgeRatingMappings = new Dictionary<string, AgeRating> { { "Adult", AgeRating.R18Plus } },
        Blacklist = ["blacklisted-tag"],
        Whitelist = ["whitelisted-tag"],
        Overrides = [MetadataSettingField.Summary],
        PersonRoles = [PersonRole.Writer],
        FieldMappings =
        [
            new MetadataFieldMappingDto
            {
                SourceType = MetadataFieldType.Genre,
                DestinationType = MetadataFieldType.Tag,
                SourceValue = "Action",
                DestinationValue = "Fight",
                ExcludeFromSource = true
            }
        ]
    };

    // Act
    await _settingsService.UpdateMetadataSettings(updateDto);

    // Assert
    await _mockUnitOfWork.Received(1).CommitAsync();

    // Verify properties were updated
    Assert.True(existingSettings.Enabled);
    Assert.True(existingSettings.EnableSummary);
    Assert.True(existingSettings.EnableLocalizedName);
    Assert.True(existingSettings.EnablePublicationStatus);
    Assert.True(existingSettings.EnableRelationships);
    Assert.True(existingSettings.EnablePeople);
    Assert.True(existingSettings.EnableStartDate);
    Assert.True(existingSettings.EnableGenres);
    Assert.True(existingSettings.EnableTags);
    Assert.True(existingSettings.FirstLastPeopleNaming);
    Assert.True(existingSettings.EnableCoverImage);

    // Verify collections were updated
    Assert.Single(existingSettings.AgeRatingMappings);
    Assert.Equal(AgeRating.R18Plus, existingSettings.AgeRatingMappings["Adult"]);

    Assert.Single(existingSettings.Blacklist);
    Assert.Equal("blacklisted-tag", existingSettings.Blacklist[0]);

    Assert.Single(existingSettings.Whitelist);
    Assert.Equal("whitelisted-tag", existingSettings.Whitelist[0]);

    Assert.Single(existingSettings.Overrides);
    Assert.Equal(MetadataSettingField.Summary, existingSettings.Overrides[0]);

    Assert.Single(existingSettings.PersonRoles);
    Assert.Equal(PersonRole.Writer, existingSettings.PersonRoles[0]);

    Assert.Single(existingSettings.FieldMappings);
    Assert.Equal(MetadataFieldType.Genre, existingSettings.FieldMappings[0].SourceType);
    Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[0].DestinationType);
    Assert.Equal("Action", existingSettings.FieldMappings[0].SourceValue);
    Assert.Equal("Fight", existingSettings.FieldMappings[0].DestinationValue);
    Assert.True(existingSettings.FieldMappings[0].ExcludeFromSource);
}
|
||||
|
||||
/// <summary>
/// Validates that null collections on the incoming DTO are normalized to empty collections
/// on the entity and that any pre-existing field mappings are removed from the repository.
/// </summary>
[Fact]
public async Task UpdateMetadataSettings_WithNullCollections_ShouldUseEmptyCollections()
{
    // Arrange: one pre-existing field mapping so the clearing behavior is observable
    var existingSettings = new MetadataSettings
    {
        Id = 1,
        FieldMappings = [new MetadataFieldMapping {Id = 1, SourceValue = "OldValue"}]
    };

    var settingsRepo = Substitute.For<ISettingsRepository>();
    settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
    settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
    _mockUnitOfWork.SettingsRepository.Returns(settingsRepo);

    // All collections null — the service must treat these as "empty", not "leave unchanged"
    var updateDto = new MetadataSettingsDto
    {
        AgeRatingMappings = null,
        Blacklist = null,
        Whitelist = null,
        Overrides = null,
        PersonRoles = null,
        FieldMappings = null
    };

    // Act
    await _settingsService.UpdateMetadataSettings(updateDto);

    // Assert
    await _mockUnitOfWork.Received(1).CommitAsync();

    Assert.Empty(existingSettings.AgeRatingMappings);
    Assert.Empty(existingSettings.Blacklist);
    Assert.Empty(existingSettings.Whitelist);
    Assert.Empty(existingSettings.Overrides);
    Assert.Empty(existingSettings.PersonRoles);

    // Verify existing field mappings were cleared
    settingsRepo.Received(1).RemoveRange(Arg.Any<List<MetadataFieldMapping>>());
    Assert.Empty(existingSettings.FieldMappings);
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateMetadataSettings_WithFieldMappings_ShouldReplaceExistingMappings()
|
||||
{
|
||||
// Arrange
|
||||
var existingSettings = new MetadataSettings
|
||||
{
|
||||
Id = 1,
|
||||
FieldMappings =
|
||||
[
|
||||
new MetadataFieldMapping
|
||||
{
|
||||
Id = 1,
|
||||
SourceType = MetadataFieldType.Genre,
|
||||
DestinationType = MetadataFieldType.Genre,
|
||||
SourceValue = "OldValue",
|
||||
DestinationValue = "OldDestination",
|
||||
ExcludeFromSource = false
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var settingsRepo = Substitute.For<ISettingsRepository>();
|
||||
settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
|
||||
settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
|
||||
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
|
||||
|
||||
var updateDto = new MetadataSettingsDto
|
||||
{
|
||||
FieldMappings =
|
||||
[
|
||||
new MetadataFieldMappingDto
|
||||
{
|
||||
SourceType = MetadataFieldType.Tag,
|
||||
DestinationType = MetadataFieldType.Genre,
|
||||
SourceValue = "NewValue",
|
||||
DestinationValue = "NewDestination",
|
||||
ExcludeFromSource = true
|
||||
},
|
||||
|
||||
new MetadataFieldMappingDto
|
||||
{
|
||||
SourceType = MetadataFieldType.Tag,
|
||||
DestinationType = MetadataFieldType.Tag,
|
||||
SourceValue = "AnotherValue",
|
||||
DestinationValue = "AnotherDestination",
|
||||
ExcludeFromSource = false
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
// Act
|
||||
await _settingsService.UpdateMetadataSettings(updateDto);
|
||||
|
||||
// Assert
|
||||
await _mockUnitOfWork.Received(1).CommitAsync();
|
||||
|
||||
// Verify existing field mappings were cleared and new ones added
|
||||
settingsRepo.Received(1).RemoveRange(Arg.Any<List<MetadataFieldMapping>>());
|
||||
Assert.Equal(2, existingSettings.FieldMappings.Count);
|
||||
|
||||
// Verify first mapping
|
||||
Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[0].SourceType);
|
||||
Assert.Equal(MetadataFieldType.Genre, existingSettings.FieldMappings[0].DestinationType);
|
||||
Assert.Equal("NewValue", existingSettings.FieldMappings[0].SourceValue);
|
||||
Assert.Equal("NewDestination", existingSettings.FieldMappings[0].DestinationValue);
|
||||
Assert.True(existingSettings.FieldMappings[0].ExcludeFromSource);
|
||||
|
||||
// Verify second mapping
|
||||
Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[1].SourceType);
|
||||
Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[1].DestinationType);
|
||||
Assert.Equal("AnotherValue", existingSettings.FieldMappings[1].SourceValue);
|
||||
Assert.Equal("AnotherDestination", existingSettings.FieldMappings[1].DestinationValue);
|
||||
Assert.False(existingSettings.FieldMappings[1].ExcludeFromSource);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateMetadataSettings_WithBlacklistWhitelist_ShouldNormalizeAndDeduplicateEntries()
|
||||
{
|
||||
// Arrange
|
||||
var existingSettings = new MetadataSettings
|
||||
{
|
||||
Id = 1,
|
||||
Blacklist = [],
|
||||
Whitelist = []
|
||||
};
|
||||
|
||||
// We need to mock the repository and provide a custom implementation for ToNormalized
|
||||
var settingsRepo = Substitute.For<ISettingsRepository>();
|
||||
settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
|
||||
settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
|
||||
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
|
||||
|
||||
var updateDto = new MetadataSettingsDto
|
||||
{
|
||||
// Include duplicates with different casing and whitespace
|
||||
Blacklist = ["tag1", "Tag1", " tag2 ", "", " ", "tag3"],
|
||||
Whitelist = ["allowed1", "Allowed1", " allowed2 ", "", "allowed3"]
|
||||
};
|
||||
|
||||
// Act
|
||||
await _settingsService.UpdateMetadataSettings(updateDto);
|
||||
|
||||
// Assert
|
||||
await _mockUnitOfWork.Received(1).CommitAsync();
|
||||
|
||||
Assert.Equal(3, existingSettings.Blacklist.Count);
|
||||
Assert.Equal(3, existingSettings.Whitelist.Count);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
|
@ -9,6 +9,7 @@ using API.Services;
|
|||
using API.Services.Tasks;
|
||||
using API.SignalR;
|
||||
using Kavita.Common;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using NSubstitute;
|
||||
using Xunit;
|
||||
|
@ -44,13 +45,14 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
|
|||
var filesystem = CreateFileSystem();
|
||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub, Substitute.For<IFileService>(),
|
||||
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
|
||||
|
||||
_context.SiteTheme.Add(new SiteTheme()
|
||||
{
|
||||
Name = "Custom",
|
||||
NormalizedName = "Custom".ToNormalized(),
|
||||
Provider = ThemeProvider.User,
|
||||
Provider = ThemeProvider.Custom,
|
||||
FileName = "custom.css",
|
||||
IsDefault = false
|
||||
});
|
||||
|
@ -61,63 +63,6 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
|
|||
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Scan_ShouldFindCustomFile()
|
||||
{
|
||||
await ResetDb();
|
||||
_testOutputHelper.WriteLine($"[Scan_ShouldOnlyInsertOnceOnSecondScan] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
||||
var filesystem = CreateFileSystem();
|
||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData(""));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
||||
await siteThemeService.Scan();
|
||||
|
||||
Assert.NotNull(await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("custom"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Scan_ShouldOnlyInsertOnceOnSecondScan()
|
||||
{
|
||||
await ResetDb();
|
||||
_testOutputHelper.WriteLine(
|
||||
$"[Scan_ShouldOnlyInsertOnceOnSecondScan] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
||||
var filesystem = CreateFileSystem();
|
||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData(""));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
||||
await siteThemeService.Scan();
|
||||
|
||||
Assert.NotNull(await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("custom"));
|
||||
|
||||
await siteThemeService.Scan();
|
||||
|
||||
var customThemes = (await _unitOfWork.SiteThemeRepository.GetThemeDtos()).Where(t =>
|
||||
t.Name.ToNormalized().Equals("custom".ToNormalized()));
|
||||
|
||||
Assert.Single(customThemes);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Scan_ShouldDeleteWhenFileDoesntExistOnSecondScan()
|
||||
{
|
||||
await ResetDb();
|
||||
_testOutputHelper.WriteLine($"[Scan_ShouldDeleteWhenFileDoesntExistOnSecondScan] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
||||
var filesystem = CreateFileSystem();
|
||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData(""));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
||||
await siteThemeService.Scan();
|
||||
|
||||
Assert.NotNull(await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("custom"));
|
||||
|
||||
filesystem.RemoveFile($"{SiteThemeDirectory}custom.css");
|
||||
await siteThemeService.Scan();
|
||||
|
||||
var themes = (await _unitOfWork.SiteThemeRepository.GetThemeDtos());
|
||||
|
||||
Assert.Equal(0, themes.Count(t =>
|
||||
t.Name.ToNormalized().Equals("custom".ToNormalized())));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetContent_ShouldReturnContent()
|
||||
|
@ -127,13 +72,14 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
|
|||
var filesystem = CreateFileSystem();
|
||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub, Substitute.For<IFileService>(),
|
||||
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
|
||||
|
||||
_context.SiteTheme.Add(new SiteTheme()
|
||||
{
|
||||
Name = "Custom",
|
||||
NormalizedName = "Custom".ToNormalized(),
|
||||
Provider = ThemeProvider.User,
|
||||
Provider = ThemeProvider.Custom,
|
||||
FileName = "custom.css",
|
||||
IsDefault = false
|
||||
});
|
||||
|
@ -153,13 +99,14 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
|
|||
var filesystem = CreateFileSystem();
|
||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub, Substitute.For<IFileService>(),
|
||||
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
|
||||
|
||||
_context.SiteTheme.Add(new SiteTheme()
|
||||
{
|
||||
Name = "Custom",
|
||||
NormalizedName = "Custom".ToNormalized(),
|
||||
Provider = ThemeProvider.User,
|
||||
Provider = ThemeProvider.Custom,
|
||||
FileName = "custom.css",
|
||||
IsDefault = false
|
||||
});
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
using API.Extensions;
|
||||
using API.Helpers.Builders;
|
||||
using API.Helpers.Builders;
|
||||
using API.Services.Plus;
|
||||
using API.Services.Tasks;
|
||||
|
||||
namespace API.Tests.Services;
|
||||
using System.Collections.Generic;
|
||||
|
@ -16,7 +14,6 @@ using API.Entities.Enums;
|
|||
using API.Helpers;
|
||||
using API.Services;
|
||||
using SignalR;
|
||||
using Helpers;
|
||||
using AutoMapper;
|
||||
using Microsoft.Data.Sqlite;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
@ -52,7 +49,7 @@ public class TachiyomiServiceTests
|
|||
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
|
||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()),
|
||||
Substitute.For<IScrobblingService>());
|
||||
_tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For<ILogger<ReaderService>>(), _readerService);
|
||||
_tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For<ILogger<TachiyomiService>>(), _readerService);
|
||||
|
||||
}
|
||||
|
||||
|
@ -125,12 +122,12 @@ public class TachiyomiServiceTests
|
|||
await ResetDb();
|
||||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
||||
|
@ -170,12 +167,12 @@ public class TachiyomiServiceTests
|
|||
await ResetDb();
|
||||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
||||
|
@ -221,7 +218,7 @@ public class TachiyomiServiceTests
|
|||
await ResetDb();
|
||||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
|
@ -265,18 +262,19 @@ public class TachiyomiServiceTests
|
|||
|
||||
Assert.Equal("21", latestChapter.Number);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetLatestChapter_ShouldReturnEncodedVolume_Progress()
|
||||
{
|
||||
await ResetDb();
|
||||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(new ChapterBuilder("21").WithPages(1).Build())
|
||||
|
@ -323,13 +321,16 @@ public class TachiyomiServiceTests
|
|||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("0").WithPages(199).Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithPages(199).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(new ChapterBuilder("0").WithPages(192).Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithPages(192).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("3")
|
||||
.WithChapter(new ChapterBuilder("0").WithPages(255).Build())
|
||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||
.WithPages(255).Build())
|
||||
.Build())
|
||||
.WithPages(646)
|
||||
.Build();
|
||||
|
@ -368,7 +369,7 @@ public class TachiyomiServiceTests
|
|||
await ResetDb();
|
||||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
|
@ -421,12 +422,12 @@ public class TachiyomiServiceTests
|
|||
await ResetDb();
|
||||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
||||
|
@ -464,12 +465,12 @@ public class TachiyomiServiceTests
|
|||
await ResetDb();
|
||||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
||||
|
@ -514,7 +515,7 @@ public class TachiyomiServiceTests
|
|||
await ResetDb();
|
||||
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
|
@ -562,12 +563,12 @@ public class TachiyomiServiceTests
|
|||
{
|
||||
await ResetDb();
|
||||
var series = new SeriesBuilder("Test")
|
||||
.WithVolume(new VolumeBuilder("0")
|
||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("1")
|
||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
||||
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||
.Build())
|
||||
.WithVolume(new VolumeBuilder("2")
|
||||
.WithChapter(new ChapterBuilder("21").WithPages(1).Build())
|
||||
|
|
BIN
API.Tests/Services/Test Data/BookService/Rollo at Work SP01.pdf
Normal file
BIN
API.Tests/Services/Test Data/BookService/encrypted.pdf
Normal file
BIN
API.Tests/Services/Test Data/BookService/indirect.pdf
Normal file
BIN
API.Tests/Services/Test Data/ImageService/ColorScapes/blue-2.png
Normal file
After Width: | Height: | Size: 336 KiB |
BIN
API.Tests/Services/Test Data/ImageService/ColorScapes/blue.jpg
Normal file
After Width: | Height: | Size: 28 KiB |
After Width: | Height: | Size: 320 KiB |
BIN
API.Tests/Services/Test Data/ImageService/ColorScapes/green.png
Normal file
After Width: | Height: | Size: 340 KiB |
After Width: | Height: | Size: 294 KiB |
After Width: | Height: | Size: 286 KiB |
BIN
API.Tests/Services/Test Data/ImageService/ColorScapes/pink.png
Normal file
After Width: | Height: | Size: 327 KiB |
After Width: | Height: | Size: 168 KiB |
After Width: | Height: | Size: 351 KiB |
After Width: | Height: | Size: 1.5 MiB |
After Width: | Height: | Size: 447 KiB |
After Width: | Height: | Size: 482 KiB |
BIN
API.Tests/Services/Test Data/ImageService/Covers/comic-wide.jpg
Normal file
After Width: | Height: | Size: 618 KiB |
BIN
API.Tests/Services/Test Data/ImageService/Covers/manga-cover.png
Normal file
After Width: | Height: | Size: 3.1 MiB |
After Width: | Height: | Size: 886 KiB |
After Width: | Height: | Size: 1.4 MiB |
After Width: | Height: | Size: 257 KiB |