diff --git a/.browserslistrc b/.browserslistrc
index 427441dc9..6784945a5 100644
--- a/.browserslistrc
+++ b/.browserslistrc
@@ -8,10 +8,4 @@
# You can see what browsers were selected by your queries by running:
# npx browserslist
-last 1 Chrome version
-last 1 Firefox version
-last 2 Edge major versions
-last 2 Safari major versions
-last 2 iOS major versions
-Firefox ESR
-not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
+defaults
\ No newline at end of file
diff --git a/.editorconfig b/.editorconfig
index c24677846..c82009e40 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -1,6 +1,7 @@
# Editor configuration, see https://editorconfig.org
root = true
+
[*]
charset = utf-8
indent_style = space
@@ -22,3 +23,7 @@ indent_size = 2
[*.csproj]
indent_size = 2
+
+[*.cs]
+# Disable SonarLint warning S1075 (Don't use hardcoded url)
+dotnet_diagnostic.S1075.severity = none
diff --git a/.github/DISCUSSION_TEMPLATE/ideas.yml b/.github/DISCUSSION_TEMPLATE/ideas.yml
index 728b5b497..845d3e3f3 100644
--- a/.github/DISCUSSION_TEMPLATE/ideas.yml
+++ b/.github/DISCUSSION_TEMPLATE/ideas.yml
@@ -1,68 +1,48 @@
-title: "[Kavita] Idea Submission"
-labels: ["Idea Submission"]
+title: "[Kavita] Idea / Feature Submission"
+labels:
+ - "Idea Submission"
body:
- type: markdown
attributes:
value: |
- ## 🌟 Idea Submission for Kavita 🌟
-
- This is a template for submitting your ideas to enhance Kavita. Please fill out the details below, and let's make Kavita even better together!
+ ## Idea Submission for Kavita 💡
+ Please fill out the details below, and let's make Kavita even better together!
+
- type: textarea
id: idea-description
attributes:
label: Idea Description
- description: "Describe your idea in detail."
value: |
- [Include a brief overview of your idea]
-
- - type: markdown
- attributes:
- value: |
- **Why I Think This Is Important:**
-
- [Provide context on why you believe this idea is valuable or necessary for Kavita users]
-
- - type: markdown
- attributes:
- value: |
- **How You Can Contribute:**
-
- 1. **Upvote if You Agree:**
- - If you resonate with my idea, please upvote it! This helps us gauge community interest.
-
- 2. **Leave Your Thoughts:**
- - Feel free to leave comments with your opinions, suggestions, or even constructive critiques.
-
- Let's work together to shape the future of Kavita! 🌟
-
- - type: input
- id: duration-of-use
- attributes:
- label: Duration of Using Kavita
- description: "How long have you been using Kavita?"
- validations:
- required: true
-
+ Go into as much detail as possible to explain why your idea should be added to Kavita. Try to present some use cases and examples of how it would help other users. The more detail you have the better.
+
- type: dropdown
id: idea-category
attributes:
label: Idea Category
options:
+ - API
- Feature Enhancement
- User Experience
- Performance Improvement
- description: "Select the category that best fits your idea."
+ - Web UI
+ description: "What area would your idea help with?"
validations:
required: true
-
+
+ - type: input
+ id: duration-of-use
+ attributes:
+ label: Duration of Using Kavita
+ description: "How long have you been using Kavita?"
+
- type: checkboxes
attributes:
- label: Agreement
+ label: Before submitting
options:
- - label: "I agree that this is solely for submitting ideas, and I will search for existing ideas before posting."
+ - label: "I've already searched for existing ideas before posting."
required: true
-
+
- type: markdown
attributes:
value: |
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 627edd9ed..cdd72de1c 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -25,10 +25,10 @@ body:
- type: dropdown
id: version
attributes:
- label: Kavita Version Number - Don't see your version number listed? Then your install is out of date. Please update and see if your issue still persists.
+ label: Kavita Version Number - If you don't see your version number listed, please update Kavita and see if your issue still persists.
multiple: false
options:
- - 0.7.14 - Stable
+ - 0.8.6.2 - Stable
- Nightly Testing Branch
validations:
required: true
@@ -75,13 +75,13 @@ body:
- type: dropdown
id: mobile-browsers
attributes:
- label: If the issue is being seen on the UI, what browsers are you seeing the problem on?
+ label: If the issue is being seen on the Mobile UI, what browsers are you seeing the problem on?
multiple: true
options:
- Firefox
- Chrome
- Safari
- - Microsoft Edge
+ - Other iOS Browser
- type: textarea
id: logs
attributes:
diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml
index 98ce4c439..044864734 100644
--- a/.github/workflows/build-and-test.yml
+++ b/.github/workflows/build-and-test.yml
@@ -10,23 +10,23 @@ jobs:
runs-on: windows-latest
steps:
- name: Checkout Repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup .NET Core
- uses: actions/setup-dotnet@v3
+ uses: actions/setup-dotnet@v4
with:
- dotnet-version: 8.0.x
+ dotnet-version: 9.0.x
- name: Install Swashbuckle CLI
shell: powershell
- run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
+ run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
- name: Install dependencies
run: dotnet restore
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: csproj
path: Kavita.Common/Kavita.Common.csproj
diff --git a/.github/workflows/canary-workflow.yml b/.github/workflows/canary-workflow.yml
index af4a45dec..b919030b0 100644
--- a/.github/workflows/canary-workflow.yml
+++ b/.github/workflows/canary-workflow.yml
@@ -9,14 +9,14 @@ on:
jobs:
build:
name: Upload Kavita.Common for Version Bump
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: Checkout Repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: csproj
path: Kavita.Common/Kavita.Common.csproj
@@ -24,16 +24,16 @@ jobs:
version:
name: Bump version
needs: [ build ]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup .NET Core
- uses: actions/setup-dotnet@v3
+ uses: actions/setup-dotnet@v4
with:
- dotnet-version: 8.0.x
+ dotnet-version: 9.0.x
- name: Bump versions
uses: SiqiLu/dotnet-bump-version@2.0.0
@@ -45,7 +45,7 @@ jobs:
canary:
name: Build Canary Docker
needs: [ build, version ]
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
permissions:
packages: write
contents: read
@@ -59,14 +59,14 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Check Out Repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: canary
- name: NodeJS to Compile WebUI
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
- node-version: '18.13.x'
+ node-version: 20
- run: |
cd UI/Web || exit
echo 'Installing web dependencies'
@@ -81,7 +81,7 @@ jobs:
cd ../ || exit
- name: Get csproj Version
- uses: kzrnm/get-net-sdk-project-versions-action@v1
+ uses: kzrnm/get-net-sdk-project-versions-action@v2
id: get-version
with:
proj-path: Kavita.Common/Kavita.Common.csproj
@@ -96,38 +96,38 @@ jobs:
run: echo "${{steps.get-version.outputs.assembly-version}}"
- name: Compile dotnet app
- uses: actions/setup-dotnet@v3
+ uses: actions/setup-dotnet@v4
with:
- dotnet-version: 8.0.x
+ dotnet-version: 9.0.x
- name: Install Swashbuckle CLI
- run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
+ run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
- run: ./monorepo-build.sh
- name: Login to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Login to GitHub Container Registry
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
id: buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
- name: Build and push
id: docker_build
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm/v7,linux/arm64
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 53103f850..7ce4276bc 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -13,7 +13,7 @@ name: "CodeQL"
on:
push:
- branches: [ "develop", "main" ]
+ branches: [ "develop"]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "develop" ]
@@ -38,7 +38,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- language: [ 'csharp', 'javascript-typescript', 'python' ]
+ language: [ 'csharp', 'javascript-typescript' ]
# CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
# Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
@@ -46,15 +46,16 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- - name: Install Swashbuckle CLI
- shell: bash
- run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -68,7 +69,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
- uses: github/codeql-action/autobuild@v2
+ uses: github/codeql-action/autobuild@v3
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -81,6 +82,6 @@ jobs:
dotnet build Kavita.sln
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/develop-workflow.yml b/.github/workflows/develop-workflow.yml
index dff82c01e..006127645 100644
--- a/.github/workflows/develop-workflow.yml
+++ b/.github/workflows/develop-workflow.yml
@@ -2,15 +2,12 @@ name: Nightly Workflow
on:
push:
- branches: ['!release/**']
- pull_request:
branches: [ 'develop', '!release/**' ]
- types: [ closed ]
workflow_dispatch:
jobs:
debug:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: Debug Info
run: |
@@ -20,15 +17,15 @@ jobs:
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
build:
name: Upload Kavita.Common for Version Bump
- runs-on: ubuntu-latest
- if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
+ runs-on: ubuntu-24.04
+ if: github.ref == 'refs/heads/develop'
steps:
- name: Checkout Repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: csproj
path: Kavita.Common/Kavita.Common.csproj
@@ -36,17 +33,17 @@ jobs:
version:
name: Bump version
needs: [ build ]
- runs-on: ubuntu-latest
- if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
+ runs-on: ubuntu-24.04
+ if: github.ref == 'refs/heads/develop'
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup .NET Core
- uses: actions/setup-dotnet@v3
+ uses: actions/setup-dotnet@v4
with:
- dotnet-version: 8.0.x
+ dotnet-version: 9.0.x
- name: Bump versions
uses: majora2007/dotnet-bump-version@v0.0.10
@@ -58,8 +55,8 @@ jobs:
develop:
name: Build Nightly Docker
needs: [ build, version ]
- runs-on: ubuntu-latest
- if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
+ runs-on: ubuntu-24.04
+ if: github.ref == 'refs/heads/develop'
permissions:
packages: write
contents: read
@@ -92,18 +89,18 @@ jobs:
echo "BODY=$body" >> $GITHUB_OUTPUT
- name: Check Out Repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: develop
- name: NodeJS to Compile WebUI
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
- node-version: '18.13.x'
+ node-version: 20
- run: |
cd UI/Web || exit
echo 'Installing web dependencies'
- npm install --legacy-peer-deps
+ npm ci
echo 'Building UI'
npm run prod
@@ -114,7 +111,7 @@ jobs:
cd ../ || exit
- name: Get csproj Version
- uses: kzrnm/get-net-sdk-project-versions-action@v1
+ uses: kzrnm/get-net-sdk-project-versions-action@v2
id: get-version
with:
proj-path: Kavita.Common/Kavita.Common.csproj
@@ -129,49 +126,63 @@ jobs:
run: echo "${{steps.get-version.outputs.assembly-version}}"
- name: Compile dotnet app
- uses: actions/setup-dotnet@v3
+ uses: actions/setup-dotnet@v4
with:
- dotnet-version: 8.0.x
+ dotnet-version: 9.0.x
- name: Install Swashbuckle CLI
- run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
+ run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
- run: ./monorepo-build.sh
- name: Login to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
+ if: ${{ github.repository_owner == 'Kareadita' }}
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Login to GitHub Container Registry
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
id: buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: docker_meta_nightly
+ uses: docker/metadata-action@v5
+ with:
+ tags: |
+ type=raw,value=nightly
+ type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
+ images: |
+ name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
+ name=ghcr.io/${{ github.repository }}
- name: Build and push
id: docker_build
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm/v7,linux/arm64
push: true
- tags: jvmilazz0/kavita:nightly, jvmilazz0/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:nightly, ghcr.io/kareadita/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
+ tags: ${{ steps.docker_meta_nightly.outputs.tags }}
+ labels: ${{ steps.docker_meta_nightly.outputs.labels }}
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
- name: Notify Discord
uses: rjstone/discord-webhook-notify@v1
+ if: ${{ github.repository_owner == 'Kareadita' }}
with:
severity: info
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
diff --git a/.github/workflows/openapi-gen.yml b/.github/workflows/openapi-gen.yml
new file mode 100644
index 000000000..45446d045
--- /dev/null
+++ b/.github/workflows/openapi-gen.yml
@@ -0,0 +1,68 @@
+name: Generate OpenAPI Documentation
+
+on:
+ push:
+ branches: [ 'develop', '!release/**' ]
+ paths:
+ - '**/*.cs'
+ - '**/*.csproj'
+ pull_request:
+ branches: [ 'develop', '!release/**' ]
+ workflow_dispatch:
+
+jobs:
+ generate-openapi:
+ runs-on: ubuntu-latest
+ # Only run on direct pushes to develop or manual dispatch (not PRs), and only on the Kareadita repo
+ if: (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && github.repository_owner == 'Kareadita'
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+
+ - name: Install dependencies
+ run: dotnet restore
+
+ - name: Build project
+ run: dotnet build API/API.csproj --configuration Debug
+
+ - name: Get Swashbuckle version
+ id: swashbuckle-version
+ run: |
+ VERSION=$(grep -o '<PackageReference Include="Swashbuckle.AspNetCore" Version="[^"]*"' API/API.csproj | grep -o 'Version="[^"]*"' | cut -d'"' -f2)
+ echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
+ echo "Found Swashbuckle.AspNetCore version: $VERSION"
+
+ - name: Install matching Swashbuckle CLI tool
+ run: |
+ dotnet new tool-manifest --force
+ dotnet tool install Swashbuckle.AspNetCore.Cli --version ${{ steps.swashbuckle-version.outputs.VERSION }}
+
+ - name: Generate OpenAPI file
+ run: dotnet swagger tofile --output openapi.json API/bin/Debug/net9.0/API.dll v1
+
+ - name: Check for changes
+ id: git-check
+ run: |
+ git add openapi.json
+ git diff --staged --quiet openapi.json || echo "has_changes=true" >> $GITHUB_OUTPUT
+
+ - name: Commit and push if changed
+ if: steps.git-check.outputs.has_changes == 'true'
+ run: |
+ git config --local user.email "action@github.com"
+ git config --local user.name "GitHub Action"
+
+ git commit -m "Update OpenAPI documentation" openapi.json
+
+ # Pull latest changes with rebase to avoid merge commits
+ git pull --rebase origin develop
+
+ git push
+ env:
+ GITHUB_TOKEN: ${{ secrets.REPO_GHA_PAT }}
diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml
index 7482deb0b..51589221f 100644
--- a/.github/workflows/pr-check.yml
+++ b/.github/workflows/pr-check.yml
@@ -1,15 +1,13 @@
name: Validate PR Body
on:
- push:
- branches: '**'
pull_request:
branches: [ main, develop, canary ]
types: [synchronize]
jobs:
check_pr:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: Extract branch name
shell: bash
diff --git a/.github/workflows/release-workflow.yml b/.github/workflows/release-workflow.yml
index ca1314e8b..757ce1075 100644
--- a/.github/workflows/release-workflow.yml
+++ b/.github/workflows/release-workflow.yml
@@ -10,7 +10,7 @@ on:
jobs:
debug:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: Debug Info
run: |
@@ -20,21 +20,21 @@ jobs:
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
if_merged:
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- run: |
echo The PR was merged
build:
name: Upload Kavita.Common for Version Bump
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
steps:
- name: Checkout Repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
fetch-depth: 0
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: csproj
path: Kavita.Common/Kavita.Common.csproj
@@ -43,7 +43,7 @@ jobs:
name: Build Stable and Nightly Docker if Release
needs: [ build ]
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
permissions:
packages: write
contents: read
@@ -58,38 +58,25 @@ jobs:
- name: Parse PR body
id: parse-body
run: |
- body="${{ steps.findPr.outputs.body }}"
- body=${body//\'/}
- body=${body//'%'/'%25'}
- body=${body//$'\n'/'%0A'}
- body=${body//$'\r'/'%0D'}
- body=${body//$'`'/'%60'}
- body=${body//$'>'/'%3E'}
-
- if [[ ${#body} -gt 1870 ]] ; then
- body=${body:0:1870}
- body="${body}...and much more.
-
- Read full changelog: https://github.com/Kareadita/Kavita/releases/latest"
- fi
+ body="Read full changelog: https://github.com/Kareadita/Kavita/releases/latest"
echo $body
echo "BODY=$body" >> $GITHUB_OUTPUT
- name: Check Out Repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: develop
- name: NodeJS to Compile WebUI
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
- node-version: '18.13.x'
+ node-version: 20
- run: |
cd UI/Web || exit
echo 'Installing web dependencies'
- npm install --legacy-peer-deps
+ npm ci
echo 'Building UI'
npm run prod
@@ -100,7 +87,7 @@ jobs:
cd ../ || exit
- name: Get csproj Version
- uses: kzrnm/get-net-sdk-project-versions-action@v1
+ uses: kzrnm/get-net-sdk-project-versions-action@v2
id: get-version
with:
proj-path: Kavita.Common/Kavita.Common.csproj
@@ -117,72 +104,79 @@ jobs:
id: parse-version
- name: Compile dotnet app
- uses: actions/setup-dotnet@v3
+ uses: actions/setup-dotnet@v4
with:
- dotnet-version: 8.0.x
+ dotnet-version: 9.0.x
- name: Install Swashbuckle CLI
- run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
+ run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
- run: ./monorepo-build.sh
- name: Login to Docker Hub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
+ if: ${{ github.repository_owner == 'Kareadita' }}
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Login to GitHub Container Registry
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
id: buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: docker_meta_stable
+ uses: docker/metadata-action@v5
+ with:
+ tags: |
+ type=raw,value=latest
+ type=raw,value=${{ steps.parse-version.outputs.VERSION }}
+ images: |
+ name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
+ name=ghcr.io/${{ github.repository }}
- name: Build and push stable
id: docker_build_stable
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm/v7,linux/arm64
push: true
- tags: jvmilazz0/kavita:latest, jvmilazz0/kavita:${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:latest, ghcr.io/kareadita/kavita:${{ steps.parse-version.outputs.VERSION }}
+ tags: ${{ steps.docker_meta_stable.outputs.tags }}
+ labels: ${{ steps.docker_meta_stable.outputs.labels }}
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: docker_meta_nightly
+ uses: docker/metadata-action@v5
+ with:
+ tags: |
+ type=raw,value=nightly
+ type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
+ images: |
+ name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
+ name=ghcr.io/${{ github.repository }}
- name: Build and push nightly
id: docker_build_nightly
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm/v7,linux/arm64
push: true
- tags: jvmilazz0/kavita:nightly, jvmilazz0/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:nightly, ghcr.io/kareadita/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
+ tags: ${{ steps.docker_meta_nightly.outputs.tags }}
+ labels: ${{ steps.docker_meta_nightly.outputs.labels }}
- name: Image digest
run: echo ${{ steps.docker_build_stable.outputs.digest }}
- name: Image digest
run: echo ${{ steps.docker_build_nightly.outputs.digest }}
-
- - name: Notify Discord
- uses: rjstone/discord-webhook-notify@v1
- with:
- severity: info
- description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
- details: '${{ steps.findPr.outputs.body }}'
- text: <@&939225192553644133> A new stable build has been released.
- webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
-
- - name: Notify Discord
- uses: rjstone/discord-webhook-notify@v1
- with:
- severity: info
- description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
- details: '${{ steps.findPr.outputs.body }}'
- text: <@&939225459156217917> <@&939225350775406643> A new nightly build has been released for docker.
- webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
diff --git a/.gitignore b/.gitignore
index bb124fc7f..1cffb441d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -513,6 +513,7 @@ UI/Web/dist/
/API/config/stats/
/API/config/bookmarks/
/API/config/favicons/
+/API/config/cache-long/
/API/config/kavita.db
/API/config/kavita.db-shm
/API/config/kavita.db-wal
@@ -520,9 +521,11 @@ UI/Web/dist/
/API/config/*.db
/API/config/*.bak
/API/config/*.backup
+/API/config/*.csv
/API/config/Hangfire.db
/API/config/Hangfire-log.db
API/config/covers/
+API/config/images/*
API/config/stats/*
API/config/stats/app_stats.json
API/config/pre-metadata/
@@ -533,3 +536,11 @@ UI/Web/.vscode/settings.json
/API.Tests/Services/Test Data/ArchiveService/CoverImages/output/*
UI/Web/.angular/
BenchmarkDotNet.Artifacts
+
+
+API.Tests/Services/Test Data/ImageService/**/*_output*
+API.Tests/Services/Test Data/ImageService/**/*_baseline*
+API.Tests/Services/Test Data/ImageService/**/*.html
+
+
+API.Tests/Services/Test Data/ScannerService/ScanTests/**/*
diff --git a/.sonarcloud.properties b/.sonarcloud.properties
new file mode 100644
index 000000000..1876ac55a
--- /dev/null
+++ b/.sonarcloud.properties
@@ -0,0 +1,15 @@
+# Path to sources
+sonar.sources=.
+sonar.exclusions=API.Benchmark
+#sonar.inclusions=
+
+# Path to tests
+sonar.tests=API.Tests
+#sonar.test.exclusions=
+#sonar.test.inclusions=
+
+# Source encoding
+sonar.sourceEncoding=UTF-8
+
+# Exclusions for copy-paste detection
+#sonar.cpd.exclusions=
diff --git a/API.Benchmark/API.Benchmark.csproj b/API.Benchmark/API.Benchmark.csproj
index ebc913fe1..38ec425fe 100644
--- a/API.Benchmark/API.Benchmark.csproj
+++ b/API.Benchmark/API.Benchmark.csproj
@@ -1,7 +1,7 @@
- <TargetFramework>net8.0</TargetFramework>
+ <TargetFramework>net9.0</TargetFramework>
<OutputType>Exe</OutputType>
@@ -10,9 +10,9 @@
-
-
-
+
+
+
diff --git a/API.Benchmark/ArchiveServiceBenchmark.cs b/API.Benchmark/ArchiveServiceBenchmark.cs
index 9ef8e237b..ccb44d517 100644
--- a/API.Benchmark/ArchiveServiceBenchmark.cs
+++ b/API.Benchmark/ArchiveServiceBenchmark.cs
@@ -32,7 +32,7 @@ public class ArchiveServiceBenchmark
public ArchiveServiceBenchmark()
{
_directoryService = new DirectoryService(null, new FileSystem());
- _imageService = new ImageService(null, _directoryService, Substitute.For());
+ _imageService = new ImageService(null, _directoryService);
_archiveService = new ArchiveService(new NullLogger(), _directoryService, _imageService, Substitute.For());
}
diff --git a/API.Benchmark/TestBenchmark.cs b/API.Benchmark/TestBenchmark.cs
index 3b08bbcdf..511d250aa 100644
--- a/API.Benchmark/TestBenchmark.cs
+++ b/API.Benchmark/TestBenchmark.cs
@@ -49,7 +49,7 @@ public class TestBenchmark
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
{
- foreach (var v in volumes.Where(vDto => vDto.MinNumber == 0))
+ foreach (var v in volumes.WhereNotLooseLeaf())
{
v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList();
}
diff --git a/API.Tests/API.Tests.csproj b/API.Tests/API.Tests.csproj
index 5287a124a..3a4867ec4 100644
--- a/API.Tests/API.Tests.csproj
+++ b/API.Tests/API.Tests.csproj
@@ -1,22 +1,22 @@
- <TargetFramework>net8.0</TargetFramework>
+ <TargetFramework>net9.0</TargetFramework>
<IsPackable>false</IsPackable>
-
-
-
-
-
-
-
+
+
+
+
+
+
+
runtime; build; native; contentfiles; analyzers; buildtransitive
all
-
+
runtime; build; native; contentfiles; analyzers; buildtransitive
all
@@ -28,7 +28,7 @@
-
+
diff --git a/API.Tests/AbstractDbTest.cs b/API.Tests/AbstractDbTest.cs
index 18f0669cd..77f978e7f 100644
--- a/API.Tests/AbstractDbTest.cs
+++ b/API.Tests/AbstractDbTest.cs
@@ -1,6 +1,5 @@
-using System.Collections.Generic;
+using System;
using System.Data.Common;
-using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
@@ -10,6 +9,7 @@ using API.Helpers;
using API.Helpers.Builders;
using API.Services;
using AutoMapper;
+using Hangfire;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
@@ -18,36 +18,33 @@ using NSubstitute;
namespace API.Tests;
-public abstract class AbstractDbTest
+public abstract class AbstractDbTest : AbstractFsTest , IDisposable
{
protected readonly DbConnection _connection;
protected readonly DataContext _context;
protected readonly IUnitOfWork _unitOfWork;
-
-
- protected const string CacheDirectory = "C:/kavita/config/cache/";
- protected const string CoverImageDirectory = "C:/kavita/config/covers/";
- protected const string BackupDirectory = "C:/kavita/config/backups/";
- protected const string LogDirectory = "C:/kavita/config/logs/";
- protected const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
- protected const string SiteThemeDirectory = "C:/kavita/config/themes/";
- protected const string TempDirectory = "C:/kavita/config/temp/";
- protected const string DataDirectory = "C:/data/";
+ protected readonly IMapper _mapper;
protected AbstractDbTest()
{
- var contextOptions = new DbContextOptionsBuilder<DataContext>()
+ var contextOptions = new DbContextOptionsBuilder<DataContext>()
.UseSqlite(CreateInMemoryDatabase())
+ .EnableSensitiveDataLogging()
.Options;
+
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
+
+ _context.Database.EnsureCreated(); // Ensure DB schema is created
+
Task.Run(SeedDb).GetAwaiter().GetResult();
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
- var mapper = config.CreateMapper();
+ _mapper = config.CreateMapper();
- _unitOfWork = new UnitOfWork(_context, mapper, null);
+ GlobalConfiguration.Configuration.UseInMemoryStorage();
+ _unitOfWork = new UnitOfWork(_context, _mapper, null);
}
private static DbConnection CreateInMemoryDatabase()
@@ -60,47 +57,66 @@ public abstract class AbstractDbTest
private async Task SeedDb()
{
- await _context.Database.MigrateAsync();
- var filesystem = CreateFileSystem();
+ try
+ {
+ await _context.Database.EnsureCreatedAsync();
+ var filesystem = CreateFileSystem();
- await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
+ await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
- var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
- setting.Value = CacheDirectory;
+ var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
+ setting.Value = CacheDirectory;
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
- setting.Value = BackupDirectory;
+ setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
+ setting.Value = BackupDirectory;
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
- setting.Value = BookmarkDirectory;
+ setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
+ setting.Value = BookmarkDirectory;
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
- setting.Value = "10";
+ setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
+ setting.Value = "10";
- _context.ServerSetting.Update(setting);
+ _context.ServerSetting.Update(setting);
- _context.Library.Add(new LibraryBuilder("Manga")
- .WithFolderPath(new FolderPathBuilder("C:/data/").Build())
- .Build());
- return await _context.SaveChangesAsync() > 0;
+
+ _context.Library.Add(new LibraryBuilder("Manga")
+ .WithAllowMetadataMatching(true)
+ .WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
+ .Build());
+
+ await _context.SaveChangesAsync();
+
+ await Seed.SeedMetadataSettings(_context);
+
+ return true;
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine($"[SeedDb] Error: {ex.Message}");
+ return false;
+ }
}
protected abstract Task ResetDb();
- protected static MockFileSystem CreateFileSystem()
+ public void Dispose()
{
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(BookmarkDirectory);
- fileSystem.AddDirectory(SiteThemeDirectory);
- fileSystem.AddDirectory(LogDirectory);
- fileSystem.AddDirectory(TempDirectory);
- fileSystem.AddDirectory(DataDirectory);
+ _context.Dispose();
+ _connection.Dispose();
+ }
- return fileSystem;
+ /// <summary>
+ /// Add a role to an existing User. Commits.
+ /// </summary>
+ /// <param name="userId"></param>
+ /// <param name="roleName"></param>
+ protected async Task AddUserWithRole(int userId, string roleName)
+ {
+ var role = new AppRole { Id = userId, Name = roleName, NormalizedName = roleName.ToUpper() };
+
+ await _context.Roles.AddAsync(role);
+ await _context.UserRoles.AddAsync(new AppUserRole { UserId = userId, RoleId = userId });
+
+ await _context.SaveChangesAsync();
}
}
diff --git a/API.Tests/AbstractFsTest.cs b/API.Tests/AbstractFsTest.cs
new file mode 100644
index 000000000..3341a3a7c
--- /dev/null
+++ b/API.Tests/AbstractFsTest.cs
@@ -0,0 +1,43 @@
+
+
+using System.IO;
+using System.IO.Abstractions.TestingHelpers;
+using API.Services.Tasks.Scanner.Parser;
+
+namespace API.Tests;
+
+public abstract class AbstractFsTest
+{
+
+ protected static readonly string Root = Parser.NormalizePath(Path.GetPathRoot(Directory.GetCurrentDirectory()));
+ protected static readonly string ConfigDirectory = Root + "kavita/config/";
+ protected static readonly string CacheDirectory = ConfigDirectory + "cache/";
+ protected static readonly string CacheLongDirectory = ConfigDirectory + "cache-long/";
+ protected static readonly string CoverImageDirectory = ConfigDirectory + "covers/";
+ protected static readonly string BackupDirectory = ConfigDirectory + "backups/";
+ protected static readonly string LogDirectory = ConfigDirectory + "logs/";
+ protected static readonly string BookmarkDirectory = ConfigDirectory + "bookmarks/";
+ protected static readonly string SiteThemeDirectory = ConfigDirectory + "themes/";
+ protected static readonly string TempDirectory = ConfigDirectory + "temp/";
+ protected static readonly string ThemesDirectory = ConfigDirectory + "theme";
+ protected static readonly string DataDirectory = Root + "data/";
+
+ protected static MockFileSystem CreateFileSystem()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.Directory.SetCurrentDirectory(Root + "kavita/");
+ fileSystem.AddDirectory(Root + "kavita/config/");
+ fileSystem.AddDirectory(CacheDirectory);
+ fileSystem.AddDirectory(CacheLongDirectory);
+ fileSystem.AddDirectory(CoverImageDirectory);
+ fileSystem.AddDirectory(BackupDirectory);
+ fileSystem.AddDirectory(BookmarkDirectory);
+ fileSystem.AddDirectory(SiteThemeDirectory);
+ fileSystem.AddDirectory(LogDirectory);
+ fileSystem.AddDirectory(TempDirectory);
+ fileSystem.AddDirectory(DataDirectory);
+ fileSystem.AddDirectory(ThemesDirectory);
+
+ return fileSystem;
+ }
+}
diff --git a/API.Tests/Comparers/ChapterSortComparerTest.cs b/API.Tests/Comparers/ChapterSortComparerTest.cs
index 220be052d..39a68b3b0 100644
--- a/API.Tests/Comparers/ChapterSortComparerTest.cs
+++ b/API.Tests/Comparers/ChapterSortComparerTest.cs
@@ -4,15 +4,16 @@ using Xunit;
namespace API.Tests.Comparers;
-public class ChapterSortComparerTest
+public class ChapterSortComparerDefaultLastTest
{
[Theory]
- [InlineData(new[] {1, 2, 0}, new[] {1, 2, 0})]
+ [InlineData(new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber}, new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
- [InlineData(new[] {1, 0, 0}, new[] {1, 0, 0})]
+ [InlineData(new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
+ [InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
public void ChapterSortTest(int[] input, int[] expected)
{
- Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparer()).ToArray());
+ Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultLast()).ToArray());
}
}
diff --git a/API.Tests/Comparers/ChapterSortComparerZeroFirstTests.cs b/API.Tests/Comparers/ChapterSortComparerZeroFirstTests.cs
index df3934884..fbae46b59 100644
--- a/API.Tests/Comparers/ChapterSortComparerZeroFirstTests.cs
+++ b/API.Tests/Comparers/ChapterSortComparerZeroFirstTests.cs
@@ -4,7 +4,7 @@ using Xunit;
namespace API.Tests.Comparers;
-public class ChapterSortComparerZeroFirstTests
+public class ChapterSortComparerDefaultFirstTests
{
[Theory]
[InlineData(new[] {1, 2, 0}, new[] {0, 1, 2,})]
@@ -12,13 +12,13 @@ public class ChapterSortComparerZeroFirstTests
[InlineData(new[] {1, 0, 0}, new[] {0, 0, 1})]
public void ChapterSortComparerZeroFirstTest(int[] input, int[] expected)
{
- Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerZeroFirst()).ToArray());
+ Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultFirst()).ToArray());
}
[Theory]
- [InlineData(new[] {1.0, 0.5, 0.3}, new[] {0.3, 0.5, 1.0})]
- public void ChapterSortComparerZeroFirstTest_Doubles(double[] input, double[] expected)
+ [InlineData(new [] {1.0f, 0.5f, 0.3f}, new [] {0.3f, 0.5f, 1.0f})]
+ public void ChapterSortComparerZeroFirstTest_Doubles(float[] input, float[] expected)
{
- Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerZeroFirst()).ToArray());
+ Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultFirst()).ToArray());
}
}
diff --git a/API.Tests/Comparers/SortComparerZeroLastTests.cs b/API.Tests/Comparers/SortComparerZeroLastTests.cs
index 669ca6c37..9a0722984 100644
--- a/API.Tests/Comparers/SortComparerZeroLastTests.cs
+++ b/API.Tests/Comparers/SortComparerZeroLastTests.cs
@@ -7,11 +7,11 @@ namespace API.Tests.Comparers;
public class SortComparerZeroLastTests
{
[Theory]
- [InlineData(new[] {0, 1, 2,}, new[] {1, 2, 0})]
+ [InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1, 2,}, new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
- [InlineData(new[] {0, 0, 1}, new[] {1, 0, 0})]
+ [InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
public void SortComparerZeroLastTest(int[] input, int[] expected)
{
- Assert.Equal(expected, input.OrderBy(f => f, SortComparerZeroLast.Default).ToArray());
+ Assert.Equal(expected, input.OrderBy(f => f, ChapterSortComparerDefaultLast.Default).ToArray());
}
}
diff --git a/API.Tests/Converters/CronConverterTests.cs b/API.Tests/Converters/CronConverterTests.cs
index 4e214e8f1..5568c89d0 100644
--- a/API.Tests/Converters/CronConverterTests.cs
+++ b/API.Tests/Converters/CronConverterTests.cs
@@ -1,5 +1,4 @@
using API.Helpers.Converters;
-using Hangfire;
using Xunit;
namespace API.Tests.Converters;
diff --git a/API.Tests/Extensions/ChapterListExtensionsTests.cs b/API.Tests/Extensions/ChapterListExtensionsTests.cs
index 3b59f1b02..d27903ca9 100644
--- a/API.Tests/Extensions/ChapterListExtensionsTests.cs
+++ b/API.Tests/Extensions/ChapterListExtensionsTests.cs
@@ -30,7 +30,7 @@ public class ChapterListExtensionsTests
{
var info = new ParserInfo()
{
- Chapters = "0",
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/darker than black.cbz",
@@ -38,12 +38,12 @@ public class ChapterListExtensionsTests
IsSpecial = false,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
};
var chapterList = new List<Chapter>()
{
- CreateChapter("darker than black - Some special", "0", CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
+ CreateChapter("darker than black - Some special", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
};
var actualChapter = chapterList.GetChapterByRange(info);
@@ -57,7 +57,7 @@ public class ChapterListExtensionsTests
{
var info = new ParserInfo()
{
- Chapters = "0",
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/darker than black.cbz",
@@ -65,12 +65,12 @@ public class ChapterListExtensionsTests
IsSpecial = true,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
};
var chapterList = new List<Chapter>()
{
- CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
+ CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
};
var actualChapter = chapterList.GetChapterByRange(info);
@@ -83,7 +83,7 @@ public class ChapterListExtensionsTests
{
var info = new ParserInfo()
{
- Chapters = "0",
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/detective comics #001.cbz",
@@ -91,13 +91,39 @@ public class ChapterListExtensionsTests
IsSpecial = true,
Series = "detective comics",
Title = "detective comics",
- Volumes = "0"
+ Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
};
var chapterList = new List<Chapter>()
{
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
+ };
+
+ var actualChapter = chapterList.GetChapterByRange(info);
+
+ Assert.Equal(chapterList[0], actualChapter);
+ }
+
+ [Fact]
+ public void GetChapterByRange_On_FilenameChange_ShouldGetChapter()
+ {
+ var info = new ParserInfo()
+ {
+ Chapters = "1",
+ Edition = "",
+ Format = MangaFormat.Archive,
+ FullFilePath = "/manga/detective comics #001.cbz",
+ Filename = "detective comics #001.cbz",
+ IsSpecial = false,
+ Series = "detective comics",
+ Title = "detective comics",
+ Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
+ };
+
+ var chapterList = new List<Chapter>()
+ {
+ CreateChapter("1", "1", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), false),
};
var actualChapter = chapterList.GetChapterByRange(info);
@@ -112,7 +138,7 @@ public class ChapterListExtensionsTests
{
var chapterList = new List<Chapter>()
{
- CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
+ CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
};
@@ -124,7 +150,7 @@ public class ChapterListExtensionsTests
{
var chapterList = new List<Chapter>()
{
- CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
+ CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
};
@@ -151,8 +177,8 @@ public class ChapterListExtensionsTests
{
var chapterList = new List<Chapter>()
{
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
};
chapterList[0].ReleaseDate = new DateTime(10, 1, 1);
@@ -166,8 +192,8 @@ public class ChapterListExtensionsTests
{
var chapterList = new List<Chapter>()
{
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
};
chapterList[0].ReleaseDate = new DateTime(2002, 1, 1);
diff --git a/API.Tests/Extensions/EncodeFormatExtensionsTests.cs b/API.Tests/Extensions/EncodeFormatExtensionsTests.cs
new file mode 100644
index 000000000..a02de84aa
--- /dev/null
+++ b/API.Tests/Extensions/EncodeFormatExtensionsTests.cs
@@ -0,0 +1,31 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using API.Entities.Enums;
+using API.Extensions;
+using Xunit;
+
+namespace API.Tests.Extensions;
+
+public class EncodeFormatExtensionsTests
+{
+ [Fact]
+ public void GetExtension_ShouldReturnCorrectExtensionForAllValues()
+ {
+ // Arrange
+ var expectedExtensions = new Dictionary<EncodeFormat, string>
+ {
+ { EncodeFormat.PNG, ".png" },
+ { EncodeFormat.WEBP, ".webp" },
+ { EncodeFormat.AVIF, ".avif" }
+ };
+
+ // Act & Assert
+ foreach (var format in Enum.GetValues(typeof(EncodeFormat)).Cast<EncodeFormat>())
+ {
+ var extension = format.GetExtension();
+ Assert.Equal(expectedExtensions[format], extension);
+ }
+ }
+
+}
diff --git a/API.Tests/Extensions/EnumerableExtensionsTests.cs b/API.Tests/Extensions/EnumerableExtensionsTests.cs
index e115d45f3..bdd3433ae 100644
--- a/API.Tests/Extensions/EnumerableExtensionsTests.cs
+++ b/API.Tests/Extensions/EnumerableExtensionsTests.cs
@@ -74,10 +74,10 @@ public class EnumerableExtensionsTests
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"},
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"}
)]
- [InlineData(
- new[] {"01/001.jpg", "001.jpg"},
- new[] {"001.jpg", "01/001.jpg"}
- )]
+ [InlineData(
+ new[] {"01/001.jpg", "001.jpg"},
+ new[] {"001.jpg", "01/001.jpg"}
+ )]
public void TestNaturalSort(string[] input, string[] expected)
{
Assert.Equal(expected, input.OrderByNatural(x => x).ToArray());
diff --git a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs
index 6ea35e471..227dd2b32 100644
--- a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs
+++ b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs
@@ -1,4 +1,5 @@
using System.Collections.Generic;
+using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using API.Entities.Enums;
@@ -6,7 +7,6 @@ using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
-using API.Tests.Helpers;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
@@ -18,9 +18,8 @@ public class ParserInfoListExtensions
private readonly IDefaultParser _defaultParser;
public ParserInfoListExtensions()
{
- _defaultParser =
- new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
- new MockFileSystem()));
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
+ _defaultParser = new BasicParser(ds, new ImageParser(ds));
}
[Theory]
@@ -33,7 +32,7 @@ public class ParserInfoListExtensions
[Theory]
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
- [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
+ [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
[InlineData(new[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo)
{
@@ -41,8 +40,8 @@ public class ParserInfoListExtensions
foreach (var filename in inputInfos)
{
infos.Add(_defaultParser.Parse(
- filename,
- string.Empty));
+ Path.Join("E:/Manga/Cynthia the Mission/", filename),
+ "E:/Manga/", "E:/Manga/", LibraryType.Manga));
}
var files = inputChapters.Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build()).ToList();
@@ -52,4 +51,26 @@ public class ParserInfoListExtensions
Assert.Equal(expectedHasInfo, infos.HasInfo(chapter));
}
+
+ [Fact]
+ public void HasInfoTest_SuccessWhenSpecial()
+ {
+ var infos = new[]
+ {
+ _defaultParser.Parse(
+ "E:/Manga/Cynthia the Mission/Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip",
+ "E:/Manga/", "E:/Manga/", LibraryType.Manga)
+ };
+
+ var files = new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip"}
+ .Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build())
+ .ToList();
+ var chapter = new ChapterBuilder("Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip")
+ .WithRange("Cynthia The Mission The Special SP01 [Desudesu&Brolen]")
+ .WithFiles(files)
+ .WithIsSpecial(true)
+ .Build();
+
+ Assert.True(infos.HasInfo(chapter));
+ }
}
diff --git a/API.Tests/Extensions/QueryableExtensionsTests.cs b/API.Tests/Extensions/QueryableExtensionsTests.cs
index 230028d44..866e0202c 100644
--- a/API.Tests/Extensions/QueryableExtensionsTests.cs
+++ b/API.Tests/Extensions/QueryableExtensionsTests.cs
@@ -1,11 +1,9 @@
using System.Collections.Generic;
using System.Linq;
-using API.Data;
using API.Data.Misc;
using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
-using API.Extensions;
+using API.Entities.Person;
using API.Extensions.QueryExtensions;
using API.Helpers.Builders;
using Xunit;
@@ -45,17 +43,17 @@ public class QueryableExtensionsTests
[InlineData(false, 1)]
public void RestrictAgainstAgeRestriction_CollectionTag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
- var items = new List<CollectionTag>()
+ var items = new List<AppUserCollection>()
{
- new CollectionTagBuilder("Test")
- .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
+ new AppUserCollectionBuilder("Test")
+ .WithItem(new SeriesBuilder("S1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build()).Build())
.Build(),
- new CollectionTagBuilder("Test 2")
- .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
- .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
+ new AppUserCollectionBuilder("Test 2")
+ .WithItem(new SeriesBuilder("S2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build()).Build())
+ .WithItem(new SeriesBuilder("S1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build()).Build())
.Build(),
- new CollectionTagBuilder("Test 3")
- .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
+ new AppUserCollectionBuilder("Test 3")
+ .WithItem(new SeriesBuilder("S3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build()).Build())
.Build(),
};
@@ -123,29 +121,46 @@ public class QueryableExtensionsTests
[Theory]
[InlineData(true, 2)]
- [InlineData(false, 1)]
- public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
+ [InlineData(false, 2)]
+ public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedPeopleCount)
{
- var items = new List<Person>()
+ // Arrange
+ var items = new List<Person>
{
- new PersonBuilder("Test", PersonRole.Character)
- .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
- .Build(),
- new PersonBuilder("Test", PersonRole.Character)
- .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
- .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
- .Build(),
- new PersonBuilder("Test", PersonRole.Character)
- .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
- .Build(),
+ CreatePersonWithSeriesMetadata("Test1", AgeRating.Teen),
+ CreatePersonWithSeriesMetadata("Test2", AgeRating.Unknown, AgeRating.Teen), // 2 series on this person, restrict will still allow access
+ CreatePersonWithSeriesMetadata("Test3", AgeRating.X18Plus)
};
- var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
+ var ageRestriction = new AgeRestriction
{
AgeRating = AgeRating.Teen,
IncludeUnknowns = includeUnknowns
- });
- Assert.Equal(expectedCount, filtered.Count());
+ };
+
+ // Act
+ var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(ageRestriction);
+
+ // Assert
+ Assert.Equal(expectedPeopleCount, filtered.Count());
+ }
+
+ private static Person CreatePersonWithSeriesMetadata(string name, params AgeRating[] ageRatings)
+ {
+ var person = new PersonBuilder(name).Build();
+
+ foreach (var ageRating in ageRatings)
+ {
+ var seriesMetadata = new SeriesMetadataBuilder().WithAgeRating(ageRating).Build();
+ person.SeriesMetadataPeople.Add(new SeriesMetadataPeople
+ {
+ SeriesMetadata = seriesMetadata,
+ Person = person,
+ Role = PersonRole.Character // Role is now part of the relationship
+ });
+ }
+
+ return person;
}
[Theory]
diff --git a/API.Tests/Extensions/SeriesExtensionsTests.cs b/API.Tests/Extensions/SeriesExtensionsTests.cs
index c14de4439..adaecfba5 100644
--- a/API.Tests/Extensions/SeriesExtensionsTests.cs
+++ b/API.Tests/Extensions/SeriesExtensionsTests.cs
@@ -1,11 +1,9 @@
-using System.Collections.Generic;
-using System.Globalization;
-using System.Linq;
+using System.Linq;
using API.Comparators;
-using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Builders;
+using API.Services.Tasks.Scanner.Parser;
using Xunit;
namespace API.Tests.Extensions;
@@ -17,22 +15,23 @@ public class SeriesExtensionsTests
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder("0")
- .WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
- .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Special 1")
.WithIsSpecial(true)
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
.Build())
- .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Special 2")
.WithIsSpecial(true)
+ .WithSortOrder(Parser.SpecialVolumeNumber + 2)
.Build())
.Build())
.Build();
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Special 1", series.GetCoverImage());
@@ -43,8 +42,8 @@ public class SeriesExtensionsTests
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
- .WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("13")
.WithCoverImage("Chapter 13")
.Build())
@@ -59,7 +58,7 @@ public class SeriesExtensionsTests
.WithVolume(new VolumeBuilder("2")
.WithName("Volume 2")
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Volume 2")
.Build())
.Build())
@@ -67,12 +66,83 @@ public class SeriesExtensionsTests
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
}
+ [Fact]
+ public void GetCoverImage_LooseChapters_WithSub1_Chapter()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("-1")
+ .WithCoverImage("Chapter -1")
+ .Build())
+ .WithChapter(new ChapterBuilder("0.5")
+ .WithCoverImage("Chapter 0.5")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithCoverImage("Chapter 2")
+ .Build())
+ .WithChapter(new ChapterBuilder("1")
+ .WithCoverImage("Chapter 1")
+ .Build())
+ .WithChapter(new ChapterBuilder("3")
+ .WithCoverImage("Chapter 3")
+ .Build())
+ .WithChapter(new ChapterBuilder("4AU")
+ .WithCoverImage("Chapter 4AU")
+ .Build())
+ .Build())
+
+ .Build();
+
+
+ Assert.Equal("Chapter 1", series.GetCoverImage());
+ }
+
+ /// <summary>
+ /// Checks the case where there are specials and loose leaf chapters; the loose leaf chapters should be preferred
+ /// </summary>
+ [Fact]
+ public void GetCoverImage_LooseChapters_WithSub1_Chapter_WithSpecials()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithName(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("I am a Special")
+ .WithCoverImage("I am a Special")
+ .Build())
+ .WithChapter(new ChapterBuilder("I am a Special 2")
+ .WithCoverImage("I am a Special 2")
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("0.5")
+ .WithCoverImage("Chapter 0.5")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithCoverImage("Chapter 2")
+ .Build())
+ .WithChapter(new ChapterBuilder("1")
+ .WithCoverImage("Chapter 1")
+ .Build())
+ .Build())
+
+ .Build();
+
+
+ Assert.Equal("Chapter 1", series.GetCoverImage());
+ }
+
[Fact]
public void GetCoverImage_JustVolumes()
{
@@ -81,14 +151,14 @@ public class SeriesExtensionsTests
.WithVolume(new VolumeBuilder("1")
.WithName("Volume 1")
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Volume 1 Chapter 1")
.Build())
.Build())
.WithVolume(new VolumeBuilder("2")
.WithName("Volume 2")
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Volume 2")
.Build())
.Build())
@@ -109,19 +179,48 @@ public class SeriesExtensionsTests
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
}
+ [Fact]
+ public void GetCoverImage_JustVolumes_ButVolume0()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+
+ .WithVolume(new VolumeBuilder("0")
+ .WithName("Volume 0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Volume 0")
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder("1")
+ .WithName("Volume 1")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Volume 1")
+ .Build())
+ .Build())
+ .Build();
+
+ foreach (var vol in series.Volumes)
+ {
+ vol.CoverImage = vol.Chapters.MinBy(x => x.SortOrder, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
+ }
+
+ Assert.Equal("Volume 1", series.GetCoverImage());
+ }
+
[Fact]
public void GetCoverImage_JustSpecials_WithDecimal()
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder("0")
- .WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("2.5")
.WithIsSpecial(false)
.WithCoverImage("Special 1")
@@ -135,7 +234,7 @@ public class SeriesExtensionsTests
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Special 2", series.GetCoverImage());
@@ -146,8 +245,8 @@ public class SeriesExtensionsTests
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder("0")
- .WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("2.5")
.WithIsSpecial(false)
.WithCoverImage("Chapter 2.5")
@@ -156,16 +255,19 @@ public class SeriesExtensionsTests
.WithIsSpecial(false)
.WithCoverImage("Chapter 2")
.Build())
- .WithChapter(new ChapterBuilder("0")
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(true)
.WithCoverImage("Special 1")
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
.Build())
- .Build())
+ .Build())
.Build();
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Chapter 2", series.GetCoverImage());
@@ -176,8 +278,8 @@ public class SeriesExtensionsTests
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder("0")
- .WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("2.5")
.WithIsSpecial(false)
.WithCoverImage("Chapter 2.5")
@@ -186,14 +288,17 @@ public class SeriesExtensionsTests
.WithIsSpecial(false)
.WithCoverImage("Chapter 2")
.Build())
- .WithChapter(new ChapterBuilder("0")
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(true)
.WithCoverImage("Special 3")
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
.Build())
.Build())
.WithVolume(new VolumeBuilder("1")
.WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(false)
.WithCoverImage("Volume 1")
.Build())
@@ -202,7 +307,7 @@ public class SeriesExtensionsTests
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1", series.GetCoverImage());
@@ -213,8 +318,8 @@ public class SeriesExtensionsTests
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder("0")
- .WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("2.5")
.WithIsSpecial(false)
.WithCoverImage("Chapter 2.5")
@@ -223,14 +328,17 @@ public class SeriesExtensionsTests
.WithIsSpecial(false)
.WithCoverImage("Chapter 2")
.Build())
- .WithChapter(new ChapterBuilder("0")
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(true)
.WithCoverImage("Special 1")
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
.Build())
.Build())
.WithVolume(new VolumeBuilder("1")
.WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(false)
.WithCoverImage("Volume 1")
.Build())
@@ -239,7 +347,7 @@ public class SeriesExtensionsTests
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1", series.GetCoverImage());
@@ -250,8 +358,8 @@ public class SeriesExtensionsTests
{
var series = new SeriesBuilder("Ippo")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder("0")
- .WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1426")
.WithIsSpecial(false)
.WithCoverImage("Chapter 1426")
@@ -260,21 +368,24 @@ public class SeriesExtensionsTests
.WithIsSpecial(false)
.WithCoverImage("Chapter 1425")
.Build())
- .WithChapter(new ChapterBuilder("0")
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(true)
- .WithCoverImage("Special 1")
+ .WithCoverImage("Special 3")
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
.Build())
.Build())
.WithVolume(new VolumeBuilder("1")
.WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(false)
.WithCoverImage("Volume 1")
.Build())
.Build())
.WithVolume(new VolumeBuilder("137")
.WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(false)
.WithCoverImage("Volume 137")
.Build())
@@ -283,7 +394,7 @@ public class SeriesExtensionsTests
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1", series.GetCoverImage());
@@ -294,8 +405,8 @@ public class SeriesExtensionsTests
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder("0")
- .WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("2.5")
.WithIsSpecial(false)
.WithCoverImage("Chapter 2.5")
@@ -307,7 +418,7 @@ public class SeriesExtensionsTests
.Build())
.WithVolume(new VolumeBuilder("4")
.WithMinNumber(4)
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithIsSpecial(false)
.WithCoverImage("Volume 4")
.Build())
@@ -316,11 +427,77 @@ public class SeriesExtensionsTests
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Chapter 2", series.GetCoverImage());
}
+ /// <summary>
+ /// Ensure that the Series cover is issue 1 when there are entries numbered below 1 as well as specials
+ /// </summary>
+ [Fact]
+ public void GetCoverImage_LessThanIssue1()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("0")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 0")
+ .Build())
+ .WithChapter(new ChapterBuilder("1")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 1")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithMinNumber(4)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 4")
+ .Build())
+ .Build())
+ .Build();
+
+ Assert.Equal("Chapter 1", series.GetCoverImage());
+ }
+
+ /// <summary>
+ /// Ensure that the Series cover is issue 1 when there are entries numbered below 1 (including negatives) and specials
+ /// </summary>
+ [Fact]
+ public void GetCoverImage_LessThanIssue1_WithNegative()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("-1")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter -1")
+ .Build())
+ .WithChapter(new ChapterBuilder("0")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 0")
+ .Build())
+ .WithChapter(new ChapterBuilder("1")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 1")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithMinNumber(4)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 4")
+ .Build())
+ .Build())
+ .Build();
+
+ Assert.Equal("Chapter 1", series.GetCoverImage());
+ }
+
}
diff --git a/API.Tests/Extensions/SeriesFilterTests.cs b/API.Tests/Extensions/SeriesFilterTests.cs
index 2774ad78e..577e17619 100644
--- a/API.Tests/Extensions/SeriesFilterTests.cs
+++ b/API.Tests/Extensions/SeriesFilterTests.cs
@@ -1,28 +1,1342 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
+using System.Linq;
using System.Threading.Tasks;
+using API.DTOs;
using API.DTOs.Filtering.v2;
+using API.DTOs.Progress;
+using API.Entities;
+using API.Entities.Enums;
using API.Extensions.QueryExtensions.Filtering;
+using API.Helpers.Builders;
+using API.Services;
+using API.Services.Plus;
+using API.SignalR;
+using Kavita.Common;
using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
using Xunit;
namespace API.Tests.Extensions;
public class SeriesFilterTests : AbstractDbTest
{
-
- protected override Task ResetDb()
+ protected override async Task ResetDb()
{
- return Task.CompletedTask;
+ _context.Series.RemoveRange(_context.Series);
+ _context.AppUser.RemoveRange(_context.AppUser);
+ await _context.SaveChangesAsync();
}
+ #region HasProgress
+
+ private async Task<AppUser> SetupHasProgress()
+ {
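+ // Seeds one user with three 10-page series: "None" gets no progress, "Partial" gets 5/10 pages read, "Full" gets 10/10 pages read.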
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("None").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Partial").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Full").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+
+ // Create read progress on Partial and Full
+ var readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(),
+ Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
+ Substitute.For<IDirectoryService>(), Substitute.For<IScrobblingService>());
+
+ // Select Partial and set pages read to 5 on first chapter
+ var partialSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(2);
+ var partialChapter = partialSeries.Volumes.First().Chapters.First();
+
+ Assert.True(await readerService.SaveReadingProgress(new ProgressDto()
+ {
+ ChapterId = partialChapter.Id,
+ LibraryId = 1,
+ SeriesId = partialSeries.Id,
+ PageNum = 5,
+ VolumeId = partialChapter.VolumeId
+ }, user.Id));
+
+ // Select Full and set pages read to 10 on first chapter
+ var fullSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(3);
+ var fullChapter = fullSeries.Volumes.First().Chapters.First();
+
+ Assert.True(await readerService.SaveReadingProgress(new ProgressDto()
+ {
+ ChapterId = fullChapter.Id,
+ LibraryId = 1,
+ SeriesId = fullSeries.Id,
+ PageNum = 10,
+ VolumeId = fullChapter.VolumeId
+ }, user.Id));
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThan50_ShouldReturnSingle()
+ {
+ var user = await SetupHasProgress();
+
+ var queryResult = await _context.Series.HasReadingProgress(true, FilterComparison.LessThan, 50, user.Id)
+ .ToListAsync();
+
+ Assert.Single(queryResult);
+ Assert.Equal("None", queryResult.First().Name);
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThanOrEqual50_ShouldReturnTwo()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress <= 50%
+ var queryResult = await _context.Series.HasReadingProgress(true, FilterComparison.LessThanEqual, 50, user.Id)
+ .ToListAsync();
+
+ Assert.Equal(2, queryResult.Count);
+ Assert.Contains(queryResult, s => s.Name == "None");
+ Assert.Contains(queryResult, s => s.Name == "Partial");
+ }
+
+ [Fact]
+ public async Task HasProgress_GreaterThan50_ShouldReturnFull()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress > 50%
+ var queryResult = await _context.Series.HasReadingProgress(true, FilterComparison.GreaterThan, 50, user.Id)
+ .ToListAsync();
+
+ Assert.Single(queryResult);
+ Assert.Equal("Full", queryResult.First().Name);
+ }
+
+ [Fact]
+ public async Task HasProgress_Equal100_ShouldReturnFull()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress == 100%
+ var queryResult = await _context.Series.HasReadingProgress(true, FilterComparison.Equal, 100, user.Id)
+ .ToListAsync();
+
+ Assert.Single(queryResult);
+ Assert.Equal("Full", queryResult.First().Name);
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThan100_ShouldReturnTwo()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress < 100%
+ var queryResult = await _context.Series.HasReadingProgress(true, FilterComparison.LessThan, 100, user.Id)
+ .ToListAsync();
+
+ Assert.Equal(2, queryResult.Count);
+ Assert.Contains(queryResult, s => s.Name == "None");
+ Assert.Contains(queryResult, s => s.Name == "Partial");
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThanOrEqual100_ShouldReturnAll()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress <= 100%
+ var queryResult = await _context.Series.HasReadingProgress(true, FilterComparison.LessThanEqual, 100, user.Id)
+ .ToListAsync();
+
+ Assert.Equal(3, queryResult.Count);
+ Assert.Contains(queryResult, s => s.Name == "None");
+ Assert.Contains(queryResult, s => s.Name == "Partial");
+ Assert.Contains(queryResult, s => s.Name == "Full");
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThan100_WithProgress99_99_ShouldReturnSeries()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("AlmostFull").WithPages(100)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(100).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ var readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(),
+ Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
+ Substitute.For<IDirectoryService>(), Substitute.For<IScrobblingService>());
+
+ // Set progress to just under 100% (99/100 pages read)
+ var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
+ var chapter = series.Volumes.First().Chapters.First();
+
+ Assert.True(await readerService.SaveReadingProgress(new ProgressDto()
+ {
+ ChapterId = chapter.Id,
+ LibraryId = 1,
+ SeriesId = series.Id,
+ PageNum = 99,
+ VolumeId = chapter.VolumeId
+ }, user.Id));
+
+ // Query series with progress < 100%
+ var queryResult = await _context.Series.HasReadingProgress(true, FilterComparison.LessThan, 100, user.Id)
+ .ToListAsync();
+
+ Assert.Single(queryResult);
+ Assert.Equal("AlmostFull", queryResult.First().Name);
+ }
+ #endregion
+
#region HasLanguage
- [Fact]
- public async Task HasLanguage_Works()
+ private async Task<AppUser> SetupHasLanguage()
{
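+ // Seeds three series whose metadata languages are "en", "fr", and "es".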
- var foundSeries = await _context.Series.HasLanguage(true, FilterComparison.Contains, new List<string>() { }).ToListAsync();
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("English").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithLanguage("en").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("French").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithLanguage("fr").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Spanish").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithLanguage("es").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasLanguage_Equal_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await _context.Series.HasLanguage(true, FilterComparison.Equal, ["en"]).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("en", foundSeries[0].Metadata.Language);
+ }
+
+ [Fact]
+ public async Task HasLanguage_NotEqual_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await _context.Series.HasLanguage(true, FilterComparison.NotEqual, ["en"]).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.DoesNotContain(foundSeries, s => s.Metadata.Language == "en");
+ }
+
+ [Fact]
+ public async Task HasLanguage_Contains_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await _context.Series.HasLanguage(true, FilterComparison.Contains, ["en", "fr"]).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Metadata.Language == "en");
+ Assert.Contains(foundSeries, s => s.Metadata.Language == "fr");
+ }
+
+ [Fact]
+ public async Task HasLanguage_NotContains_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await _context.Series.HasLanguage(true, FilterComparison.NotContains, ["en", "fr"]).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("es", foundSeries[0].Metadata.Language);
+ }
+
+ [Fact]
+ public async Task HasLanguage_MustContains_Works()
+ {
+ await SetupHasLanguage();
+
+ // Since "MustContains" matches all the provided languages, no series should match in this case.
+ var foundSeries = await _context.Series.HasLanguage(true, FilterComparison.MustContains, ["en", "fr"]).ToListAsync();
+ Assert.Empty(foundSeries);
+
+ // Single language should work.
+ foundSeries = await _context.Series.HasLanguage(true, FilterComparison.MustContains, ["en"]).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("en", foundSeries[0].Metadata.Language);
+ }
+
+ [Fact]
+ public async Task HasLanguage_Matches_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await _context.Series.HasLanguage(true, FilterComparison.Matches, ["e"]).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains("en", foundSeries.Select(s => s.Metadata.Language));
+ Assert.Contains("es", foundSeries.Select(s => s.Metadata.Language));
+ }
+
+ [Fact]
+ public async Task HasLanguage_DisabledCondition_ReturnsAll()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await _context.Series.HasLanguage(false, FilterComparison.Equal, ["en"]).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasLanguage_EmptyLanguageList_ReturnsAll()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await _context.Series.HasLanguage(true, FilterComparison.Equal, new List<string>()).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasLanguage_UnsupportedComparison_ThrowsException()
+ {
+ await SetupHasLanguage();
+
+ await Assert.ThrowsAsync<KavitaException>(async () =>
+ {
+ await _context.Series.HasLanguage(true, FilterComparison.GreaterThan, ["en"]).ToListAsync();
+ });
}
+ #endregion
+
+ #region HasAverageRating
+
+ private async Task<AppUser> SetupHasAverageRating()
+ {
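+ // Seeds three series with external average ratings of -1 (not set), 50, and 100.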
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("None").WithPages(10)
+ .WithExternalMetadata(new ExternalSeriesMetadataBuilder().WithAverageExternalRating(-1).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Partial").WithPages(10)
+ .WithExternalMetadata(new ExternalSeriesMetadataBuilder().WithAverageExternalRating(50).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Full").WithPages(10)
+ .WithExternalMetadata(new ExternalSeriesMetadataBuilder().WithAverageExternalRating(100).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasAverageRating_Equal_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await _context.Series.HasAverageRating(true, FilterComparison.Equal, 100).ToListAsync();
+ Assert.Single(series);
+ Assert.Equal("Full", series[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_GreaterThan_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await _context.Series.HasAverageRating(true, FilterComparison.GreaterThan, 50).ToListAsync();
+ Assert.Single(series);
+ Assert.Equal("Full", series[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_GreaterThanEqual_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await _context.Series.HasAverageRating(true, FilterComparison.GreaterThanEqual, 50).ToListAsync();
+ Assert.Equal(2, series.Count);
+ Assert.Contains(series, s => s.Name == "Partial");
+ Assert.Contains(series, s => s.Name == "Full");
+ }
+
+ [Fact]
+ public async Task HasAverageRating_LessThan_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await _context.Series.HasAverageRating(true, FilterComparison.LessThan, 50).ToListAsync();
+ Assert.Single(series);
+ Assert.Equal("None", series[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_LessThanEqual_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await _context.Series.HasAverageRating(true, FilterComparison.LessThanEqual, 50).ToListAsync();
+ Assert.Equal(2, series.Count);
+ Assert.Contains(series, s => s.Name == "None");
+ Assert.Contains(series, s => s.Name == "Partial");
+ }
+
+ [Fact]
+ public async Task HasAverageRating_NotEqual_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await _context.Series.HasAverageRating(true, FilterComparison.NotEqual, 100).ToListAsync();
+ Assert.Equal(2, series.Count);
+ Assert.DoesNotContain(series, s => s.Name == "Full");
+ }
+
+ [Fact]
+ public async Task HasAverageRating_ConditionFalse_ReturnsAll()
+ {
+ await SetupHasAverageRating();
+
+ var series = await _context.Series.HasAverageRating(false, FilterComparison.Equal, 100).ToListAsync();
+ Assert.Equal(3, series.Count);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_NotSet_IsHandled()
+ {
+ await SetupHasAverageRating();
+
+ var series = await _context.Series.HasAverageRating(true, FilterComparison.Equal, -1).ToListAsync();
+ Assert.Single(series);
+ Assert.Equal("None", series[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_ThrowsForInvalidComparison()
+ {
+ await SetupHasAverageRating();
+
+ await Assert.ThrowsAsync<KavitaException>(async () =>
+ {
+ await _context.Series.HasAverageRating(true, FilterComparison.Contains, 50).ToListAsync();
+ });
+ }
+
+ [Fact]
+ public async Task HasAverageRating_ThrowsForOutOfRangeComparison()
+ {
+ await SetupHasAverageRating();
+
+ await Assert.ThrowsAsync<ArgumentOutOfRangeException>(async () =>
+ {
+ await _context.Series.HasAverageRating(true, (FilterComparison)999, 50).ToListAsync();
+ });
+ }
+
+ #endregion
+
+ #region HasPublicationStatus
+
+ private async Task<AppUser> SetupHasPublicationStatus()
+ {
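+ // Seeds one series per publication status: Cancelled, OnGoing, and Completed.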
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("Cancelled").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.Cancelled).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("OnGoing").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.OnGoing).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Completed").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.Completed).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_Equal_Works()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await _context.Series.HasPublicationStatus(true, FilterComparison.Equal, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("Cancelled", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_Contains_Works()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await _context.Series.HasPublicationStatus(true, FilterComparison.Contains, new List<PublicationStatus> { PublicationStatus.Cancelled, PublicationStatus.Completed }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Cancelled");
+ Assert.Contains(foundSeries, s => s.Name == "Completed");
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_NotContains_Works()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await _context.Series.HasPublicationStatus(true, FilterComparison.NotContains, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "OnGoing");
+ Assert.Contains(foundSeries, s => s.Name == "Completed");
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_NotEqual_Works()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await _context.Series.HasPublicationStatus(true, FilterComparison.NotEqual, new List<PublicationStatus> { PublicationStatus.OnGoing }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Cancelled");
+ Assert.Contains(foundSeries, s => s.Name == "Completed");
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_ConditionFalse_ReturnsAll()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await _context.Series.HasPublicationStatus(false, FilterComparison.Equal, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_EmptyPubStatuses_ReturnsAll()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await _context.Series.HasPublicationStatus(true, FilterComparison.Equal, new List<PublicationStatus>()).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_ThrowsForInvalidComparison()
+ {
+ await SetupHasPublicationStatus();
+
+ await Assert.ThrowsAsync<KavitaException>(async () =>
+ {
+ await _context.Series.HasPublicationStatus(true, FilterComparison.BeginsWith, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ });
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_ThrowsForOutOfRangeComparison()
+ {
+ await SetupHasPublicationStatus();
+
+ await Assert.ThrowsAsync<ArgumentOutOfRangeException>(async () =>
+ {
+ await _context.Series.HasPublicationStatus(true, (FilterComparison)999, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ });
+ }
+ #endregion
+
+ #region HasAgeRating
+ private async Task<AppUser> SetupHasAgeRating()
+ {
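+ // Seeds one series per age rating: Unknown, G, and Mature.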
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("Unknown").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("G").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.G).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Mature").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Mature).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasAgeRating_Equal_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.Equal, [AgeRating.G]).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("G", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAgeRating_Contains_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.Contains, new List<AgeRating> { AgeRating.G, AgeRating.Mature }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_NotContains_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.NotContains, new List<AgeRating> { AgeRating.Unknown }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_NotEqual_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.NotEqual, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Unknown");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_GreaterThan_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.GreaterThan, new List<AgeRating> { AgeRating.Unknown }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_GreaterThanEqual_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.GreaterThanEqual, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_LessThan_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.LessThan, new List<AgeRating> { AgeRating.Mature }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Unknown");
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_LessThanEqual_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.LessThanEqual, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Unknown");
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_ConditionFalse_ReturnsAll()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(false, FilterComparison.Equal, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasAgeRating_EmptyRatings_ReturnsAll()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await _context.Series.HasAgeRating(true, FilterComparison.Equal, new List<AgeRating>()).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasAgeRating_ThrowsForInvalidComparison()
+ {
+ await SetupHasAgeRating();
+
+ await Assert.ThrowsAsync<KavitaException>(async () =>
+ {
+ await _context.Series.HasAgeRating(true, FilterComparison.BeginsWith, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ });
+ }
+
+ [Fact]
+ public async Task HasAgeRating_ThrowsForOutOfRangeComparison()
+ {
+ await SetupHasAgeRating();
+
+ await Assert.ThrowsAsync<ArgumentOutOfRangeException>(async () =>
+ {
+ await _context.Series.HasAgeRating(true, (FilterComparison)999, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ });
+ }
+
+ #endregion
+
+ #region HasReleaseYear
+
+ private async Task<AppUser> SetupHasReleaseYear()
+ {
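+ // Seeds three series with release years 2000, 2020, and 2025.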
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("2000").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithReleaseYear(2000).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("2020").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithReleaseYear(2020).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("2025").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithReleaseYear(2025).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_Equal_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await _context.Series.HasReleaseYear(true, FilterComparison.Equal, 2020).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("2020", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_GreaterThan_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await _context.Series.HasReleaseYear(true, FilterComparison.GreaterThan, 2000).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "2020");
+ Assert.Contains(foundSeries, s => s.Name == "2025");
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_LessThan_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await _context.Series.HasReleaseYear(true, FilterComparison.LessThan, 2025).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "2000");
+ Assert.Contains(foundSeries, s => s.Name == "2020");
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_IsInLast_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await _context.Series.HasReleaseYear(true, FilterComparison.IsInLast, 5).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_IsNotInLast_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await _context.Series.HasReleaseYear(true, FilterComparison.IsNotInLast, 5).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Contains(foundSeries, s => s.Name == "2000");
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_ConditionFalse_ReturnsAll()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await _context.Series.HasReleaseYear(false, FilterComparison.Equal, 2020).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_ReleaseYearNull_ReturnsAll()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await _context.Series.HasReleaseYear(true, FilterComparison.Equal, null).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_IsEmpty_Works()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("EmptyYear").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithReleaseYear(0).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ var foundSeries = await _context.Series.HasReleaseYear(true, FilterComparison.IsEmpty, 0).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("EmptyYear", foundSeries[0].Name);
+ }
+
+
+ #endregion
+
+ #region HasRating
+
+ private async Task<AppUser> SetupHasRating()
+ {
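+ // Seeds three series and records a user rating of 0 on "0 Rating" and 4.5 on "4.5 Rating"; "No Rating" is left unrated.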
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("No Rating").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("0 Rating").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("4.5 Rating").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+
+ var seriesService = new SeriesService(_unitOfWork, Substitute.For<IEventHub>(),
+ Substitute.For<ITaskScheduler>(), Substitute.For<ILogger<SeriesService>>(),
+ Substitute.For<IScrobblingService>(), Substitute.For<ILocalizationService>(),
+ Substitute.For<IImageService>());
+
+ // Select 0 Rating
+ var zeroRating = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(2);
+ Assert.NotNull(zeroRating);
+
+ Assert.True(await seriesService.UpdateRating(user, new UpdateSeriesRatingDto()
+ {
+ SeriesId = zeroRating.Id,
+ UserRating = 0
+ }));
+
+ // Select 4.5 Rating
+ var partialRating = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(3);
+
+ Assert.True(await seriesService.UpdateRating(user, new UpdateSeriesRatingDto()
+ {
+ SeriesId = partialRating.Id,
+ UserRating = 4.5f
+ }));
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasRating_Equal_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await _context.Series
+ .HasRating(true, FilterComparison.Equal, 4.5f, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("4.5 Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_GreaterThan_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await _context.Series
+ .HasRating(true, FilterComparison.GreaterThan, 0, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("4.5 Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_LessThan_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await _context.Series
+ .HasRating(true, FilterComparison.LessThan, 4.5f, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("0 Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_IsEmpty_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await _context.Series
+ .HasRating(true, FilterComparison.IsEmpty, 0, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("No Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_GreaterThanEqual_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await _context.Series
+ .HasRating(true, FilterComparison.GreaterThanEqual, 4.5f, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("4.5 Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_LessThanEqual_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await _context.Series
+ .HasRating(true, FilterComparison.LessThanEqual, 0, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("0 Rating", foundSeries[0].Name);
+ }
+
+ #endregion
+
+ #region HasAverageReadTime
+
+
+
+ #endregion
+
+ #region HasReadLast
+
+
+
+ #endregion
+
+ #region HasReadingDate
+
+
+
+ #endregion
+
+ #region HasTags
+
+
+
+ #endregion
+
+ #region HasPeople
+
+
+
+ #endregion
+
+ #region HasGenre
+
+
+
+ #endregion
+
+ #region HasFormat
+
+
+
+ #endregion
+
+ #region HasCollectionTags
+
+
+
+ #endregion
+
+ #region HasName
+
+ private async Task<AppUser> SetupHasName()
+ {
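+ // Seeds two series that each have both a Name and a LocalizedName, so name filters can match either field.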
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("Don't Toy With Me, Miss Nagatoro").WithLocalizedName("Ijiranaide, Nagatoro-san").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("My Dress-Up Darling").WithLocalizedName("Sono Bisque Doll wa Koi wo Suru").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasName_Equal_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await _context.Series
+ .HasName(true, FilterComparison.Equal, "My Dress-Up Darling")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("My Dress-Up Darling", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_Equal_LocalizedName_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await _context.Series
+ .HasName(true, FilterComparison.Equal, "Ijiranaide, Nagatoro-san")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Don't Toy With Me, Miss Nagatoro", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_BeginsWith_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await _context.Series
+ .HasName(true, FilterComparison.BeginsWith, "My Dress")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("My Dress-Up Darling", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_BeginsWith_LocalizedName_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await _context.Series
+ .HasName(true, FilterComparison.BeginsWith, "Sono Bisque")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("My Dress-Up Darling", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_EndsWith_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await _context.Series
+ .HasName(true, FilterComparison.EndsWith, "Nagatoro")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Don't Toy With Me, Miss Nagatoro", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_Matches_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await _context.Series
+ .HasName(true, FilterComparison.Matches, "Toy With Me")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Don't Toy With Me, Miss Nagatoro", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_NotEqual_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await _context.Series
+ .HasName(true, FilterComparison.NotEqual, "My Dress-Up Darling")
+ .ToListAsync();
+
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Equal("Don't Toy With Me, Miss Nagatoro", foundSeries[0].Name);
+ }
+
+
+ #endregion
+
+ #region HasSummary
+
+ private async Task<AppUser> SetupHasSummary()
+ {
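+ // Seeds three series with distinct summaries and one series with no summary at all.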
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("Hippos").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithSummary("I like hippos").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Apples").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithSummary("I like apples").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Ducks").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithSummary("I like ducks").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("No Summary").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ _context.Users.Add(user);
+ _context.Library.Add(library);
+ await _context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasSummary_Equal_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await _context.Series
+ .HasSummary(true, FilterComparison.Equal, "I like hippos")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Hippos", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasSummary_BeginsWith_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await _context.Series
+ .HasSummary(true, FilterComparison.BeginsWith, "I like h")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Hippos", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasSummary_EndsWith_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await _context.Series
+ .HasSummary(true, FilterComparison.EndsWith, "apples")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Apples", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasSummary_Matches_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await _context.Series
+ .HasSummary(true, FilterComparison.Matches, "like ducks")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Ducks", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasSummary_NotEqual_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await _context.Series
+ .HasSummary(true, FilterComparison.NotEqual, "I like ducks")
+ .ToListAsync();
+
+ Assert.Equal(3, foundSeries.Count);
+ Assert.DoesNotContain(foundSeries, s => s.Name == "Ducks");
+ }
+
+ [Fact]
+ public async Task HasSummary_IsEmpty_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await _context.Series
+ .HasSummary(true, FilterComparison.IsEmpty, string.Empty)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("No Summary", foundSeries[0].Name);
+ }
+
+ #endregion
+
+
+ #region HasPath
+
+
+
+ #endregion
+
+
+ #region HasFilePath
+
+
+
#endregion
}
diff --git a/API.Tests/Extensions/VersionExtensionTests.cs b/API.Tests/Extensions/VersionExtensionTests.cs
new file mode 100644
index 000000000..e19fd7312
--- /dev/null
+++ b/API.Tests/Extensions/VersionExtensionTests.cs
@@ -0,0 +1,81 @@
+using System;
+using API.Extensions;
+using Xunit;
+
+namespace API.Tests.Extensions;
+
+public class VersionHelperTests
+{
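+ // CompareWithoutRevision should treat two versions as equal when Major, Minor, and Build match, ignoring the Revision component.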
+ [Fact]
+ public void CompareWithoutRevision_ShouldReturnTrue_WhenMajorMinorBuildMatch()
+ {
+ // Arrange
+ var v1 = new Version(1, 2, 3, 4);
+ var v2 = new Version(1, 2, 3, 5);
+
+ // Act
+ var result = v1.CompareWithoutRevision(v2);
+
+ // Assert
+ Assert.True(result);
+ }
+
+ [Fact]
+ public void CompareWithoutRevision_ShouldHandleBuildlessVersions()
+ {
+ // Arrange
+ var v1 = new Version(1, 2);
+ var v2 = new Version(1, 2);
+
+ // Act
+ var result = v1.CompareWithoutRevision(v2);
+
+ // Assert
+ Assert.True(result);
+ }
+
+ [Theory]
+ [InlineData(1, 2, 3, 1, 2, 4)]
+ [InlineData(1, 2, 3, 1, 2, 0)]
+ public void CompareWithoutRevision_ShouldReturnFalse_WhenBuildDiffers(
+ int major1, int minor1, int build1,
+ int major2, int minor2, int build2)
+ {
+ var v1 = new Version(major1, minor1, build1);
+ var v2 = new Version(major2, minor2, build2);
+
+ var result = v1.CompareWithoutRevision(v2);
+
+ Assert.False(result);
+ }
+
+ [Theory]
+ [InlineData(1, 2, 3, 1, 3, 3)]
+ [InlineData(1, 2, 3, 1, 0, 3)]
+ public void CompareWithoutRevision_ShouldReturnFalse_WhenMinorDiffers(
+ int major1, int minor1, int build1,
+ int major2, int minor2, int build2)
+ {
+ var v1 = new Version(major1, minor1, build1);
+ var v2 = new Version(major2, minor2, build2);
+
+ var result = v1.CompareWithoutRevision(v2);
+
+ Assert.False(result);
+ }
+
+ [Theory]
+ [InlineData(1, 2, 3, 2, 2, 3)]
+ [InlineData(1, 2, 3, 0, 2, 3)]
+ public void CompareWithoutRevision_ShouldReturnFalse_WhenMajorDiffers(
+ int major1, int minor1, int build1,
+ int major2, int minor2, int build2)
+ {
+ var v1 = new Version(major1, minor1, build1);
+ var v2 = new Version(major2, minor2, build2);
+
+ var result = v1.CompareWithoutRevision(v2);
+
+ Assert.False(result);
+ }
+}
diff --git a/API.Tests/Extensions/VolumeListExtensionsTests.cs b/API.Tests/Extensions/VolumeListExtensionsTests.cs
index e64267896..bbb8f215c 100644
--- a/API.Tests/Extensions/VolumeListExtensionsTests.cs
+++ b/API.Tests/Extensions/VolumeListExtensionsTests.cs
@@ -3,7 +3,6 @@ using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Builders;
-using API.Tests.Helpers;
using Xunit;
namespace API.Tests.Extensions;
@@ -21,12 +20,43 @@ public class VolumeListExtensionsTests
.WithChapter(new ChapterBuilder("3").Build())
.WithChapter(new ChapterBuilder("4").Build())
.Build(),
- new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
- .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
+ .Build(),
+
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
.Build(),
};
+ var v = volumes.GetCoverImage(MangaFormat.Archive);
+ Assert.Equal(volumes[0].MinNumber, volumes.GetCoverImage(MangaFormat.Archive).MinNumber);
+ }
+
+ [Fact]
+ public void GetCoverImage_ChoosesVolume1_WhenHalf()
+ {
+ var volumes = new List<Volume>()
+ {
+ new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("0.5").Build())
+ .Build(),
+
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build(),
+ };
+
+ var v = volumes.GetCoverImage(MangaFormat.Archive);
Assert.Equal(volumes[0].MinNumber, volumes.GetCoverImage(MangaFormat.Archive).MinNumber);
}
@@ -39,9 +69,14 @@ public class VolumeListExtensionsTests
.WithChapter(new ChapterBuilder("3").Build())
.WithChapter(new ChapterBuilder("4").Build())
.Build(),
- new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
- .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
.Build(),
};
@@ -57,9 +92,14 @@ public class VolumeListExtensionsTests
.WithChapter(new ChapterBuilder("3").Build())
.WithChapter(new ChapterBuilder("4").Build())
.Build(),
- new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
- .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
.Build(),
};
@@ -75,9 +115,14 @@ public class VolumeListExtensionsTests
.WithChapter(new ChapterBuilder("3").Build())
.WithChapter(new ChapterBuilder("4").Build())
.Build(),
- new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
- .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
.Build(),
};
@@ -95,7 +140,12 @@ public class VolumeListExtensionsTests
.Build(),
new VolumeBuilder("1")
.WithChapter(new ChapterBuilder("1").Build())
- .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
.Build(),
};
diff --git a/API.Tests/Helpers/CacheHelperTests.cs b/API.Tests/Helpers/CacheHelperTests.cs
index 82f496a7b..3962ba2df 100644
--- a/API.Tests/Helpers/CacheHelperTests.cs
+++ b/API.Tests/Helpers/CacheHelperTests.cs
@@ -2,7 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
-using API.Entities;
using API.Entities.Enums;
using API.Helpers;
using API.Helpers.Builders;
@@ -11,9 +10,9 @@ using Xunit;
namespace API.Tests.Helpers;
-public class CacheHelperTests
+public class CacheHelperTests: AbstractFsTest
{
- private const string TestCoverImageDirectory = @"c:\";
+ private static readonly string TestCoverImageDirectory = Root;
private const string TestCoverImageFile = "thumbnail.jpg";
private readonly string _testCoverPath = Path.Join(TestCoverImageDirectory, TestCoverImageFile);
private const string TestCoverArchive = @"file in folder.zip";
@@ -37,24 +36,29 @@ public class CacheHelperTests
[Theory]
[InlineData("", false)]
- [InlineData("C:/", false)]
[InlineData(null, false)]
public void CoverImageExists_DoesFileExist(string coverImage, bool exists)
{
Assert.Equal(exists, _cacheHelper.CoverImageExists(coverImage));
}
+ [Fact]
+ public void CoverImageExists_DoesFileExistRoot()
+ {
+ Assert.False(_cacheHelper.CoverImageExists(Root));
+ }
+
[Fact]
public void CoverImageExists_FileExists()
{
- Assert.True(_cacheHelper.CoverImageExists(TestCoverArchive));
+ Assert.True(_cacheHelper.CoverImageExists(Path.Join(TestCoverImageDirectory, TestCoverArchive)));
}
[Fact]
public void ShouldUpdateCoverImage_OnFirstRun()
{
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.True(_cacheHelper.ShouldUpdateCoverImage(null, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
@@ -65,7 +69,7 @@ public class CacheHelperTests
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked()
{
// Represents first run
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
@@ -76,7 +80,7 @@ public class CacheHelperTests
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked_2()
{
// Represents first run
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now,
@@ -87,7 +91,7 @@ public class CacheHelperTests
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked()
{
// Represents first run
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
@@ -98,7 +102,7 @@ public class CacheHelperTests
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked_Modified()
{
// Represents first run
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(DateTime.Now)
.Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
@@ -122,7 +126,7 @@ public class CacheHelperTests
var cacheHelper = new CacheHelper(fileService);
var created = DateTime.Now.Subtract(TimeSpan.FromHours(1));
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
.WithLastModified(DateTime.Now.Subtract(TimeSpan.FromMinutes(1)))
.Build();
@@ -133,9 +137,10 @@ public class CacheHelperTests
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceCreated()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now
+ LastWriteTime = now,
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
@@ -147,12 +152,12 @@ public class CacheHelperTests
var cacheHelper = new CacheHelper(fileService);
var chapter = new ChapterBuilder("1")
- .WithLastModified(filesystemFile.LastWriteTime.DateTime)
- .WithCreated(filesystemFile.LastWriteTime.DateTime)
+ .WithLastModified(now.DateTime)
+ .WithCreated(now.DateTime)
.Build();
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
- .WithLastModified(filesystemFile.LastWriteTime.DateTime)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
.Build();
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
}
@@ -160,9 +165,10 @@ public class CacheHelperTests
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now
+ LastWriteTime = now,
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
@@ -174,12 +180,12 @@ public class CacheHelperTests
var cacheHelper = new CacheHelper(fileService);
var chapter = new ChapterBuilder("1")
- .WithLastModified(filesystemFile.LastWriteTime.DateTime)
- .WithCreated(filesystemFile.LastWriteTime.DateTime)
+ .WithLastModified(now.DateTime)
+ .WithCreated(now.DateTime)
.Build();
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
- .WithLastModified(filesystemFile.LastWriteTime.DateTime)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
.Build();
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
@@ -188,9 +194,10 @@ public class CacheHelperTests
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified_ForceUpdate()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now
+ LastWriteTime = now.DateTime,
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
@@ -202,12 +209,12 @@ public class CacheHelperTests
var cacheHelper = new CacheHelper(fileService);
var chapter = new ChapterBuilder("1")
- .WithLastModified(filesystemFile.LastWriteTime.DateTime)
- .WithCreated(filesystemFile.LastWriteTime.DateTime)
+ .WithLastModified(now.DateTime)
+ .WithCreated(now.DateTime)
.Build();
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
- .WithLastModified(filesystemFile.LastWriteTime.DateTime)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
.Build();
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, true, file));
}
@@ -215,10 +222,11 @@ public class CacheHelperTests
[Fact]
public void IsFileUnmodifiedSinceCreationOrLastScan_ModifiedSinceLastScan()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now,
- CreationTime = DateTimeOffset.Now
+ LastWriteTime = now.DateTime,
+ CreationTime = now.DateTime
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
@@ -234,8 +242,8 @@ public class CacheHelperTests
.WithCreated(DateTime.Now.Subtract(TimeSpan.FromMinutes(10)))
.Build();
- var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
- .WithLastModified(filesystemFile.LastWriteTime.DateTime)
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
.Build();
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
}
@@ -243,9 +251,10 @@ public class CacheHelperTests
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_ModifiedSinceLastScan_ButLastModifiedSame()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now
+ LastWriteTime = now.DateTime
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
@@ -262,7 +271,7 @@ public class CacheHelperTests
.Build();
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
- .WithLastModified(filesystemFile.LastWriteTime.DateTime)
+ .WithLastModified(now.DateTime)
.Build();
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
diff --git a/API.Tests/Helpers/GenreHelperTests.cs b/API.Tests/Helpers/GenreHelperTests.cs
deleted file mode 100644
index 830f32ee0..000000000
--- a/API.Tests/Helpers/GenreHelperTests.cs
+++ /dev/null
@@ -1,118 +0,0 @@
-using System.Collections.Generic;
-using API.Data;
-using API.Entities;
-using API.Helpers;
-using API.Helpers.Builders;
-using Xunit;
-
-namespace API.Tests.Helpers;
-
-public class GenreHelperTests
-{
- [Fact]
- public void UpdateGenre_ShouldAddNewGenre()
- {
- var allGenres = new List
- {
- new GenreBuilder("Action").Build(),
- new GenreBuilder("action").Build(),
- new GenreBuilder("Sci-fi").Build(),
- };
- var genreAdded = new List();
-
- GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Adventure"}, genre =>
- {
- genreAdded.Add(genre);
- });
-
- Assert.Equal(2, genreAdded.Count);
- Assert.Equal(4, allGenres.Count);
- }
-
- [Fact]
- public void UpdateGenre_ShouldNotAddDuplicateGenre()
- {
- var allGenres = new List
- {
- new GenreBuilder("Action").Build(),
- new GenreBuilder("action").Build(),
- new GenreBuilder("Sci-fi").Build(),
-
- };
- var genreAdded = new List();
-
- GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Scifi"}, genre =>
- {
- genreAdded.Add(genre);
- });
-
- Assert.Equal(3, allGenres.Count);
- Assert.Equal(2, genreAdded.Count);
- }
-
- [Fact]
- public void AddGenre_ShouldAddOnlyNonExistingGenre()
- {
- var existingGenres = new List
- {
- new GenreBuilder("Action").Build(),
- new GenreBuilder("action").Build(),
- new GenreBuilder("Sci-fi").Build(),
- };
-
-
- GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Action").Build());
- Assert.Equal(3, existingGenres.Count);
-
- GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("action").Build());
- Assert.Equal(3, existingGenres.Count);
-
- GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Shonen").Build());
- Assert.Equal(4, existingGenres.Count);
- }
-
- [Fact]
- public void KeepOnlySamePeopleBetweenLists()
- {
- var existingGenres = new List
- {
- new GenreBuilder("Action").Build(),
- new GenreBuilder("Sci-fi").Build(),
- };
-
- var peopleFromChapters = new List
- {
- new GenreBuilder("Action").Build(),
- };
-
- var genreRemoved = new List();
- GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
- peopleFromChapters, genre =>
- {
- genreRemoved.Add(genre);
- });
-
- Assert.Single(genreRemoved);
- }
-
- [Fact]
- public void RemoveEveryoneIfNothingInRemoveAllExcept()
- {
- var existingGenres = new List
- {
- new GenreBuilder("Action").Build(),
- new GenreBuilder("Sci-fi").Build(),
- };
-
- var peopleFromChapters = new List();
-
- var genreRemoved = new List();
- GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
- peopleFromChapters, genre =>
- {
- genreRemoved.Add(genre);
- });
-
- Assert.Equal(2, genreRemoved.Count);
- }
-}
diff --git a/API.Tests/Helpers/OrderableHelperTests.cs b/API.Tests/Helpers/OrderableHelperTests.cs
index a6d741be1..15f9e6268 100644
--- a/API.Tests/Helpers/OrderableHelperTests.cs
+++ b/API.Tests/Helpers/OrderableHelperTests.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
using System.Linq;
using API.Entities;
using API.Helpers;
@@ -49,17 +50,14 @@ public class OrderableHelperTests
[Fact]
public void ReorderItems_InvalidPosition_NoChange()
{
- // Arrange
var items = new List<AppUserSideNavStream>
{
new AppUserSideNavStream { Id = 1, Order = 0, Name = "A" },
new AppUserSideNavStream { Id = 2, Order = 1, Name = "A" },
};
- // Act
OrderableHelper.ReorderItems(items, 2, 3); // Position 3 is out of range
- // Assert
Assert.Equal(1, items[0].Id); // Item 1 should remain at position 0
Assert.Equal(2, items[1].Id); // Item 2 should remain at position 1
}
@@ -80,7 +78,6 @@ public class OrderableHelperTests
[Fact]
public void ReorderItems_DoubleMove()
{
- // Arrange
var items = new List<AppUserSideNavStream>
{
new AppUserSideNavStream { Id = 1, Order = 0, Name = "0" },
@@ -94,7 +91,6 @@ public class OrderableHelperTests
// Move 4 -> 1
OrderableHelper.ReorderItems(items, 5, 1);
- // Assert
Assert.Equal(1, items[0].Id);
Assert.Equal(0, items[0].Order);
Assert.Equal(5, items[1].Id);
@@ -109,4 +105,98 @@ public class OrderableHelperTests
Assert.Equal("034125", string.Join("", items.Select(s => s.Name)));
}
+
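+ // Builds ReadingListItems with sequential Ids (1..count); the tests below assert that ReorderItems rewrites Order to match the new positions.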
+ private static List<ReadingListItem> CreateTestReadingListItems(int count = 4)
+ {
+ var items = new List<ReadingListItem>();
+
+ for (var i = 0; i < count; i++)
+ {
+ items.Add(new ReadingListItem() { Id = i + 1, Order = count, ReadingListId = i + 1});
+ }
+
+ return items;
+ }
+
+ [Fact]
+ public void ReorderItems_MoveItemToBeginning_CorrectOrder()
+ {
+ var items = CreateTestReadingListItems();
+
+ OrderableHelper.ReorderItems(items, 3, 0);
+
+ Assert.Equal(3, items[0].Id);
+ Assert.Equal(1, items[1].Id);
+ Assert.Equal(2, items[2].Id);
+ Assert.Equal(4, items[3].Id);
+
+ for (var i = 0; i < items.Count; i++)
+ {
+ Assert.Equal(i, items[i].Order);
+ }
+ }
+
+ [Fact]
+ public void ReorderItems_MoveItemToEnd_CorrectOrder()
+ {
+ var items = CreateTestReadingListItems();
+
+ OrderableHelper.ReorderItems(items, 1, 3);
+
+ Assert.Equal(2, items[0].Id);
+ Assert.Equal(3, items[1].Id);
+ Assert.Equal(4, items[2].Id);
+ Assert.Equal(1, items[3].Id);
+
+ for (var i = 0; i < items.Count; i++)
+ {
+ Assert.Equal(i, items[i].Order);
+ }
+ }
+
+ [Fact]
+ public void ReorderItems_MoveItemToMiddle_CorrectOrder()
+ {
+ var items = CreateTestReadingListItems();
+
+ OrderableHelper.ReorderItems(items, 4, 2);
+
+ Assert.Equal(1, items[0].Id);
+ Assert.Equal(2, items[1].Id);
+ Assert.Equal(4, items[2].Id);
+ Assert.Equal(3, items[3].Id);
+
+ for (var i = 0; i < items.Count; i++)
+ {
+ Assert.Equal(i, items[i].Order);
+ }
+ }
+
+ [Fact]
+ public void ReorderItems_MoveItemToOutOfBoundsPosition_MovesToEnd()
+ {
+ var items = CreateTestReadingListItems();
+
+ OrderableHelper.ReorderItems(items, 2, 10);
+
+ Assert.Equal(1, items[0].Id);
+ Assert.Equal(3, items[1].Id);
+ Assert.Equal(4, items[2].Id);
+ Assert.Equal(2, items[3].Id);
+
+ for (var i = 0; i < items.Count; i++)
+ {
+ Assert.Equal(i, items[i].Order);
+ }
+ }
+
+ [Fact]
+ public void ReorderItems_NegativePosition_ThrowsArgumentException()
+ {
+ var items = CreateTestReadingListItems();
+
+ Assert.Throws<ArgumentException>(() =>
+ OrderableHelper.ReorderItems(items, 2, -1)
+ );
+ }
}
diff --git a/API.Tests/Helpers/ParserInfoHelperTests.cs b/API.Tests/Helpers/ParserInfoHelperTests.cs
index 70ce3aa69..0bb7efb9b 100644
--- a/API.Tests/Helpers/ParserInfoHelperTests.cs
+++ b/API.Tests/Helpers/ParserInfoHelperTests.cs
@@ -1,8 +1,5 @@
using System.Collections.Generic;
-using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
-using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using API.Services.Tasks.Scanner;
diff --git a/API.Tests/Helpers/PersonHelperTests.cs b/API.Tests/Helpers/PersonHelperTests.cs
index ed59a958f..1a38ccdac 100644
--- a/API.Tests/Helpers/PersonHelperTests.cs
+++ b/API.Tests/Helpers/PersonHelperTests.cs
@@ -1,415 +1,133 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using API.Data;
-using API.DTOs;
-using API.Entities;
-using API.Entities.Enums;
-using API.Helpers;
-using API.Helpers.Builders;
-using Xunit;
+using System.Linq;
+using System.Threading.Tasks;
namespace API.Tests.Helpers;
-public class PersonHelperTests
+public class PersonHelperTests : AbstractDbTest
{
- #region UpdatePeople
- [Fact]
- public void UpdatePeople_ShouldAddNewPeople()
+ protected override async Task ResetDb()
{
- var allPeople = new List
- {
- new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
- new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
- };
- var peopleAdded = new List();
-
- PersonHelper.UpdatePeople(allPeople, new[] {"Joseph Shmo", "Sally Ann"}, PersonRole.Writer, person =>
- {
- peopleAdded.Add(person);
- });
-
- Assert.Equal(2, peopleAdded.Count);
- Assert.Equal(4, allPeople.Count);
+ _context.Series.RemoveRange(_context.Series.ToList());
+ await _context.SaveChangesAsync();
}
-
- [Fact]
- public void UpdatePeople_ShouldNotAddDuplicatePeople()
- {
- var allPeople = new List
- {
- new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
- new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
- new PersonBuilder("Sally Ann", PersonRole.CoverArtist).Build(),
-
- };
- var peopleAdded = new List();
-
- PersonHelper.UpdatePeople(allPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.CoverArtist, person =>
- {
- peopleAdded.Add(person);
- });
-
- Assert.Equal(3, allPeople.Count);
- }
- #endregion
-
- #region UpdatePeopleList
-
- [Fact]
- public void UpdatePeopleList_NullTags_NoChanges()
- {
- // Arrange
- ICollection tags = null;
- var series = new SeriesBuilder("Test Series").Build();
- var allTags = new List();
- var handleAddCalled = false;
- var onModifiedCalled = false;
-
- // Act
- PersonHelper.UpdatePeopleList(PersonRole.Writer, tags, series, allTags, p => handleAddCalled = true, () => onModifiedCalled = true);
-
- // Assert
- Assert.False(handleAddCalled);
- Assert.False(onModifiedCalled);
- }
-
- [Fact]
- public void UpdatePeopleList_AddNewTag_TagAddedAndOnModifiedCalled()
- {
- // Arrange
- const PersonRole role = PersonRole.Writer;
- var tags = new List
- {
- new PersonDto { Id = 1, Name = "John Doe", Role = role }
- };
- var series = new SeriesBuilder("Test Series").Build();
- var allTags = new List();
- var handleAddCalled = false;
- var onModifiedCalled = false;
-
- // Act
- PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
- {
- handleAddCalled = true;
- series.Metadata.People.Add(p);
- }, () => onModifiedCalled = true);
-
- // Assert
- Assert.True(handleAddCalled);
- Assert.True(onModifiedCalled);
- Assert.Single(series.Metadata.People);
- Assert.Equal("John Doe", series.Metadata.People.First().Name);
- }
-
- [Fact]
- public void UpdatePeopleList_RemoveExistingTag_TagRemovedAndOnModifiedCalled()
- {
- // Arrange
- const PersonRole role = PersonRole.Writer;
- var tags = new List();
- var series = new SeriesBuilder("Test Series").Build();
- var person = new PersonBuilder("John Doe", role).Build();
- person.Id = 1;
- series.Metadata.People.Add(person);
- var allTags = new List
- {
- person
- };
- var handleAddCalled = false;
- var onModifiedCalled = false;
-
- // Act
- PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
- {
- handleAddCalled = true;
- series.Metadata.People.Add(p);
- }, () => onModifiedCalled = true);
-
- // Assert
- Assert.False(handleAddCalled);
- Assert.True(onModifiedCalled);
- Assert.Empty(series.Metadata.People);
- }
-
- [Fact]
- public void UpdatePeopleList_UpdateExistingTag_OnModifiedCalled()
- {
- // Arrange
- const PersonRole role = PersonRole.Writer;
- var tags = new List
- {
- new PersonDto { Id = 1, Name = "John Doe", Role = role }
- };
- var series = new SeriesBuilder("Test Series").Build();
- var person = new PersonBuilder("John Doe", role).Build();
- person.Id = 1;
- series.Metadata.People.Add(person);
- var allTags = new List
- {
- person
- };
- var handleAddCalled = false;
- var onModifiedCalled = false;
-
- // Act
- PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
- {
- handleAddCalled = true;
- series.Metadata.People.Add(p);
- }, () => onModifiedCalled = true);
-
- // Assert
- Assert.False(handleAddCalled);
- Assert.False(onModifiedCalled);
- Assert.Single(series.Metadata.People);
- Assert.Equal("John Doe", series.Metadata.People.First().Name);
- }
-
- [Fact]
- public void UpdatePeopleList_NoChanges_HandleAddAndOnModifiedNotCalled()
- {
- // Arrange
- const PersonRole role = PersonRole.Writer;
- var tags = new List
- {
- new PersonDto { Id = 1, Name = "John Doe", Role = role }
- };
- var series = new SeriesBuilder("Test Series").Build();
- var person = new PersonBuilder("John Doe", role).Build();
- person.Id = 1;
- series.Metadata.People.Add(person);
- var allTags = new List
- {
- new PersonBuilder("John Doe", role).Build()
- };
- var handleAddCalled = false;
- var onModifiedCalled = false;
-
- // Act
- PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
- {
- handleAddCalled = true;
- series.Metadata.People.Add(p);
- }, () => onModifiedCalled = true);
-
- // Assert
- Assert.False(handleAddCalled);
- Assert.False(onModifiedCalled);
- Assert.Single(series.Metadata.People);
- Assert.Equal("John Doe", series.Metadata.People.First().Name);
- }
-
-
-
- #endregion
-
- #region RemovePeople
- [Fact]
- public void RemovePeople_ShouldRemovePeopleOfSameRole()
- {
- var existingPeople = new List
- {
- new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
- new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
- };
- var peopleRemoved = new List();
- PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.NotEqual(existingPeople, peopleRemoved);
- Assert.Single(peopleRemoved);
- }
-
- [Fact]
- public void RemovePeople_ShouldRemovePeopleFromBothRoles()
- {
- var existingPeople = new List
- {
- new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
- new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
- };
- var peopleRemoved = new List();
- PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.NotEqual(existingPeople, peopleRemoved);
- Assert.Single(peopleRemoved);
-
- PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo"}, PersonRole.CoverArtist, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.Empty(existingPeople);
- Assert.Equal(2, peopleRemoved.Count);
- }
-
- [Fact]
- public void RemovePeople_ShouldRemovePeopleOfSameRole_WhenNothingPassed()
- {
- var existingPeople = new List
- {
- new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
- new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
- new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
- };
- var peopleRemoved = new List();
- PersonHelper.RemovePeople(existingPeople, new List(), PersonRole.Writer, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.NotEqual(existingPeople, peopleRemoved);
- Assert.Equal(2, peopleRemoved.Count);
- }
-
-
- #endregion
-
- #region KeepOnlySamePeopleBetweenLists
- [Fact]
- public void KeepOnlySamePeopleBetweenLists()
- {
- var existingPeople = new List
- {
- new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
- new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
- new PersonBuilder("Sally", PersonRole.Writer).Build(),
- };
-
- var peopleFromChapters = new List
- {
- new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
- };
-
- var peopleRemoved = new List();
- PersonHelper.KeepOnlySamePeopleBetweenLists(existingPeople,
- peopleFromChapters, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.Equal(2, peopleRemoved.Count);
- }
- #endregion
-
- #region AddPeople
-
- [Fact]
- public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonDoesNotExist()
- {
- // Arrange
- var metadataPeople = new List();
- var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
-
- // Act
- PersonHelper.AddPersonIfNotExists(metadataPeople, person);
-
- // Assert
- Assert.Single(metadataPeople);
- Assert.Contains(person, metadataPeople);
- }
-
- [Fact]
- public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonAlreadyExists()
- {
- // Arrange
- var metadataPeople = new List
- {
- new PersonBuilder("John Smith", PersonRole.Character)
- .WithId(1)
- .Build()
- };
- var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
- // Act
- PersonHelper.AddPersonIfNotExists(metadataPeople, person);
-
- // Assert
- Assert.Single(metadataPeople);
- Assert.NotNull(metadataPeople.SingleOrDefault(p =>
- p.Name.Equals(person.Name) && p.Role == person.Role && p.NormalizedName == person.NormalizedName));
- Assert.Equal(1, metadataPeople.First().Id);
- }
-
- [Fact]
- public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonNameIsNullOrEmpty()
- {
- // Arrange
- var metadataPeople = new List();
- var person2 = new PersonBuilder(string.Empty, PersonRole.Character).Build();
-
- // Act
- PersonHelper.AddPersonIfNotExists(metadataPeople, person2);
-
- // Assert
- Assert.Empty(metadataPeople);
- }
-
- [Fact]
- public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsDifferentButRoleIsSame()
- {
- // Arrange
- var metadataPeople = new List
- {
- new PersonBuilder("John Smith", PersonRole.Character).Build()
- };
- var person = new PersonBuilder("John Doe", PersonRole.Character).Build();
-
- // Act
- PersonHelper.AddPersonIfNotExists(metadataPeople, person);
-
- // Assert
- Assert.Equal(2, metadataPeople.Count);
- Assert.Contains(person, metadataPeople);
- }
-
- [Fact]
- public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsSameButRoleIsDifferent()
- {
- // Arrange
- var metadataPeople = new List
- {
- new PersonBuilder("John Doe", PersonRole.Writer).Build()
- };
- var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
-
- // Act
- PersonHelper.AddPersonIfNotExists(metadataPeople, person);
-
- // Assert
- Assert.Equal(2, metadataPeople.Count);
- Assert.Contains(person, metadataPeople);
- }
-
-
-
-
- [Fact]
- public void AddPeople_ShouldAddOnlyNonExistingPeople()
- {
- var existingPeople = new List
- {
- new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
- new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
- new PersonBuilder("Sally", PersonRole.Writer).Build(),
- };
-
-
- PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build());
- Assert.Equal(3, existingPeople.Count);
-
- PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.Writer).Build());
- Assert.Equal(3, existingPeople.Count);
-
- PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo Two", PersonRole.CoverArtist).Build());
- Assert.Equal(4, existingPeople.Count);
- }
-
- #endregion
-
+ //
+ // // 1. Test adding new people and keeping existing ones
+ // [Fact]
+ // public async Task UpdateChapterPeopleAsync_AddNewPeople_ExistingPersonRetained()
+ // {
+ // var existingPerson = new PersonBuilder("Joe Shmo").Build();
+ // var chapter = new ChapterBuilder("1").Build();
+ //
+ // // Create an existing person and assign them to the series with a role
+ // var series = new SeriesBuilder("Test 1")
+ // .WithFormat(MangaFormat.Archive)
+ // .WithMetadata(new SeriesMetadataBuilder()
+ // .WithPerson(existingPerson, PersonRole.Editor)
+ // .Build())
+ // .WithVolume(new VolumeBuilder("1").WithChapter(chapter).Build())
+ // .Build();
+ //
+ // _unitOfWork.SeriesRepository.Add(series);
+ // await _unitOfWork.CommitAsync();
+ //
+ // // Call UpdateChapterPeopleAsync with one existing and one new person
+ // await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo", "New Person" }, PersonRole.Editor, _unitOfWork);
+ //
+ // // Assert existing person retained and new person added
+ // var people = await _unitOfWork.PersonRepository.GetAllPeople();
+ // Assert.Contains(people, p => p.Name == "Joe Shmo");
+ // Assert.Contains(people, p => p.Name == "New Person");
+ //
+ // var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
+ // Assert.Contains("Joe Shmo", chapterPeople);
+ // Assert.Contains("New Person", chapterPeople);
+ // }
+ //
+ // // 2. Test removing a person no longer in the list
+ // [Fact]
+ // public async Task UpdateChapterPeopleAsync_RemovePeople()
+ // {
+ // var existingPerson1 = new PersonBuilder("Joe Shmo").Build();
+ // var existingPerson2 = new PersonBuilder("Jane Doe").Build();
+ // var chapter = new ChapterBuilder("1").Build();
+ //
+ // var series = new SeriesBuilder("Test 1")
+ // .WithVolume(new VolumeBuilder("1")
+ // .WithChapter(new ChapterBuilder("1")
+ // .WithPerson(existingPerson1, PersonRole.Editor)
+ // .WithPerson(existingPerson2, PersonRole.Editor)
+ // .Build())
+ // .Build())
+ // .Build();
+ //
+ // _unitOfWork.SeriesRepository.Add(series);
+ // await _unitOfWork.CommitAsync();
+ //
+ // // Call UpdateChapterPeopleAsync with only one person
+ // await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
+ //
+ // var people = await _unitOfWork.PersonRepository.GetAllPeople();
+ // Assert.DoesNotContain(people, p => p.Name == "Jane Doe");
+ //
+ // var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
+ // Assert.Contains("Joe Shmo", chapterPeople);
+ // Assert.DoesNotContain("Jane Doe", chapterPeople);
+ // }
+ //
+ // // 3. Test no changes when the list of people is the same
+ // [Fact]
+ // public async Task UpdateChapterPeopleAsync_NoChanges()
+ // {
+ // var existingPerson = new PersonBuilder("Joe Shmo").Build();
+ // var chapter = new ChapterBuilder("1").Build();
+ //
+ // var series = new SeriesBuilder("Test 1")
+ // .WithVolume(new VolumeBuilder("1")
+ // .WithChapter(new ChapterBuilder("1")
+ // .WithPerson(existingPerson, PersonRole.Editor)
+ // .Build())
+ // .Build())
+ // .Build();
+ //
+ // _unitOfWork.SeriesRepository.Add(series);
+ // await _unitOfWork.CommitAsync();
+ //
+ // // Call UpdateChapterPeopleAsync with the same list
+ // await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
+ //
+ // var people = await _unitOfWork.PersonRepository.GetAllPeople();
+ // Assert.Contains(people, p => p.Name == "Joe Shmo");
+ //
+ // var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
+ // Assert.Contains("Joe Shmo", chapterPeople);
+ // Assert.Single(chapter.People); // No duplicate entries
+ // }
+ //
+ // // 4. Test multiple roles for a person
+ // [Fact]
+ // public async Task UpdateChapterPeopleAsync_MultipleRoles()
+ // {
+ // var person = new PersonBuilder("Joe Shmo").Build();
+ // var chapter = new ChapterBuilder("1").Build();
+ //
+ // var series = new SeriesBuilder("Test 1")
+ // .WithVolume(new VolumeBuilder("1")
+ // .WithChapter(new ChapterBuilder("1")
+ // .WithPerson(person, PersonRole.Writer) // Assign person as Writer
+ // .Build())
+ // .Build())
+ // .Build();
+ //
+ // _unitOfWork.SeriesRepository.Add(series);
+ // await _unitOfWork.CommitAsync();
+ //
+ // // Add same person as Editor
+ // await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
+ //
+ // // Ensure that the same person is assigned with two roles
+ // var chapterPeople = chapter.People.Where(cp => cp.Person.Name == "Joe Shmo").ToList();
+ // Assert.Equal(2, chapterPeople.Count); // One for each role
+ // Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Writer);
+ // Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Editor);
+ // }
}
diff --git a/API.Tests/Helpers/RateLimiterTests.cs b/API.Tests/Helpers/RateLimiterTests.cs
index c05ce4e6d..e9b0030b9 100644
--- a/API.Tests/Helpers/RateLimiterTests.cs
+++ b/API.Tests/Helpers/RateLimiterTests.cs
@@ -1,4 +1,5 @@
using System;
+using System.Threading.Tasks;
using API.Helpers;
using Xunit;
@@ -33,7 +34,7 @@ public class RateLimiterTests
}
[Fact]
- public void AcquireTokens_Refill()
+ public async Task AcquireTokens_Refill()
{
// Arrange
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));
@@ -43,14 +44,14 @@ public class RateLimiterTests
limiter.TryAcquire("test_key");
// Wait for refill
- System.Threading.Thread.Sleep(1100);
+ await Task.Delay(1100);
// Assert
Assert.True(limiter.TryAcquire("test_key"));
}
[Fact]
- public void AcquireTokens_Refill_WithOff()
+ public async Task AcquireTokens_Refill_WithOff()
{
// Arrange
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);
@@ -60,7 +61,7 @@ public class RateLimiterTests
limiter.TryAcquire("test_key");
// Wait for refill
- System.Threading.Thread.Sleep(2100);
+ await Task.Delay(2100);
// Assert
Assert.False(limiter.TryAcquire("test_key"));
diff --git a/API.Tests/Helpers/ReviewHelperTests.cs b/API.Tests/Helpers/ReviewHelperTests.cs
new file mode 100644
index 000000000..b221c3c70
--- /dev/null
+++ b/API.Tests/Helpers/ReviewHelperTests.cs
@@ -0,0 +1,258 @@
+using API.Helpers;
+using System.Collections.Generic;
+using System.Linq;
+using Xunit;
+using API.DTOs.SeriesDetail;
+
+namespace API.Tests.Helpers;
+
+public class ReviewHelperTests
+{
+ #region SelectSpectrumOfReviews Tests
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WhenLessThan10Reviews_ReturnsAllReviews()
+ {
+ // Arrange
+ var reviews = CreateReviewList(8);
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(8, result.Count);
+ Assert.Equal(reviews, result.OrderByDescending(r => r.Score));
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WhenMoreThan10Reviews_Returns10Reviews()
+ {
+ // Arrange
+ var reviews = CreateReviewList(20);
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(10, result.Count);
+ Assert.Equal(reviews[0], result.First());
+ Assert.Equal(reviews[19], result.Last());
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WithExactly10Reviews_ReturnsAllReviews()
+ {
+ // Arrange
+ var reviews = CreateReviewList(10);
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(10, result.Count);
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WithLargeNumberOfReviews_ReturnsCorrectSpectrum()
+ {
+ // Arrange
+ var reviews = CreateReviewList(100);
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(10, result.Count);
+ Assert.Contains(reviews[0], result);
+ Assert.Contains(reviews[1], result);
+ Assert.Contains(reviews[98], result);
+ Assert.Contains(reviews[99], result);
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WithEmptyList_ReturnsEmptyList()
+ {
+ // Arrange
+ var reviews = new List<UserReviewDto>();
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_ResultsOrderedByScoreDescending()
+ {
+ // Arrange
+ var reviews = new List<UserReviewDto>
+ {
+ new UserReviewDto { Tagline = "1", Score = 3 },
+ new UserReviewDto { Tagline = "2", Score = 5 },
+ new UserReviewDto { Tagline = "3", Score = 1 },
+ new UserReviewDto { Tagline = "4", Score = 4 },
+ new UserReviewDto { Tagline = "5", Score = 2 }
+ };
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(5, result.Count);
+ Assert.Equal(5, result[0].Score);
+ Assert.Equal(4, result[1].Score);
+ Assert.Equal(3, result[2].Score);
+ Assert.Equal(2, result[3].Score);
+ Assert.Equal(1, result[4].Score);
+ }
+
+ #endregion
+
+ #region GetCharacters Tests
+
+ [Fact]
+ public void GetCharacters_WithNullBody_ReturnsNull()
+ {
+ // Arrange
+ string body = null;
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Null(result);
+ }
+
+ [Fact]
+ public void GetCharacters_WithEmptyBody_ReturnsEmptyString()
+ {
+ // Arrange
+ var body = string.Empty;
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal(string.Empty, result);
+ }
+
+ [Fact]
+ public void GetCharacters_WithNoTextNodes_ReturnsEmptyString()
+ {
+ // Arrange
+ const string body = "";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal(string.Empty, result);
+ }
+
+ [Fact]
+ public void GetCharacters_WithLessCharactersThanLimit_ReturnsFullText()
+ {
+ // Arrange
+ var body = "This is a short review.
";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal("This is a short review.…", result);
+ }
+
+ [Fact]
+ public void GetCharacters_WithMoreCharactersThanLimit_TruncatesText()
+ {
+ // Arrange
+ var body = "" + new string('a', 200) + "
";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal(new string('a', 175) + "…", result);
+ Assert.Equal(176, result.Length); // 175 characters + ellipsis
+ }
+
+ [Fact]
+ public void GetCharacters_IgnoresScriptTags()
+ {
+ // Arrange
+ const string body = "Visible text
";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal("Visible text…", result);
+ Assert.DoesNotContain("hidden", result);
+ }
+
+ [Fact]
+ public void GetCharacters_RemovesMarkdownSymbols()
+ {
+ // Arrange
+ const string body = "This is **bold** and _italic_ text with [link](url).
";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal("This is bold and italic text with link.…", result);
+ }
+
+ [Fact]
+ public void GetCharacters_HandlesComplexMarkdownAndHtml()
+ {
+ // Arrange
+ const string body = """
+
+
+
# Header
+
This is ~~strikethrough~~ and __underlined__ text
+
~~~code block~~~
+
+++highlighted+++
+
img123(image.jpg)
+
+ """;
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.DoesNotContain("~~", result);
+ Assert.DoesNotContain("__", result);
+ Assert.DoesNotContain("~~~", result);
+ Assert.DoesNotContain("+++", result);
+ Assert.DoesNotContain("img123(", result);
+ Assert.Contains("Header", result);
+ Assert.Contains("strikethrough", result);
+ Assert.Contains("underlined", result);
+ Assert.Contains("code block", result);
+ Assert.Contains("highlighted", result);
+ }
+
+ #endregion
+
+ #region Helper Methods
+
+ private static List<UserReviewDto> CreateReviewList(int count)
+ {
+ var reviews = new List<UserReviewDto>();
+ for (var i = 0; i < count; i++)
+ {
+ reviews.Add(new UserReviewDto
+ {
+ Tagline = $"{i + 1}",
+ Score = count - i // This makes them ordered by score descending initially
+ });
+ }
+ return reviews;
+ }
+
+ #endregion
+}
+
diff --git a/API.Tests/Helpers/ScannerHelper.cs b/API.Tests/Helpers/ScannerHelper.cs
new file mode 100644
index 000000000..653efebb1
--- /dev/null
+++ b/API.Tests/Helpers/ScannerHelper.cs
@@ -0,0 +1,208 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.IO.Abstractions;
+using System.IO.Compression;
+using System.Linq;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using System.Xml;
+using System.Xml.Serialization;
+using API.Data;
+using API.Data.Metadata;
+using API.Entities;
+using API.Entities.Enums;
+using API.Helpers;
+using API.Helpers.Builders;
+using API.Services;
+using API.Services.Plus;
+using API.Services.Tasks;
+using API.Services.Tasks.Metadata;
+using API.Services.Tasks.Scanner;
+using API.SignalR;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit.Abstractions;
+
+namespace API.Tests.Helpers;
+#nullable enable
+
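+/// <summary>
+/// Scaffolds a temporary library on disk from a JSON test-case map and builds a ScannerService wired to mocked dependencies for scanner tests.
+/// </summary>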
+public class ScannerHelper
+{
+ private readonly IUnitOfWork _unitOfWork;
+ private readonly ITestOutputHelper _testOutputHelper;
+ private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests");
+ private readonly string _testcasesDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/TestCases");
+ private readonly string _imagePath = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/1x1.png");
+ private static readonly string[] ComicInfoExtensions = new[] { ".cbz", ".cbr", ".zip", ".rar" };
+
+ public ScannerHelper(IUnitOfWork unitOfWork, ITestOutputHelper testOutputHelper)
+ {
+ _unitOfWork = unitOfWork;
+ _testOutputHelper = testOutputHelper;
+ }
+
+ public async Task<Library> GenerateScannerData(string testcase, Dictionary<string, ComicInfo> comicInfos = null)
+ {
+ var testDirectoryPath = await GenerateTestDirectory(Path.Join(_testcasesDirectory, testcase), comicInfos);
+
+ var (publisher, type) = SplitPublisherAndLibraryType(Path.GetFileNameWithoutExtension(testcase));
+
+ var library = new LibraryBuilder(publisher, type)
+ .WithFolders([new FolderPath() {Path = testDirectoryPath}])
+ .Build();
+
+ var admin = new AppUserBuilder("admin", "admin@kavita.com", Seed.DefaultThemes[0])
+ .WithLibrary(library)
+ .Build();
+
+ _unitOfWork.UserRepository.Add(admin); // Admin is needed for generating collections/reading lists
+ _unitOfWork.LibraryRepository.Add(library);
+ await _unitOfWork.CommitAsync();
+
+ return library;
+ }
+
+ public ScannerService CreateServices(DirectoryService ds = null, IFileSystem fs = null)
+ {
+ fs ??= new FileSystem();
+ ds ??= new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+ var archiveService = new ArchiveService(Substitute.For<ILogger<ArchiveService>>(), ds,
+ Substitute.For(), Substitute.For());
+ var readingItemService = new ReadingItemService(archiveService, Substitute.For(),
+ Substitute.For(), ds, Substitute.For<ILogger<ReadingItemService>>());
+
+
+ var processSeries = new ProcessSeries(_unitOfWork, Substitute.For<ILogger<ProcessSeries>>(),
+ Substitute.For(),
+ ds, Substitute.For(), readingItemService, new FileService(fs),
+ Substitute.For(),
+ Substitute.For(),
+ Substitute.For(),
+ Substitute.For());
+
+ var scanner = new ScannerService(_unitOfWork, Substitute.For<ILogger<ScannerService>>(),
+ Substitute.For(),
+ Substitute.For(), Substitute.For(), ds,
+ readingItemService, processSeries, Substitute.For());
+ return scanner;
+ }
+
+ private static (string Publisher, LibraryType Type) SplitPublisherAndLibraryType(string input)
+ {
+ // Split the input string based on " - "
+ var parts = input.Split(" - ", StringSplitOptions.RemoveEmptyEntries);
+
+ if (parts.Length != 2)
+ {
+ throw new ArgumentException("Input must be in the format 'Publisher - LibraryType'");
+ }
+
+ var publisher = parts[0].Trim();
+ var libraryTypeString = parts[1].Trim();
+
+ // Try to parse the right-hand side as a LibraryType enum
+ if (!Enum.TryParse<LibraryType>(libraryTypeString, out var libraryType))
+ {
+ throw new ArgumentException($"'{libraryTypeString}' is not a valid LibraryType");
+ }
+
+ return (publisher, libraryType);
+ }
+
+
+
+ private async Task<string> GenerateTestDirectory(string mapPath, Dictionary<string, ComicInfo> comicInfos = null)
+ {
+ // Read the map file
+ var mapContent = await File.ReadAllTextAsync(mapPath);
+
+ // Deserialize the JSON content into a list of strings using System.Text.Json
+ var filePaths = JsonSerializer.Deserialize<List<string>>(mapContent);
+
+ // Create a test directory
+ var testDirectory = Path.Combine(_testDirectory, Path.GetFileNameWithoutExtension(mapPath));
+ if (Directory.Exists(testDirectory))
+ {
+ Directory.Delete(testDirectory, true);
+ }
+ Directory.CreateDirectory(testDirectory);
+
+ // Generate the files and folders
+ await Scaffold(testDirectory, filePaths, comicInfos);
+
+ _testOutputHelper.WriteLine($"Test Directory Path: {testDirectory}");
+
+ return Path.GetFullPath(testDirectory);
+ }
+
+
+ public async Task Scaffold(string testDirectory, List<string> filePaths, Dictionary<string, ComicInfo> comicInfos = null)
+ {
+ foreach (var relativePath in filePaths)
+ {
+ var fullPath = Path.Combine(testDirectory, relativePath);
+ var fileDir = Path.GetDirectoryName(fullPath);
+
+ // Create the directory if it doesn't exist
+ if (!Directory.Exists(fileDir))
+ {
+ Directory.CreateDirectory(fileDir);
+ Console.WriteLine($"Created directory: {fileDir}");
+ }
+
+ var ext = Path.GetExtension(fullPath).ToLower();
+ if (ComicInfoExtensions.Contains(ext) && comicInfos != null && comicInfos.TryGetValue(Path.GetFileName(relativePath), out var info))
+ {
+ CreateMinimalCbz(fullPath, info);
+ }
+ else
+ {
+ // Create an empty file
+ await File.Create(fullPath).DisposeAsync();
+ Console.WriteLine($"Created empty file: {fullPath}");
+ }
+ }
+ }
+
+ private void CreateMinimalCbz(string filePath, ComicInfo? comicInfo = null)
+ {
+ using (var archive = ZipFile.Open(filePath, ZipArchiveMode.Create))
+ {
+ // Add the 1x1 image to the archive
+ archive.CreateEntryFromFile(_imagePath, "1x1.png");
+
+ if (comicInfo != null)
+ {
+ // Serialize ComicInfo object to XML
+ var comicInfoXml = SerializeComicInfoToXml(comicInfo);
+
+ // Create an entry for ComicInfo.xml in the archive
+ var entry = archive.CreateEntry("ComicInfo.xml");
+ using var entryStream = entry.Open();
+ using var writer = new StreamWriter(entryStream, Encoding.UTF8);
+
+ // Write the XML to the archive
+ writer.Write(comicInfoXml);
+ }
+
+ }
+ Console.WriteLine($"Created minimal CBZ archive: {filePath} with{(comicInfo != null ? "" : "out")} metadata.");
+ }
+
+
+ private static string SerializeComicInfoToXml(ComicInfo comicInfo)
+ {
+ var xmlSerializer = new XmlSerializer(typeof(ComicInfo));
+ using var stringWriter = new StringWriter();
+ using (var xmlWriter = XmlWriter.Create(stringWriter, new XmlWriterSettings { Indent = true, Encoding = new UTF8Encoding(false), OmitXmlDeclaration = false}))
+ {
+ xmlSerializer.Serialize(xmlWriter, comicInfo);
+ }
+
+ // For the love of god, I spent 2 hours trying to get utf-8 with no BOM
+ return stringWriter.ToString().Replace("""<?xml version="1.0" encoding="utf-16"?>""",
+ @"<?xml version=""1.0"" encoding=""utf-8""?>");
+ }
+}
diff --git a/API.Tests/Helpers/SeriesHelperTests.cs b/API.Tests/Helpers/SeriesHelperTests.cs
index a5b5a063b..22b4a3cd1 100644
--- a/API.Tests/Helpers/SeriesHelperTests.cs
+++ b/API.Tests/Helpers/SeriesHelperTests.cs
@@ -1,6 +1,5 @@
using System.Collections.Generic;
using System.Linq;
-using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
diff --git a/API.Tests/Helpers/StringHelperTests.cs b/API.Tests/Helpers/StringHelperTests.cs
new file mode 100644
index 000000000..8f845c9b0
--- /dev/null
+++ b/API.Tests/Helpers/StringHelperTests.cs
@@ -0,0 +1,46 @@
+using API.Helpers;
+using Xunit;
+
+namespace API.Tests.Helpers;
+
+public class StringHelperTests
+{
+ [Theory]
+ [InlineData(
+ "A Perfect Marriage Becomes a Perfect Affair!
Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?
",
+ "A Perfect Marriage Becomes a Perfect Affair!
Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?
"
+ )]
+ [InlineData(
+ "Blog | Twitter | Pixiv | Pawoo
",
+ "Blog | Twitter | Pixiv | Pawoo
"
+ )]
+ public void TestSquashBreaklines(string input, string expected)
+ {
+ Assert.Equal(expected, StringHelper.SquashBreaklines(input));
+ }
+
+ [Theory]
+ [InlineData(
+ "A Perfect Marriage Becomes a Perfect Affair!
(Source: Anime News Network)
",
+ "A Perfect Marriage Becomes a Perfect Affair!
"
+ )]
+ [InlineData(
+ "A Perfect Marriage Becomes a Perfect Affair!
(Source: Anime News Network)",
+ "A Perfect Marriage Becomes a Perfect Affair!
"
+ )]
+ public void TestRemoveSourceInDescription(string input, string expected)
+ {
+ Assert.Equal(expected, StringHelper.RemoveSourceInDescription(input));
+ }
+
+
+ [Theory]
+ [InlineData(
+"""Pawoo
""",
+"""Pawoo"""
+ )]
+ public void TestCorrectUrls(string input, string expected)
+ {
+ Assert.Equal(expected, StringHelper.CorrectUrls(input));
+ }
+}
diff --git a/API.Tests/Helpers/TagHelperTests.cs b/API.Tests/Helpers/TagHelperTests.cs
deleted file mode 100644
index 430a85d69..000000000
--- a/API.Tests/Helpers/TagHelperTests.cs
+++ /dev/null
@@ -1,126 +0,0 @@
-using System.Collections.Generic;
-using API.Data;
-using API.Entities;
-using API.Helpers;
-using API.Helpers.Builders;
-using Xunit;
-
-namespace API.Tests.Helpers;
-
-public class TagHelperTests
-{
- [Fact]
- public void UpdateTag_ShouldAddNewTag()
- {
- var allTags = new List
- {
- new TagBuilder("Action").Build(),
- new TagBuilder("action").Build(),
- new TagBuilder("Sci-fi").Build(),
- };
- var tagAdded = new List();
-
- TagHelper.UpdateTag(allTags, new[] {"Action", "Adventure"}, (tag, added) =>
- {
- if (added)
- {
- tagAdded.Add(tag);
- }
-
- });
-
- Assert.Single(tagAdded);
- Assert.Equal(4, allTags.Count);
- }
-
- [Fact]
- public void UpdateTag_ShouldNotAddDuplicateTag()
- {
- var allTags = new List
- {
- new TagBuilder("Action").Build(),
- new TagBuilder("action").Build(),
- new TagBuilder("Sci-fi").Build(),
-
- };
- var tagAdded = new List();
-
- TagHelper.UpdateTag(allTags, new[] {"Action", "Scifi"}, (tag, added) =>
- {
- if (added)
- {
- tagAdded.Add(tag);
- }
- TagHelper.AddTagIfNotExists(allTags, tag);
- });
-
- Assert.Equal(3, allTags.Count);
- Assert.Empty(tagAdded);
- }
-
- [Fact]
- public void AddTag_ShouldAddOnlyNonExistingTag()
- {
- var existingTags = new List
- {
- new TagBuilder("Action").Build(),
- new TagBuilder("action").Build(),
- new TagBuilder("Sci-fi").Build(),
- };
-
-
- TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Action").Build());
- Assert.Equal(3, existingTags.Count);
-
- TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("action").Build());
- Assert.Equal(3, existingTags.Count);
-
- TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Shonen").Build());
- Assert.Equal(4, existingTags.Count);
- }
-
- [Fact]
- public void KeepOnlySamePeopleBetweenLists()
- {
- var existingTags = new List
- {
- new TagBuilder("Action").Build(),
- new TagBuilder("Sci-fi").Build(),
- };
-
- var peopleFromChapters = new List
- {
- new TagBuilder("Action").Build(),
- };
-
- var tagRemoved = new List();
- TagHelper.KeepOnlySameTagBetweenLists(existingTags,
- peopleFromChapters, tag =>
- {
- tagRemoved.Add(tag);
- });
-
- Assert.Single(tagRemoved);
- }
-
- [Fact]
- public void RemoveEveryoneIfNothingInRemoveAllExcept()
- {
- var existingTags = new List
- {
- new TagBuilder("Action").Build(),
- new TagBuilder("Sci-fi").Build(),
- };
-
- var peopleFromChapters = new List();
-
- var tagRemoved = new List();
- TagHelper.KeepOnlySameTagBetweenLists(existingTags,
- peopleFromChapters, tag =>
- {
- tagRemoved.Add(tag);
- });
-
- Assert.Equal(2, tagRemoved.Count);
- }
-}
diff --git a/API.Tests/Parser/BookParserTests.cs b/API.Tests/Parser/BookParserTests.cs
deleted file mode 100644
index 52fd02ae8..000000000
--- a/API.Tests/Parser/BookParserTests.cs
+++ /dev/null
@@ -1,43 +0,0 @@
-using Xunit;
-
-namespace API.Tests.Parser;
-
-public class BookParserTests
-{
- [Theory]
- [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")]
- [InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")]
- [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")]
- public void ParseSeriesTest(string filename, string expected)
- {
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename));
- }
-
- [Theory]
- [InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
- [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")]
- public void ParseVolumeTest(string filename, string expected)
- {
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename));
- }
-
- // [Theory]
- // [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA", "@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA")]
- // [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url('fonts/font.css')", "@font-face{font-family:'syyskuu_repaleinen';src:url('TEST/fonts/font.css')")]
- // public void ReplaceFontSrcUrl(string input, string expected)
- // {
- // var apiBase = "TEST/";
- // var actual = API.Parser.Parser.FontSrcUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
- // Assert.Equal(expected, actual);
- // }
- //
- // [Theory]
- // [InlineData("@import url('font.css');", "@import url('TEST/font.css');")]
- // public void ReplaceImportSrcUrl(string input, string expected)
- // {
- // var apiBase = "TEST/";
- // var actual = API.Parser.Parser.CssImportUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
- // Assert.Equal(expected, actual);
- // }
-
-}
diff --git a/API.Tests/Parsers/BasicParserTests.cs b/API.Tests/Parsers/BasicParserTests.cs
new file mode 100644
index 000000000..32673e0e6
--- /dev/null
+++ b/API.Tests/Parsers/BasicParserTests.cs
@@ -0,0 +1,249 @@
+using System.IO;
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class BasicParserTests : AbstractFsTest
+{
+ private readonly BasicParser _parser;
+ private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+ private readonly string _rootDirectory;
+
+ public BasicParserTests()
+ {
+ var fileSystem = CreateFileSystem();
+ _rootDirectory = Path.Join(DataDirectory, "Books/");
+ fileSystem.AddDirectory(_rootDirectory);
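+ // Seed the mock filesystem with representative library files for the parser scenarios below.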
+ fileSystem.AddFile($"{_rootDirectory}Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
+
+ fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1.cbz", new MockFileData(""));
+ fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1 Chapter 2.cbz", new MockFileData(""));
+ fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Chapter 3.cbz", new MockFileData(""));
+ fileSystem.AddFile("$\"{RootDirectory}Accel World/Accel World Gaiden SP01.cbz", new MockFileData(""));
+
+
+ fileSystem.AddFile($"{_rootDirectory}Accel World/cover.png", new MockFileData(""));
+
+ fileSystem.AddFile($"{_rootDirectory}Batman/Batman #1.cbz", new MockFileData(""));
+
+ var ds = new DirectoryService(_dsLogger, fileSystem);
+ _parser = new BasicParser(ds, new ImageParser(ds));
+ }
+
+ #region Parse_Manga
+
+ /// <summary>
+ /// Tests that a loose-leaf cover image in the manga library is ignored
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_JustCover_ShouldReturnNull()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Accel World/cover.png", $"{_rootDirectory}Accel World/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.Null(actual);
+ }
+
+ /// <summary>
+ /// Tests that a non-cover image in the manga library is still parsed rather than ignored
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_OtherImage_ShouldReturnNull()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Accel World/page 01.png", $"{_rootDirectory}Accel World/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+ }
+
+ /// <summary>
+ /// Tests that when there is a volume and chapter in filename, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_VolumeAndChapterInFilename()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz", $"{_rootDirectory}Mujaki no Rakuen/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("Mujaki no Rakuen", actual.Series);
+ Assert.Equal("12", actual.Volumes);
+ Assert.Equal("76", actual.Chapters);
+ Assert.False(actual.IsSpecial);
+ }
+
+ /// <summary>
+ /// Tests that when there is a volume in filename, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_JustVolumeInFilename()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz",
+ $"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", actual.Series);
+ Assert.Equal("1", actual.Volumes);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.False(actual.IsSpecial);
+ }
+
+ /// <summary>
+ /// Tests that when there is a chapter only in filename, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_JustChapterInFilename()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Beelzebub/Beelzebub_01_[Noodles].zip",
+ $"{_rootDirectory}Beelzebub/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("Beelzebub", actual.Series);
+ Assert.Equal(Parser.LooseLeafVolume, actual.Volumes);
+ Assert.Equal("1", actual.Chapters);
+ Assert.False(actual.IsSpecial);
+ }
+
+ /// <summary>
+ /// Tests that when there is a SP Marker in filename, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_SpecialMarkerInFilename()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr",
+ $"{_rootDirectory}Summer Time Rendering/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("Summer Time Rendering", actual.Series);
+ Assert.Equal(Parser.SpecialVolume, actual.Volumes);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.True(actual.IsSpecial);
+ }
+
+
+ /// <summary>
+ /// Tests that when the filename parses as a special, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_SpecialInFilename()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Volume SP01.cbr",
+ $"{_rootDirectory}Summer Time Rendering/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("Summer Time Rendering", actual.Series);
+ Assert.Equal("Volume", actual.Title);
+ Assert.Equal(Parser.SpecialVolume, actual.Volumes);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.True(actual.IsSpecial);
+ }
+
+ /// <summary>
+ /// Tests that when the filename parses as a special, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_SpecialInFilename2()
+ {
+ var actual = _parser.Parse("M:/Kimi wa Midara na Boku no Joou/Specials/[Renzokusei] Special 1 SP02.zip",
+ "M:/Kimi wa Midara na Boku no Joou/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("Kimi wa Midara na Boku no Joou", actual.Series);
+ Assert.Equal("[Renzokusei] Special 1", actual.Title);
+ Assert.Equal(Parser.SpecialVolume, actual.Volumes);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.True(actual.IsSpecial);
+ }
+
+ /// <summary>
+ /// Tests that when the filename parses as a special, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_SpecialInFilename_StrangeNaming()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}My Dress-Up Darling/SP01 1. Special Name.cbz",
+ _rootDirectory,
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("My Dress-Up Darling", actual.Series);
+ Assert.Equal("1. Special Name", actual.Title);
+ Assert.Equal(Parser.SpecialVolume, actual.Volumes);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.True(actual.IsSpecial);
+ }
+
+ /// <summary>
+ /// Tests that when there is an edition in filename, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaLibrary_EditionInFilename()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz",
+ $"{_rootDirectory}Air Gear/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("Air Gear", actual.Series);
+ Assert.Equal("1", actual.Volumes);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.False(actual.IsSpecial);
+ Assert.Equal("Omnibus", actual.Edition);
+ }
+
+ #endregion
+
+ #region Parse_Books
+ /// <summary>
+ /// Tests that when there is a volume in filename, it appropriately parses
+ /// </summary>
+ [Fact]
+ public void Parse_MangaBooks_JustVolumeInFilename()
+ {
+ var actual = _parser.Parse($"{_rootDirectory}Epubs/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub",
+ $"{_rootDirectory}Epubs/",
+ _rootDirectory, LibraryType.Manga);
+ Assert.NotNull(actual);
+
+ Assert.Equal("Harrison, Kim - The Good, The Bad, and the Undead - Hollows", actual.Series);
+ Assert.Equal("2.5", actual.Volumes);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ }
+
+ #endregion
+
+ #region IsApplicable
+ /// <summary>
+ /// Tests that this Parser does not apply to library types that have dedicated parsers (Image, ComicVine)
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+ {
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.ComicVine));
+ }
+
+ /// <summary>
+ /// Tests that this Parser applies to the standard library types (Manga, Comic, Book, Light Novel)
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Success_WhenMatchingLibraryType()
+ {
+ Assert.True(_parser.IsApplicable("something.png", LibraryType.Manga));
+ Assert.True(_parser.IsApplicable("something.png", LibraryType.Comic));
+ Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Book));
+ Assert.True(_parser.IsApplicable("something.epub", LibraryType.LightNovel));
+ }
+
+
+ #endregion
+}
diff --git a/API.Tests/Parsers/BookParserTests.cs b/API.Tests/Parsers/BookParserTests.cs
new file mode 100644
index 000000000..90147ac6b
--- /dev/null
+++ b/API.Tests/Parsers/BookParserTests.cs
@@ -0,0 +1,73 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class BookParserTests
+{
+ private readonly BookParser _parser;
+ private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+ private const string RootDirectory = "C:/Books/";
+
+ public BookParserTests()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.AddDirectory("C:/Books/");
+ fileSystem.AddFile("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
+ fileSystem.AddFile("C:/Books/Adam Freeman - Pro ASP.NET Core 6.epub", new MockFileData(""));
+ fileSystem.AddFile("C:/Books/My Fav Book SP01.epub", new MockFileData(""));
+ var ds = new DirectoryService(_dsLogger, fileSystem);
+ _parser = new BookParser(ds, Substitute.For<IBookService>(), new BasicParser(ds, new ImageParser(ds)));
+ }
+
+ #region Parse
+
+ // TODO: I'm not sure how to actually test this as it relies on an epub parser to actually do anything
+
+ /// <summary>
+ /// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
+ /// </summary>
+ // [Fact]
+ // public void Parse_SeriesWithDirectoryName()
+ // {
+ // var actual = _parser.Parse("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", "C:/Books/Birds of Prey/",
+ // RootDirectory, LibraryType.Book, new ComicInfo()
+ // {
+ // Series = "Harry Potter",
+ // Volume = "1"
+ // });
+ //
+ // Assert.NotNull(actual);
+ // Assert.Equal("Harry Potter", actual.Series);
+ // Assert.Equal("1", actual.Volumes);
+ // }
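+
+ // Untested sketch of one possible approach: keep the substituted IBookService in a field and stub its epub
+ // metadata lookup so Parse can resolve Series/Volume without a real epub on disk. The member used below
+ // (GetComicInfo) is an assumption and has not been verified against IBookService, hence it stays commented out.
+ // [Fact]
+ // public void Parse_SeriesFromStubbedEpubMetadata()
+ // {
+ //     _bookService.GetComicInfo("C:/Books/Harry Potter/Harry Potter - Vol 1.epub")
+ //         .Returns(new ComicInfo { Series = "Harry Potter", Volume = "1" });
+ //     var actual = _parser.Parse("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", "C:/Books/Harry Potter/",
+ //         RootDirectory, LibraryType.Book, null);
+ //     Assert.Equal("Harry Potter", actual.Series);
+ //     Assert.Equal("1", actual.Volumes);
+ // }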
+
+ #endregion
+
+ #region IsApplicable
+ /// <summary>
+ /// Tests that this Parser does not apply to non-epub files, regardless of library type
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+ {
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Book));
+
+ }
+
+ /// <summary>
+ /// Tests that this Parser applies to epub files, regardless of library type
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Success_WhenMatchingLibraryType()
+ {
+ Assert.True(_parser.IsApplicable("something.epub", LibraryType.Image));
+ }
+ #endregion
+}
diff --git a/API.Tests/Parsers/ComicVineParserTests.cs b/API.Tests/Parsers/ComicVineParserTests.cs
new file mode 100644
index 000000000..f01e98afd
--- /dev/null
+++ b/API.Tests/Parsers/ComicVineParserTests.cs
@@ -0,0 +1,115 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Data.Metadata;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class ComicVineParserTests
+{
+ private readonly ComicVineParser _parser;
+ private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+ private const string RootDirectory = "C:/Comics/";
+
+ public ComicVineParserTests()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.AddDirectory("C:/Comics/");
+ fileSystem.AddDirectory("C:/Comics/Birds of Prey (2002)");
+ fileSystem.AddFile("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", new MockFileData(""));
+ fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", new MockFileData(""));
+ fileSystem.AddFile("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", new MockFileData(""));
+ var ds = new DirectoryService(_dsLogger, fileSystem);
+ _parser = new ComicVineParser(ds);
+ }
+
+ #region Parse
+
+ /// <summary>
+ /// Tests that when Series and Volume are filled out, Kavita uses that for the Series Name
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithComicInfo()
+ {
+ var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
+ RootDirectory, LibraryType.ComicVine, new ComicInfo()
+ {
+ Series = "Birds of Prey",
+ Volume = "2002"
+ });
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey (2002)", actual.Series);
+ Assert.Equal("2002", actual.Volumes);
+ }
+
+ /// <summary>
+ /// Tests that with no ComicInfo, the directory name is used when it matches "Series (2002)" or "Series (2)"
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithDirectoryNameAsSeriesYear()
+ {
+ var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
+ RootDirectory, LibraryType.ComicVine, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey (2002)", actual.Series);
+ Assert.Equal("2002", actual.Volumes);
+ Assert.Equal("1", actual.Chapters);
+ }
+
+ /// <summary>
+ /// Tests that with no ComicInfo, parent directory names up to the library root are checked for a "Series (Year)" match
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithADirectoryNameAsSeriesYear()
+ {
+ var actual = _parser.Parse("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", "C:/Comics/DC Comics/",
+ RootDirectory, LibraryType.ComicVine, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey (1999)", actual.Series);
+ Assert.Equal("1999", actual.Volumes);
+ Assert.Equal("1", actual.Chapters);
+ }
+
+ /// <summary>
+ /// Tests that with no ComicInfo and no "Series (Volume)" match, the directory name alone becomes the Series
+ /// </summary>
+ [Fact]
+ public void Parse_FallbackToDirectoryNameOnly()
+ {
+ var actual = _parser.Parse("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", "C:/Comics/DC Comics/",
+ RootDirectory, LibraryType.ComicVine, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Blood Syndicate", actual.Series);
+ Assert.Equal(Parser.LooseLeafVolume, actual.Volumes);
+ Assert.Equal("1", actual.Chapters);
+ }
+ #endregion
+
+ #region IsApplicable
+ /// <summary>
+ /// Tests that this Parser can only be used on ComicVine type
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+ {
+ Assert.False(_parser.IsApplicable("", LibraryType.Comic));
+ }
+
+ /// <summary>
+ /// Tests that this Parser can only be used on ComicVine type
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Success_WhenMatchingLibraryType()
+ {
+ Assert.True(_parser.IsApplicable("", LibraryType.ComicVine));
+ }
+ #endregion
+}
diff --git a/API.Tests/Parser/DefaultParserTests.cs b/API.Tests/Parsers/DefaultParserTests.cs
similarity index 73%
rename from API.Tests/Parser/DefaultParserTests.cs
rename to API.Tests/Parsers/DefaultParserTests.cs
index 14e75f353..733b55d62 100644
--- a/API.Tests/Parser/DefaultParserTests.cs
+++ b/API.Tests/Parsers/DefaultParserTests.cs
@@ -1,7 +1,5 @@
-using System;
-using System.Collections.Generic;
+using System.Collections.Generic;
using System.IO.Abstractions.TestingHelpers;
-using System.Linq;
using API.Entities.Enums;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
@@ -10,7 +8,7 @@ using NSubstitute;
using Xunit;
using Xunit.Abstractions;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsers;
public class DefaultParserTests
{
@@ -21,10 +19,12 @@ public class DefaultParserTests
{
_testOutputHelper = testOutputHelper;
 var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
- _defaultParser = new DefaultParser(directoryService);
+ _defaultParser = new BasicParser(directoryService, new ImageParser(directoryService));
}
+
+
#region ParseFromFallbackFolders
[Theory]
[InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")]
@@ -33,7 +33,7 @@ public class DefaultParserTests
[InlineData("C:/", "C:/Something Random/Mujaki no Rakuen SP01.cbz", "Something Random")]
public void ParseFromFallbackFolders_FallbackShouldParseSeries(string rootDir, string inputPath, string expectedSeries)
{
- var actual = _defaultParser.Parse(inputPath, rootDir);
+ var actual = _defaultParser.Parse(inputPath, rootDir, rootDir, LibraryType.Manga, null);
if (actual == null)
{
Assert.NotNull(actual);
@@ -44,19 +44,18 @@ public class DefaultParserTests
}
[Theory]
- [InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")]
- [InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")]
- [InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster~0~1")]
- [InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", "Hajime no Ippo~0~0")]
- public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string expectedParseInfo)
+ [InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", new [] {"Btooom!", "1", "1"})]
+ [InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", new [] {"Btooom!", "1", "2"})]
+ [InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", new [] {"Monster", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, "1"})]
+ [InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", new [] {"Hajime no Ippo", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter})]
+ public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string[] expectedParseInfo)
{
const string rootDirectory = "/manga/";
- var tokens = expectedParseInfo.Split("~");
- var actual = new ParserInfo {Series = "", Chapters = "0", Volumes = "0"};
+ var actual = new ParserInfo {Series = "", Chapters = Parser.DefaultChapter, Volumes = Parser.LooseLeafVolume};
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
- Assert.Equal(tokens[0], actual.Series);
- Assert.Equal(tokens[1], actual.Volumes);
- Assert.Equal(tokens[2], actual.Chapters);
+ Assert.Equal(expectedParseInfo[0], actual.Series);
+ Assert.Equal(expectedParseInfo[1], actual.Volumes);
+ Assert.Equal(expectedParseInfo[2], actual.Chapters);
}
[Theory]
@@ -74,8 +73,8 @@ public class DefaultParserTests
fs.AddDirectory(rootDirectory);
fs.AddFile(inputFile, new MockFileData(""));
 var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
- var parser = new DefaultParser(ds);
- var actual = parser.Parse(inputFile, rootDirectory);
+ var parser = new BasicParser(ds, new ImageParser(ds));
+ var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, null);
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
Assert.Equal(expectedParseInfo, actual.Series);
}
@@ -90,8 +89,8 @@ public class DefaultParserTests
fs.AddDirectory(rootDirectory);
fs.AddFile(inputFile, new MockFileData(""));
 var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
- var parser = new DefaultParser(ds);
- var actual = parser.Parse(inputFile, rootDirectory);
+ var parser = new BasicParser(ds, new ImageParser(ds));
+ var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, null);
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
Assert.Equal(expectedParseInfo, actual.Series);
}
@@ -101,13 +100,6 @@ public class DefaultParserTests
#region Parse
- [Fact]
- public void Parse_MangaLibrary_JustCover_ShouldReturnNull()
- {
- const string rootPath = @"E:/Manga/";
- var actual = _defaultParser.Parse(@"E:/Manga/Accel World/cover.png", rootPath);
- Assert.Null(actual);
- }
[Fact]
public void Parse_ParseInfo_Manga()
@@ -127,19 +119,20 @@ public class DefaultParserTests
expected.Add(filepath, new ParserInfo
{
Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1",
- Chapters = "0", Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip";
+ filepath = @"E:/Manga/Beelzebub/Beelzebub_01_[Noodles].zip";
expected.Add(filepath, new ParserInfo
{
- Series = "Beelzebub", Volumes = "0",
+ Series = "Beelzebub", Volumes = Parser.LooseLeafVolume,
Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
+ // Note: Lots of duplicates here. I think I can move them to the ParserTests itself
+ filepath = @"E:/Manga/Ichinensei ni Nacchattara/Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
expected.Add(filepath, new ParserInfo
{
Series = "Ichinensei ni Nacchattara", Volumes = "1",
@@ -147,71 +140,71 @@ public class DefaultParserTests
FullFilePath = filepath
});
- filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
+ filepath = @"E:/Manga/Tenjo Tenge (Color)/Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Tenjo Tenge {Full Contact Edition}", Volumes = "1", Edition = "",
- Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
+ filepath = @"E:/Manga/Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)/Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
- Chapters = "0", Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
+ filepath = @"E:/Manga/Dorohedoro/Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Dorohedoro", Volumes = "1", Edition = "",
- Chapters = "0", Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
+ filepath = @"E:/Manga/APOSIMZ/APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "APOSIMZ", Volumes = "0", Edition = "",
+ Series = "APOSIMZ", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
+ filepath = @"E:/Manga/Corpse Party Musume/Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "",
+ Series = "Kedouin Makoto - Corpse Party Musume", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
+ filepath = @"E:/Manga/Goblin Slayer/Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "",
+ Series = "Goblin Slayer - Brand New Day", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
+ filepath = @"E:/Manga/Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr";
expected.Add(filepath, new ParserInfo
{
- Series = "Summer Time Rendering", Volumes = "0", Edition = "",
- Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
+ Series = "Summer Time Rendering", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, Edition = "",
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = true
});
- filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
+ filepath = @"E:/Manga/Seraph of the End/Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "",
+ Series = "Seraph of the End - Vampire Reign", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz";
+ filepath = @"E:/Manga/Kono Subarashii Sekai ni Bakuen wo!/Vol. 00 Ch. 000.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
@@ -219,7 +212,7 @@ public class DefaultParserTests
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz";
+ filepath = @"E:/Manga/Toukyou Akazukin/Vol. 01 Ch. 001.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
@@ -228,37 +221,37 @@ public class DefaultParserTests
});
// If an image is cover exclusively, ignore it
- filepath = @"E:\Manga\Seraph of the End\cover.png";
+ filepath = @"E:/Manga/Seraph of the End/cover.png";
expected.Add(filepath, null);
- filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz";
+ filepath = @"E:/Manga/The Beginning After the End/Chapter 001.cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "The Beginning After the End", Volumes = "0", Edition = "",
+ Series = "The Beginning After the End", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Manga\Air Gear\Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
+ filepath = @"E:/Manga/Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Air Gear", Volumes = "1", Edition = "Omnibus",
- Chapters = "0", Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
+ filepath = @"E:/Manga/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
expected.Add(filepath, new ParserInfo
{
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
- Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
FullFilePath = filepath, IsSpecial = false
});
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
- var actual = _defaultParser.Parse(file, rootPath);
+ var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Manga, null);
if (expectedInfo == null)
{
Assert.Null(actual);
@@ -283,20 +276,20 @@ public class DefaultParserTests
}
}
- [Fact]
+ //[Fact]
public void Parse_ParseInfo_Manga_ImageOnly()
{
- // Images don't have root path as E:\Manga, but rather as the path of the folder
+ // Images don't have root path as E:/Manga, but rather as the path of the folder
// Note: Fallback to folder will parse Monster #8 and get Monster
- var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
+ var filepath = @"E:/Manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg";
var expectedInfo2 = new ParserInfo
{
- Series = "Monster #8", Volumes = "0", Edition = "",
+ Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
FullFilePath = filepath, IsSpecial = false
};
- var actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Monster #8");
+ var actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Monster #8", "E:/Manga", LibraryType.Manga, null);
Assert.NotNull(actual2);
_testOutputHelper.WriteLine($"Validating {filepath}");
Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -314,7 +307,7 @@ public class DefaultParserTests
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
_testOutputHelper.WriteLine("FullFilePath ✓");
- filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
+ filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Vol19/ch. 186/Vol. 19 p106.gif";
expectedInfo2 = new ParserInfo
{
Series = "Just Images the second", Volumes = "19", Edition = "",
@@ -322,7 +315,7 @@ public class DefaultParserTests
FullFilePath = filepath, IsSpecial = false
};
- actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\");
+ actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga",LibraryType.Manga, null);
Assert.NotNull(actual2);
_testOutputHelper.WriteLine($"Validating {filepath}");
Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -340,7 +333,7 @@ public class DefaultParserTests
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
_testOutputHelper.WriteLine("FullFilePath ✓");
- filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
+ filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Blank Folder/Vol19/ch. 186/Vol. 19 p106.gif";
expectedInfo2 = new ParserInfo
{
Series = "Just Images the second", Volumes = "19", Edition = "",
@@ -348,7 +341,7 @@ public class DefaultParserTests
FullFilePath = filepath, IsSpecial = false
};
- actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\");
+ actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, null);
Assert.NotNull(actual2);
_testOutputHelper.WriteLine($"Validating {filepath}");
Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -379,7 +372,7 @@ public class DefaultParserTests
filesystem.AddFile(@"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz", new MockFileData(""));
 var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
- var parser = new DefaultParser(ds);
+ var parser = new BasicParser(ds, new ImageParser(ds));
var filepath = @"E:/Manga/Foo 50/Foo 50 v1.cbz";
// There is a bad parse for series like "Foo 50", so we have parsed chapter as 50
@@ -390,7 +383,7 @@ public class DefaultParserTests
FullFilePath = filepath
};
- var actual = parser.Parse(filepath, rootPath);
+ var actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, null);
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {filepath}");
@@ -414,12 +407,12 @@ public class DefaultParserTests
filepath = @"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz";
expected = new ParserInfo
{
- Series = "Foo 50", Volumes = "0", IsSpecial = true,
- Chapters = "50", Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
+ Series = "Foo 50", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, IsSpecial = true,
+ Chapters = Parser.DefaultChapter, Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
};
- actual = parser.Parse(filepath, rootPath);
+ actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, null);
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {filepath}");
Assert.Equal(expected.Format, actual.Format);
@@ -444,26 +437,26 @@ public class DefaultParserTests
[Fact]
public void Parse_ParseInfo_Comic()
{
- const string rootPath = @"E:/Comics/";
+ const string rootPath = "E:/Comics/";
 var expected = new Dictionary<string, ParserInfo>();
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
expected.Add(filepath, new ParserInfo
{
- Series = "Teen Titans", Volumes = "0",
- Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
+ Series = "Teen Titans", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath
});
// Fallback test with bad naming
- filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
+ filepath = @"E:/Comics/Comics/Babe/Babe Vol.1 #1-4/Babe 01.cbr";
expected.Add(filepath, new ParserInfo
{
- Series = "Babe", Volumes = "0", Edition = "",
+ Series = "Babe", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr";
+ filepath = @"E:/Comics/Comics/Publisher/Batman the Detective (2021)/Batman the Detective - v6 - 11 - (2021).cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Batman the Detective", Volumes = "6", Edition = "",
@@ -471,10 +464,10 @@ public class DefaultParserTests
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr";
+ filepath = @"E:/Comics/Comics/Batman - The Man Who Laughs #1 (2005)/Batman - The Man Who Laughs #1 (2005).cbr";
expected.Add(filepath, new ParserInfo
{
- Series = "Batman - The Man Who Laughs", Volumes = "0", Edition = "",
+ Series = "Batman - The Man Who Laughs", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
@@ -482,7 +475,7 @@ public class DefaultParserTests
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
- var actual = _defaultParser.Parse(file, rootPath, LibraryType.Comic);
+ var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Comic, null);
if (expectedInfo == null)
{
Assert.Null(actual);
diff --git a/API.Tests/Parsers/ImageParserTests.cs b/API.Tests/Parsers/ImageParserTests.cs
new file mode 100644
index 000000000..f95c98ddf
--- /dev/null
+++ b/API.Tests/Parsers/ImageParserTests.cs
@@ -0,0 +1,97 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class ImageParserTests
+{
+ private readonly ImageParser _parser;
+ private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+ private const string RootDirectory = "C:/Comics/";
+
+ public ImageParserTests()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.AddDirectory("C:/Comics/");
+ fileSystem.AddDirectory("C:/Comics/Birds of Prey (2002)");
+ fileSystem.AddFile("C:/Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
+ fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
+ var ds = new DirectoryService(_dsLogger, fileSystem);
+ _parser = new ImageParser(ds);
+ }
+
+ #region Parse
+
+ /// <summary>
+ /// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithDirectoryName()
+ {
+ var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01/01.jpg", "C:/Comics/Birds of Prey/",
+ RootDirectory, LibraryType.Image, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey", actual.Series);
+ Assert.Equal("1", actual.Chapters);
+ }
+
+ /// <summary>
+ /// Tests that if there is a Series Folder only, the code appropriately identifies the Series name from folder
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithNoNestedChapter()
+ {
+ var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01 page 01.jpg", "C:/Comics/",
+ RootDirectory, LibraryType.Image, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey", actual.Series);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ }
+
+ /// <summary>
+ /// Tests that if there is a Series Folder only, the Series name comes from the folder and loose images are treated as a special
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithLooseImages()
+ {
+ var actual = _parser.Parse("C:/Comics/Birds of Prey/page 01.jpg", "C:/Comics/",
+ RootDirectory, LibraryType.Image, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey", actual.Series);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.True(actual.IsSpecial);
+ }
+
+
+ #endregion
+
+ #region IsApplicable
+ /// <summary>
+ /// Tests that this Parser can only be used on images and Image library type
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+ {
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
+ Assert.False(_parser.IsApplicable("something.epub", LibraryType.Image));
+ }
+
+ /// <summary>
+ /// Tests that this Parser can only be used on images and Image library type
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Success_WhenMatchingLibraryType()
+ {
+ Assert.True(_parser.IsApplicable("something.png", LibraryType.Image));
+ }
+ #endregion
+}
diff --git a/API.Tests/Parsers/PdfParserTests.cs b/API.Tests/Parsers/PdfParserTests.cs
new file mode 100644
index 000000000..72088526d
--- /dev/null
+++ b/API.Tests/Parsers/PdfParserTests.cs
@@ -0,0 +1,71 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class PdfParserTests
+{
+ private readonly PdfParser _parser;
+ private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+ private const string RootDirectory = "C:/Books/";
+
+ public PdfParserTests()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.AddDirectory("C:/Books/");
+ fileSystem.AddDirectory("C:/Books/Birds of Prey (2002)");
+ fileSystem.AddFile("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf", new MockFileData(""));
+ fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
+ var ds = new DirectoryService(_dsLogger, fileSystem);
+ _parser = new PdfParser(ds);
+ }
+
+ #region Parse
+
+ /// <summary>
+ /// Tests that if there is a Series Folder containing a similarly named PDF, the code appropriately identifies the Series name
+ /// </summary>
+ [Fact]
+ public void Parse_Book_SeriesWithDirectoryName()
+ {
+ var actual = _parser.Parse("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf",
+ "C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/",
+ RootDirectory, LibraryType.Book, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("A Dictionary of Japanese Food - Ingredients and Culture", actual.Series);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.True(actual.IsSpecial);
+ }
+
+ #endregion
+
+ #region IsApplicable
+ /// <summary>
+ /// Tests that this Parser can only be used on pdfs
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+ {
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
+ Assert.False(_parser.IsApplicable("something.epub", LibraryType.Image));
+ Assert.False(_parser.IsApplicable("something.png", LibraryType.Book));
+ }
+
+ /// <summary>
+ /// Tests that this Parser can only be used on pdfs
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Success_WhenMatchingLibraryType()
+ {
+ Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Book));
+ Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Manga));
+ }
+ #endregion
+}
diff --git a/API.Tests/Parsing/BookParsingTests.cs b/API.Tests/Parsing/BookParsingTests.cs
new file mode 100644
index 000000000..9b02eff63
--- /dev/null
+++ b/API.Tests/Parsing/BookParsingTests.cs
@@ -0,0 +1,24 @@
+using API.Entities.Enums;
+using Xunit;
+
+namespace API.Tests.Parsing;
+
+public class BookParsingTests
+{
+ [Theory]
+ [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")]
+ [InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")]
+ [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")]
+ public void ParseSeriesTest(string filename, string expected)
+ {
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Book));
+ }
+
+ [Theory]
+ [InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
+ [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")]
+ public void ParseVolumeTest(string filename, string expected)
+ {
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Book));
+ }
+}
diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parsing/ComicParsingTests.cs
similarity index 67%
rename from API.Tests/Parser/ComicParserTests.cs
rename to API.Tests/Parsing/ComicParsingTests.cs
index 4740c4f54..a0375a566 100644
--- a/API.Tests/Parser/ComicParserTests.cs
+++ b/API.Tests/Parsing/ComicParsingTests.cs
@@ -1,26 +1,11 @@
-using System.IO.Abstractions.TestingHelpers;
-using API.Services;
+using API.Entities.Enums;
using API.Services.Tasks.Scanner.Parser;
-using Microsoft.Extensions.Logging;
-using NSubstitute;
using Xunit;
-using Xunit.Abstractions;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsing;
-public class ComicParserTests
+public class ComicParsingTests
{
- private readonly ITestOutputHelper _testOutputHelper;
- private readonly DefaultParser _defaultParser;
-
- public ComicParserTests(ITestOutputHelper testOutputHelper)
- {
- _testOutputHelper = testOutputHelper;
- _defaultParser =
- new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
- new MockFileSystem()));
- }
-
[Theory]
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")]
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")]
@@ -66,56 +51,58 @@ public class ComicParserTests
[InlineData("Demon 012 (Sep 1973) c2c", "Demon")]
[InlineData("Dragon Age - Until We Sleep 01 (of 03)", "Dragon Age - Until We Sleep")]
[InlineData("Green Lantern v2 017 - The Spy-Eye that doomed Green Lantern v2", "Green Lantern")]
- [InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire - Adam Strange")]
- [InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis - Rags Morales Sketches")]
+ [InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire Special - Adam Strange")]
+ [InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis Extra - Rags Morales Sketches")]
[InlineData("Daredevil - t6 - 10 - (2019)", "Daredevil")]
[InlineData("Batgirl T2000 #57", "Batgirl")]
[InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")]
[InlineData("Conquistador_-Tome_2", "Conquistador")]
[InlineData("Max_l_explorateur-_Tome_0", "Max l explorateur")]
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")]
- [InlineData("Bd Fr-Aldebaran-Antares-t6", "Aldebaran-Antares")]
+ [InlineData("Bd Fr-Aldebaran-Antares-t6", "Bd Fr-Aldebaran-Antares")]
[InlineData("Tintin - T22 Vol 714 pour Sydney", "Tintin")]
[InlineData("Fables 2010 Vol. 1 Legends in Exile", "Fables 2010")]
[InlineData("Kebab Том 1 Глава 1", "Kebab")]
[InlineData("Манга Глава 1", "Манга")]
+ [InlineData("ReZero รีเซทชีวิต ฝ่าวิกฤตต่างโลก เล่ม 1", "ReZero รีเซทชีวิต ฝ่าวิกฤตต่างโลก")]
+ [InlineData("SKY WORLD สกายเวิลด์ เล่มที่ 1", "SKY WORLD สกายเวิลด์")]
public void ParseComicSeriesTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(filename));
+ Assert.Equal(expected, Parser.ParseComicSeries(filename));
}
[Theory]
- [InlineData("01 Spider-Man & Wolverine 01.cbr", "0")]
- [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")]
- [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")]
- [InlineData("Batman & Catwoman - Trail of the Gun 01", "0")]
- [InlineData("Batman & Daredevil - King of New York", "0")]
- [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "0")]
- [InlineData("Batman & Robin the Teen Wonder #0", "0")]
- [InlineData("Batman & Wildcat (1 of 3)", "0")]
- [InlineData("Batman And Superman World's Finest #01", "0")]
- [InlineData("Babe 01", "0")]
- [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "0")]
+ [InlineData("01 Spider-Man & Wolverine 01.cbr", Parser.LooseLeafVolume)]
+ [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
+ [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Catwoman - Trail of the Gun 01", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Daredevil - King of New York", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Robin the Teen Wonder #0", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Wildcat (1 of 3)", Parser.LooseLeafVolume)]
+ [InlineData("Batman And Superman World's Finest #01", Parser.LooseLeafVolume)]
+ [InlineData("Babe 01", Parser.LooseLeafVolume)]
+ [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", Parser.LooseLeafVolume)]
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
- [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "0")]
+ [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", Parser.LooseLeafVolume)]
[InlineData("Superman v1 024 (09-10 1943)", "1")]
[InlineData("Superman v1.5 024 (09-10 1943)", "1.5")]
- [InlineData("Amazing Man Comics chapter 25", "0")]
- [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")]
- [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")]
- [InlineData("spawn-123", "0")]
- [InlineData("spawn-chapter-123", "0")]
- [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "0")]
- [InlineData("Batman Beyond 04 (of 6) (1999)", "0")]
- [InlineData("Batman Beyond 001 (2012)", "0")]
- [InlineData("Batman Beyond 2.0 001 (2013)", "0")]
- [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "0")]
+ [InlineData("Amazing Man Comics chapter 25", Parser.LooseLeafVolume)]
+ [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", Parser.LooseLeafVolume)]
+ [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", Parser.LooseLeafVolume)]
+ [InlineData("spawn-123", Parser.LooseLeafVolume)]
+ [InlineData("spawn-chapter-123", Parser.LooseLeafVolume)]
+ [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", Parser.LooseLeafVolume)]
+ [InlineData("Batman Beyond 04 (of 6) (1999)", Parser.LooseLeafVolume)]
+ [InlineData("Batman Beyond 001 (2012)", Parser.LooseLeafVolume)]
+ [InlineData("Batman Beyond 2.0 001 (2013)", Parser.LooseLeafVolume)]
+ [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", Parser.LooseLeafVolume)]
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")]
- [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
+ [InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.LooseLeafVolume)]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")]
[InlineData("Batgirl V2000 #57", "2000")]
- [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "0")]
- [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "0")]
+ [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", Parser.LooseLeafVolume)]
+ [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", Parser.LooseLeafVolume)]
[InlineData("Daredevil - v6 - 10 - (2019)", "6")]
[InlineData("Daredevil - v6.5", "6.5")]
// Tome Tests
@@ -125,22 +112,25 @@ public class ComicParserTests
[InlineData("Conquistador_Tome_2", "2")]
[InlineData("Max_l_explorateur-_Tome_0", "0")]
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")]
- [InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "0")]
+ [InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", Parser.LooseLeafVolume)]
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")]
// Russian Tests
[InlineData("Kebab Том 1 Глава 3", "1")]
- [InlineData("Манга Глава 2", "0")]
+ [InlineData("Манга Глава 2", Parser.LooseLeafVolume)]
+ [InlineData("ย้อนเวลากลับมาร้าย เล่ม 1", "1")]
+ [InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "1")]
+ [InlineData("วิวาห์รัก เดิมพันชีวิต ตอนที่ 2", Parser.LooseLeafVolume)]
public void ParseComicVolumeTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(filename));
+ Assert.Equal(expected, Parser.ParseComicVolume(filename));
}
[Theory]
[InlineData("01 Spider-Man & Wolverine 01.cbr", "1")]
- [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")]
- [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")]
+ [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.DefaultChapter)]
+ [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.DefaultChapter)]
[InlineData("Batman & Catwoman - Trail of the Gun 01", "1")]
- [InlineData("Batman & Daredevil - King of New York", "0")]
+ [InlineData("Batman & Daredevil - King of New York", Parser.DefaultChapter)]
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")]
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
[InlineData("Batman & Wildcat (1 of 3)", "1")]
@@ -164,8 +154,8 @@ public class ComicParserTests
[InlineData("Batman Beyond 001 (2012)", "1")]
[InlineData("Batman Beyond 2.0 001 (2013)", "1")]
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")]
- [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "0")]
- [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
+ [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", Parser.DefaultChapter)]
+ [InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.DefaultChapter)]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")]
[InlineData("Batgirl V2000 #57", "57")]
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")]
@@ -174,43 +164,47 @@ public class ComicParserTests
[InlineData("Daredevil - v6 - 10 - (2019)", "10")]
[InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")]
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")]
- [InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "0")]
+ [InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", Parser.DefaultChapter)]
[InlineData("Kebab Том 1 Глава 3", "3")]
[InlineData("Манга Глава 2", "2")]
[InlineData("Манга 2 Глава", "2")]
[InlineData("Манга Том 1 2 Глава", "2")]
+ [InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
+ [InlineData("Max Level Returner ตอนที่ 5", "5")]
+ [InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
public void ParseComicChapterTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(filename));
+ Assert.Equal(expected, Parser.ParseChapter(filename, LibraryType.Comic));
}
[Theory]
- [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)]
- [InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)]
+ [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", false)]
+ [InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", false)]
[InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)]
- [InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)]
- [InlineData("Boule et Bill - THS -Bill à disparu", true)]
- [InlineData("Asterix - HS - Les 12 travaux d'Astérix", true)]
- [InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", true)]
+ [InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", false)]
+ [InlineData("Boule et Bill - THS -Bill à disparu", false)]
+ [InlineData("Asterix - HS - Les 12 travaux d'Astérix", false)]
+ [InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", false)]
[InlineData("laughs", false)]
- [InlineData("Annual Days of Summer", true)]
- [InlineData("Adventure Time 2013 Annual #001 (2013)", true)]
- [InlineData("Adventure Time 2013_Annual_#001 (2013)", true)]
- [InlineData("Adventure Time 2013_-_Annual #001 (2013)", true)]
+ [InlineData("Annual Days of Summer", false)]
+ [InlineData("Adventure Time 2013 Annual #001 (2013)", false)]
+ [InlineData("Adventure Time 2013_Annual_#001 (2013)", false)]
+ [InlineData("Adventure Time 2013_-_Annual #001 (2013)", false)]
[InlineData("G.I. Joe - A Real American Hero Yearbook 004 Reprint (2021)", false)]
[InlineData("Mazebook 001", false)]
- [InlineData("X-23 One Shot (2010)", true)]
- [InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", true)]
- [InlineData("Batman Beyond Annual", true)]
- [InlineData("Batman Beyond Bonus", true)]
- [InlineData("Batman Beyond OneShot", true)]
- [InlineData("Batman Beyond Specials", true)]
- [InlineData("Batman Beyond Omnibus (1999)", true)]
- [InlineData("Batman Beyond Omnibus", true)]
- [InlineData("01 Annual Batman Beyond", true)]
+ [InlineData("X-23 One Shot (2010)", false)]
+ [InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", false)]
+ [InlineData("Batman Beyond Annual", false)]
+ [InlineData("Batman Beyond Bonus", false)]
+ [InlineData("Batman Beyond OneShot", false)]
+ [InlineData("Batman Beyond Specials", false)]
+ [InlineData("Batman Beyond Omnibus (1999)", false)]
+ [InlineData("Batman Beyond Omnibus", false)]
+ [InlineData("01 Annual Batman Beyond", false)]
+ [InlineData("Blood Syndicate Annual #001", false)]
public void IsComicSpecialTest(string input, bool expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsComicSpecial(input));
+ Assert.Equal(expected, Parser.IsSpecial(input, LibraryType.Comic));
}
}
diff --git a/API.Tests/Parsing/ImageParsingTests.cs b/API.Tests/Parsing/ImageParsingTests.cs
new file mode 100644
index 000000000..3d78d9372
--- /dev/null
+++ b/API.Tests/Parsing/ImageParsingTests.cs
@@ -0,0 +1,107 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace API.Tests.Parsing;
+
+public class ImageParsingTests
+{
+ private readonly ITestOutputHelper _testOutputHelper;
+ private readonly ImageParser _parser;
+
+ public ImageParsingTests(ITestOutputHelper testOutputHelper)
+ {
+ _testOutputHelper = testOutputHelper;
+ var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
+ _parser = new ImageParser(directoryService);
+ }
+
+ //[Fact]
+ public void Parse_ParseInfo_Manga_ImageOnly()
+ {
+ // Images don't have root path as E:\Manga, but rather as the path of the folder
+
+ // Note: falling back to the folder name would parse "Monster #8" and yield "Monster"
+ var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
+ var expectedInfo2 = new ParserInfo
+ {
+ Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
+ Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+ var actual2 = _parser.Parse(filepath, @"E:\Manga\Monster #8", "E:/Manga", LibraryType.Image, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+
+ filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
+ expectedInfo2 = new ParserInfo
+ {
+ Series = "Just Images the second", Volumes = "19", Edition = "",
+ Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+
+ actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+
+ filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
+ expectedInfo2 = new ParserInfo
+ {
+ Series = "Just Images the second", Volumes = "19", Edition = "",
+ Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+
+ actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+ }
+}
diff --git a/API.Tests/Parser/MagazineParserTests.cs b/API.Tests/Parsing/MagazineParserTests.cs
similarity index 100%
rename from API.Tests/Parser/MagazineParserTests.cs
rename to API.Tests/Parsing/MagazineParserTests.cs
diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parsing/MangaParsingTests.cs
similarity index 85%
rename from API.Tests/Parser/MangaParserTests.cs
rename to API.Tests/Parsing/MangaParsingTests.cs
index 126e781d6..8b93c5f90 100644
--- a/API.Tests/Parser/MangaParserTests.cs
+++ b/API.Tests/Parsing/MangaParsingTests.cs
@@ -1,18 +1,10 @@
using API.Entities.Enums;
using Xunit;
-using Xunit.Abstractions;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsing;
-public class MangaParserTests
+public class MangaParsingTests
{
- private readonly ITestOutputHelper _testOutputHelper;
-
- public MangaParserTests(ITestOutputHelper testOutputHelper)
- {
- _testOutputHelper = testOutputHelper;
- }
-
[Theory]
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "1")]
@@ -25,7 +17,7 @@ public class MangaParserTests
[InlineData("v001", "1")]
[InlineData("Vol 1", "1")]
[InlineData("vol_356-1", "356")] // Mangapy syntax
- [InlineData("No Volume", "0")]
+ [InlineData("No Volume", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1.1")]
[InlineData("Tonikaku Cawaii [Volume 11].cbz", "11")]
@@ -40,18 +32,18 @@ public class MangaParserTests
[InlineData("Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", "1")]
[InlineData("Dorohedoro v11 (2013) (Digital) (LostNerevarine-Empire).cbz", "11")]
[InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")]
- [InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "0")]
+ [InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1")]
- [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "0")]
+ [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("VanDread-v01-c001[MD].zip", "1")]
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
[InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
[InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
[InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
- [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", "0")]
+ [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Vagabond_v03", "3")]
[InlineData("Mujaki No Rakune Volume 10.cbz", "10")]
- [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "0")]
+ [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")]
[InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")]
[InlineData("Gantz.V26.cbz", "26")]
@@ -60,7 +52,7 @@ public class MangaParserTests
[InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")]
[InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "1")]
[InlineData("Sword Art Online Vol 10 - Alicization Running [Yen Press] [LuCaZ] {r2}.epub", "10")]
- [InlineData("Noblesse - Episode 406 (52 Pages).7z", "0")]
+ [InlineData("Noblesse - Episode 406 (52 Pages).7z", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("X-Men v1 #201 (September 2007).cbz", "1")]
[InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")]
[InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")]
@@ -72,21 +64,23 @@ public class MangaParserTests
[InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1-3巻", "1-3")]
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "3.5")]
[InlineData("Kebab Том 1 Глава 3", "1")]
- [InlineData("Манга Глава 2", "0")]
+ [InlineData("Манга Глава 2", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Манга Тома 1-4", "1-4")]
[InlineData("Манга Том 1-4", "1-4")]
[InlineData("조선왕조실톡 106화", "106")]
[InlineData("죽음 13회", "13")]
[InlineData("동의보감 13장", "13")]
[InlineData("몰?루 아카이브 7.5권", "7.5")]
+ [InlineData("주술회전 1.5권", "1.5")]
[InlineData("63권#200", "63")]
[InlineData("시즌34삽화2", "34")]
[InlineData("Accel World Chapter 001 Volume 002", "2")]
[InlineData("Accel World Volume 2", "2")]
[InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.31 Omake", "30")]
+ [InlineData("Zom 100 - Bucket List of the Dead v01", "1")]
public void ParseVolumeTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename));
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Manga));
}
[Theory]
@@ -139,7 +133,6 @@ public class MangaParserTests
[InlineData("Vagabond_v03", "Vagabond")]
[InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")]
[InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")]
- [InlineData("[BAA]_Darker_than_Black_Omake-1.zip", "Darker than Black")]
[InlineData("Baketeriya ch01-05.zip", "Baketeriya")]
[InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")]
[InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")]
@@ -206,21 +199,30 @@ public class MangaParserTests
[InlineData("test 2 years 1권", "test 2 years")]
[InlineData("test 2 years 1화", "test 2 years")]
[InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.30 Omake", "Nagasarete Airantou")]
+ [InlineData("Cynthia The Mission - c000 - c006 (v06)", "Cynthia The Mission")]
+ [InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1", "เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท")]
+ [InlineData("Max Level Returner เล่มที่ 5", "Max Level Returner")]
+ [InlineData("หนึ่งความคิด นิจนิรันดร์ เล่ม 2", "หนึ่งความคิด นิจนิรันดร์")]
+ [InlineData("不安の種\uff0b - 01", "不安の種\uff0b")]
+ [InlineData("Giant Ojou-sama - Ch. 33.5 - Volume 04 Bonus Chapter", "Giant Ojou-sama")]
+ [InlineData("[218565]-(C92) [BRIO (Puyocha)] Mika-nee no Tanryoku Shidou - Mika s Guide to Self-Confidence (THE IDOLM@STE", "")]
+ [InlineData("Monster #8 Ch. 001", "Monster #8")]
+ [InlineData("Zom 100 - Bucket List of the Dead v01", "Zom 100 - Bucket List of the Dead")]
public void ParseSeriesTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename));
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Manga));
}
[Theory]
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")]
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "90-98")]
- [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "0")]
- [InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "0")]
+ [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
+ [InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1-8")]
- [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "0")]
+ [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("c001", "1")]
- [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", "0")]
+ [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")]
[InlineData("Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz", "18")]
[InlineData("Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip", "0-6")]
@@ -243,7 +245,7 @@ public class MangaParserTests
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1-6")]
[InlineData("APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", "40")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "12")]
- [InlineData("Vol 1", "0")]
+ [InlineData("Vol 1", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("VanDread-v01-c001[MD].zip", "1")]
[InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01", "1")]
@@ -255,10 +257,10 @@ public class MangaParserTests
[InlineData("Fullmetal Alchemist chapters 101-108.cbz", "101-108")]
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")]
[InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")]
- [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")]
+ [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Beelzebub_153b_RHS.zip", "153.5")]
[InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")]
- [InlineData("Transferred to another world magical swordsman v1.1", "0")]
+ [InlineData("Transferred to another world magical swordsman v1.1", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")]
[InlineData("Kiss x Sis - Ch.12 - 1 , 2 , 3P!.cbz", "12")]
[InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "1")]
@@ -277,26 +279,31 @@ public class MangaParserTests
[InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")]
[InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")]
[InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")]
- [InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", "0")]
+ [InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Kaiju No. 8 036 (2021) (Digital)", "36")]
- [InlineData("Samurai Jack Vol. 01 - The threads of Time", "0")]
+ [InlineData("Samurai Jack Vol. 01 - The threads of Time", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("【TFO汉化&Petit汉化】迷你偶像漫画第25话", "25")]
[InlineData("자유록 13회#2", "13")]
[InlineData("이세계에서 고아원을 열었지만, 어째서인지 아무도 독립하려 하지 않는다 38-1화 ", "38")]
[InlineData("[ハレム]ナナとカオル ~高校生のSMごっこ~ 第10話", "10")]
- [InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "0")]
+ [InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Kebab Том 1 Глава 3", "3")]
[InlineData("Манга Глава 2", "2")]
[InlineData("Манга 2 Глава", "2")]
[InlineData("Манга Том 1 2 Глава", "2")]
[InlineData("Accel World Chapter 001 Volume 002", "1")]
[InlineData("Bleach 001-003", "1-3")]
- [InlineData("Accel World Volume 2", "0")]
+ [InlineData("Accel World Volume 2", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98", "90-98")]
[InlineData("Historys Strongest Disciple Kenichi c01-c04", "1-4")]
+ [InlineData("Adabana c00-02", "0-2")]
+ [InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
+ [InlineData("Max Level Returner ตอนที่ 5", "5")]
+ [InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
+ [InlineData("Monster #8 Ch. 001", "1")]
public void ParseChaptersTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename));
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename, LibraryType.Manga));
}
@@ -316,25 +323,25 @@ public class MangaParserTests
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input));
}
[Theory]
- [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)]
- [InlineData("Beelzebub_Omake_June_2012_RHS", true)]
+ [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", false)]
+ [InlineData("Beelzebub_Omake_June_2012_RHS", false)]
[InlineData("Beelzebub_Side_Story_02_RHS.zip", false)]
- [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)]
- [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)]
- [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)]
- [InlineData("Ani-Hina Art Collection.cbz", true)]
- [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)]
- [InlineData("A Town Where You Live - Bonus Chapter.zip", true)]
+ [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", false)]
+ [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", false)]
+ [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", false)]
+ [InlineData("Ani-Hina Art Collection.cbz", false)]
+ [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", false)]
+ [InlineData("A Town Where You Live - Bonus Chapter.zip", false)]
[InlineData("Yuki Merry - 4-Komga Anthology", false)]
- [InlineData("Beastars - SP01", false)]
- [InlineData("Beastars SP01", false)]
+ [InlineData("Beastars - SP01", true)]
+ [InlineData("Beastars SP01", true)]
[InlineData("The League of Extraordinary Gentlemen", false)]
[InlineData("The League of Extra-ordinary Gentlemen", false)]
[InlineData("Dr. Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire)", false)]
[InlineData("Hajime no Ippo - Artbook", false)]
public void IsMangaSpecialTest(string input, bool expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsMangaSpecial(input));
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsSpecial(input, LibraryType.Manga));
}
[Theory]
diff --git a/API.Tests/Parser/ParserInfoTests.cs b/API.Tests/Parsing/ParserInfoTests.cs
similarity index 90%
rename from API.Tests/Parser/ParserInfoTests.cs
rename to API.Tests/Parsing/ParserInfoTests.cs
index e7c48317b..cbb8ae99a 100644
--- a/API.Tests/Parser/ParserInfoTests.cs
+++ b/API.Tests/Parsing/ParserInfoTests.cs
@@ -2,7 +2,7 @@
using API.Services.Tasks.Scanner.Parser;
using Xunit;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsing;
public class ParserInfoTests
{
@@ -11,14 +11,14 @@ public class ParserInfoTests
{
var p1 = new ParserInfo()
{
- Chapters = "0",
+ Chapters = Parser.DefaultChapter,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/darker than black.cbz",
IsSpecial = false,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = Parser.LooseLeafVolume
};
var p2 = new ParserInfo()
@@ -30,7 +30,7 @@ public class ParserInfoTests
IsSpecial = false,
Series = "darker than black",
Title = "Darker Than Black",
- Volumes = "0"
+ Volumes = Parser.LooseLeafVolume
};
var expected = new ParserInfo()
@@ -42,7 +42,7 @@ public class ParserInfoTests
IsSpecial = false,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = Parser.LooseLeafVolume
};
p1.Merge(p2);
@@ -62,12 +62,12 @@ public class ParserInfoTests
IsSpecial = true,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = Parser.LooseLeafVolume
};
var p2 = new ParserInfo()
{
- Chapters = "0",
+ Chapters = Parser.DefaultChapter,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/darker than black.cbz",
diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parsing/ParsingTests.cs
similarity index 89%
rename from API.Tests/Parser/ParserTest.cs
rename to API.Tests/Parsing/ParsingTests.cs
index 5bdd3eb6e..7d5da4f9c 100644
--- a/API.Tests/Parser/ParserTest.cs
+++ b/API.Tests/Parsing/ParsingTests.cs
@@ -3,18 +3,32 @@ using System.Linq;
using Xunit;
using static API.Services.Tasks.Scanner.Parser.Parser;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsing;
-public class ParserTests
+public class ParsingTests
{
[Fact]
public void ShouldWork()
{
- var s = 6.5f + "";
+ var s = 6.5f.ToString(CultureInfo.InvariantCulture);
var a = float.Parse(s, CultureInfo.InvariantCulture);
Assert.Equal(6.5f, a);
+
+ s = 6.5f + "";
+ a = float.Parse(s, CultureInfo.CurrentCulture);
+ Assert.Equal(6.5f, a);
}
+ // [Theory]
+ // [InlineData("de-DE")]
+ // [InlineData("en-US")]
+ // public void ShouldParse(string culture)
+ // {
+ // var s = 6.5f + "";
+ // var a = float.Parse(s, CultureInfo.CreateSpecificCulture(culture));
+ // Assert.Equal(6.5f, a);
+ // }
+
[Theory]
[InlineData("Joe Shmo, Green Blue", "Joe Shmo, Green Blue")]
[InlineData("Shmo, Joe", "Shmo, Joe")]
@@ -29,6 +43,7 @@ public class ParserTests
[InlineData("DEAD Tube Prologue", "DEAD Tube Prologue")]
[InlineData("DEAD Tube Prologue SP01", "DEAD Tube Prologue")]
[InlineData("DEAD_Tube_Prologue SP01", "DEAD Tube Prologue")]
+ [InlineData("SP01 1. DEAD Tube Prologue", "1. DEAD Tube Prologue")]
public void CleanSpecialTitleTest(string input, string expected)
{
Assert.Equal(expected, CleanSpecialTitle(input));
@@ -45,6 +60,18 @@ public class ParserTests
Assert.Equal(expected, HasSpecialMarker(input));
}
+ [Theory]
+ [InlineData("Beastars - SP01", 1)]
+ [InlineData("Beastars SP01", 1)]
+ [InlineData("Beastars Special 01", 0)]
+ [InlineData("Beastars Extra 01", 0)]
+ [InlineData("Batman Beyond - Return of the Joker (2001) SP01", 1)]
+ [InlineData("Batman Beyond - Return of the Joker (2001)", 0)]
+ public void ParseSpecialIndexTest(string input, int expected)
+ {
+ Assert.Equal(expected, ParseSpecialIndex(input));
+ }
+
[Theory]
[InlineData("0001", "1")]
[InlineData("1", "1")]
@@ -71,7 +98,8 @@ public class ParserTests
[InlineData("-The Title", false, "The Title")]
[InlineData("- The Title", false, "The Title")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")]
- [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)", true, "Batman - Detective Comics - Rebirth Deluxe Edition")]
+ [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)",
+ true, "Batman - Detective Comics - Rebirth Deluxe Edition Book 04")]
[InlineData("Something - Full Color Edition", false, "Something - Full Color Edition")]
[InlineData("Witchblade 089 (2005) (Bittertek-DCP) (Top Cow (Image Comics))", true, "Witchblade 089")]
[InlineData("(C99) Kami-sama Hiroimashita. (SSSS.GRIDMAN)", false, "Kami-sama Hiroimashita.")]
@@ -155,6 +183,7 @@ public class ParserTests
[InlineData("3.5", 3.5)]
[InlineData("3.5-4.0", 3.5)]
[InlineData("asdfasdf", 0.0)]
+ [InlineData("-10", -10.0)]
public void MinimumNumberFromRangeTest(string input, float expected)
{
Assert.Equal(expected, MinNumberFromRange(input));
@@ -171,6 +200,7 @@ public class ParserTests
[InlineData("3.5", 3.5)]
[InlineData("3.5-4.0", 4.0)]
[InlineData("asdfasdf", 0.0)]
+ [InlineData("-10", -10.0)]
public void MaximumNumberFromRangeTest(string input, float expected)
{
Assert.Equal(expected, MaxNumberFromRange(input));
@@ -186,6 +216,7 @@ public class ParserTests
[InlineData("카비타", "카비타")]
[InlineData("06", "06")]
[InlineData("", "")]
+ [InlineData("不安の種+", "不安の種+")]
public void NormalizeTest(string input, string expected)
{
Assert.Equal(expected, Normalize(input));
@@ -220,6 +251,7 @@ public class ParserTests
[InlineData("ch1/backcover.png", false)]
[InlineData("backcover.png", false)]
[InlineData("back_cover.png", false)]
+ [InlineData("LD Blacklands #1 35 (back cover).png", false)]
public void IsCoverImageTest(string inputPath, bool expected)
{
Assert.Equal(expected, IsCoverImage(inputPath));
@@ -235,6 +267,7 @@ public class ParserTests
[InlineData("@recycle/Love Hina/", true)]
[InlineData("E:/Test/__MACOSX/Love Hina/", true)]
[InlineData("E:/Test/.caltrash/Love Hina/", true)]
+ [InlineData("E:/Test/.yacreaderlibrary/Love Hina/", true)]
public void HasBlacklistedFolderInPathTest(string inputPath, bool expected)
{
Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
diff --git a/API.Tests/Repository/CollectionTagRepositoryTests.cs b/API.Tests/Repository/CollectionTagRepositoryTests.cs
index 1859ab1fc..5318260be 100644
--- a/API.Tests/Repository/CollectionTagRepositoryTests.cs
+++ b/API.Tests/Repository/CollectionTagRepositoryTests.cs
@@ -15,7 +15,6 @@ using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
-using Xunit;
namespace API.Tests.Repository;
@@ -114,65 +113,65 @@ public class CollectionTagRepositoryTests
#endregion
- #region RemoveTagsWithoutSeries
-
- [Fact]
- public async Task RemoveTagsWithoutSeries_ShouldRemoveTags()
- {
- var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
- var series = new SeriesBuilder("Test 1").Build();
- var commonTag = new CollectionTagBuilder("Tag 1").Build();
- series.Metadata.CollectionTags.Add(commonTag);
- series.Metadata.CollectionTags.Add(new CollectionTagBuilder("Tag 2").Build());
-
- var series2 = new SeriesBuilder("Test 1").Build();
- series2.Metadata.CollectionTags.Add(commonTag);
- library.Series.Add(series);
- library.Series.Add(series2);
- _unitOfWork.LibraryRepository.Add(library);
- await _unitOfWork.CommitAsync();
-
- Assert.Equal(2, series.Metadata.CollectionTags.Count);
- Assert.Single(series2.Metadata.CollectionTags);
-
- // Delete both series
- _unitOfWork.SeriesRepository.Remove(series);
- _unitOfWork.SeriesRepository.Remove(series2);
-
- await _unitOfWork.CommitAsync();
-
- // Validate that both tags exist
- Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
-
- await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
-
- Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
- }
-
- [Fact]
- public async Task RemoveTagsWithoutSeries_ShouldNotRemoveTags()
- {
- var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
- var series = new SeriesBuilder("Test 1").Build();
- var commonTag = new CollectionTagBuilder("Tag 1").Build();
- series.Metadata.CollectionTags.Add(commonTag);
- series.Metadata.CollectionTags.Add(new CollectionTagBuilder("Tag 2").Build());
-
- var series2 = new SeriesBuilder("Test 1").Build();
- series2.Metadata.CollectionTags.Add(commonTag);
- library.Series.Add(series);
- library.Series.Add(series2);
- _unitOfWork.LibraryRepository.Add(library);
- await _unitOfWork.CommitAsync();
-
- Assert.Equal(2, series.Metadata.CollectionTags.Count);
- Assert.Single(series2.Metadata.CollectionTags);
-
- await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
-
- // Validate that both tags exist
- Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
- }
-
- #endregion
+ // #region RemoveTagsWithoutSeries
+ //
+ // [Fact]
+ // public async Task RemoveTagsWithoutSeries_ShouldRemoveTags()
+ // {
+ // var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
+ // var series = new SeriesBuilder("Test 1").Build();
+ // var commonTag = new AppUserCollectionBuilder("Tag 1").Build();
+ // series.Metadata.CollectionTags.Add(commonTag);
+ // series.Metadata.CollectionTags.Add(new AppUserCollectionBuilder("Tag 2").Build());
+ //
+ // var series2 = new SeriesBuilder("Test 1").Build();
+ // series2.Metadata.CollectionTags.Add(commonTag);
+ // library.Series.Add(series);
+ // library.Series.Add(series2);
+ // _unitOfWork.LibraryRepository.Add(library);
+ // await _unitOfWork.CommitAsync();
+ //
+ // Assert.Equal(2, series.Metadata.CollectionTags.Count);
+ // Assert.Single(series2.Metadata.CollectionTags);
+ //
+ // // Delete both series
+ // _unitOfWork.SeriesRepository.Remove(series);
+ // _unitOfWork.SeriesRepository.Remove(series2);
+ //
+ // await _unitOfWork.CommitAsync();
+ //
+ // // Validate that both tags exist
+ // Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
+ //
+ // await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
+ //
+ // Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
+ // }
+ //
+ // [Fact]
+ // public async Task RemoveTagsWithoutSeries_ShouldNotRemoveTags()
+ // {
+ // var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
+ // var series = new SeriesBuilder("Test 1").Build();
+ // var commonTag = new AppUserCollectionBuilder("Tag 1").Build();
+ // series.Metadata.CollectionTags.Add(commonTag);
+ // series.Metadata.CollectionTags.Add(new AppUserCollectionBuilder("Tag 2").Build());
+ //
+ // var series2 = new SeriesBuilder("Test 1").Build();
+ // series2.Metadata.CollectionTags.Add(commonTag);
+ // library.Series.Add(series);
+ // library.Series.Add(series2);
+ // _unitOfWork.LibraryRepository.Add(library);
+ // await _unitOfWork.CommitAsync();
+ //
+ // Assert.Equal(2, series.Metadata.CollectionTags.Count);
+ // Assert.Single(series2.Metadata.CollectionTags);
+ //
+ // await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
+ //
+ // // Validate that both tags exist
+ // Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
+ // }
+ //
+ // #endregion
}
diff --git a/API.Tests/Repository/SeriesRepositoryTests.cs b/API.Tests/Repository/SeriesRepositoryTests.cs
index ec4b2a9f5..5705e1bc0 100644
--- a/API.Tests/Repository/SeriesRepositoryTests.cs
+++ b/API.Tests/Repository/SeriesRepositoryTests.cs
@@ -6,7 +6,6 @@ using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
-using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using API.Services;
@@ -159,4 +158,6 @@ public class SeriesRepositoryTests
}
}
+ // TODO: GetSeriesDtoForLibraryIdV2Async Tests (On Deck)
+
}
diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs
index 086d99863..8cf93df37 100644
--- a/API.Tests/Services/ArchiveServiceTests.cs
+++ b/API.Tests/Services/ArchiveServiceTests.cs
@@ -7,7 +7,6 @@ using System.Linq;
using API.Archive;
using API.Entities.Enums;
using API.Services;
-using EasyCaching.Core;
using Microsoft.Extensions.Logging;
using NetVips;
using NSubstitute;
@@ -29,7 +28,7 @@ public class ArchiveServiceTests
{
_testOutputHelper = testOutputHelper;
_archiveService = new ArchiveService(_logger, _directoryService,
- new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService, Substitute.For()),
+ new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService),
Substitute.For());
}
@@ -167,7 +166,7 @@ public class ArchiveServiceTests
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
{
var ds = Substitute.For(_directoryServiceLogger, new FileSystem());
- var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds, Substitute.For());
+ var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds);
var archiveService = Substitute.For(_logger, ds, imageService, Substitute.For());
var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"));
@@ -198,7 +197,7 @@ public class ArchiveServiceTests
[InlineData("sorting.zip", "sorting.expected.png")]
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
{
- var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService, Substitute.For());
+ var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
var archiveService = Substitute.For(_logger,
new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService,
Substitute.For());
diff --git a/API.Tests/Services/BackupServiceTests.cs b/API.Tests/Services/BackupServiceTests.cs
index c4ca95a11..aac5724f7 100644
--- a/API.Tests/Services/BackupServiceTests.cs
+++ b/API.Tests/Services/BackupServiceTests.cs
@@ -1,10 +1,8 @@
-using System.Collections.Generic;
-using System.Data.Common;
+using System.Data.Common;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
-using API.Entities;
using API.Entities.Enums;
using API.Helpers.Builders;
using API.Services;
@@ -21,7 +19,7 @@ using Xunit;
namespace API.Tests.Services;
-public class BackupServiceTests
+public class BackupServiceTests: AbstractFsTest
{
private readonly ILogger<BackupService> _logger = Substitute.For<ILogger<BackupService>>();
private readonly IUnitOfWork _unitOfWork;
@@ -31,13 +29,6 @@ public class BackupServiceTests
private readonly DbConnection _connection;
private readonly DataContext _context;
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string LogDirectory = "C:/kavita/config/logs/";
- private const string ConfigDirectory = "C:/kavita/config/";
- private const string BookmarkDirectory = "C:/kavita/config/bookmarks";
- private const string ThemesDirectory = "C:/kavita/config/theme";
public BackupServiceTests()
{
@@ -82,7 +73,7 @@ public class BackupServiceTests
_context.ServerSetting.Update(setting);
_context.Library.Add(new LibraryBuilder("Manga")
- .WithFolderPath(new FolderPathBuilder("C:/data/").Build())
+ .WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
return await _context.SaveChangesAsync() > 0;
}
@@ -94,22 +85,6 @@ public class BackupServiceTests
await _context.SaveChangesAsync();
}
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(LogDirectory);
- fileSystem.AddDirectory(ThemesDirectory);
- fileSystem.AddDirectory(BookmarkDirectory);
- fileSystem.AddDirectory("C:/data/");
-
- return fileSystem;
- }
-
#endregion
diff --git a/API.Tests/Services/BookServiceTests.cs b/API.Tests/Services/BookServiceTests.cs
index e4647524e..a80c1ca01 100644
--- a/API.Tests/Services/BookServiceTests.cs
+++ b/API.Tests/Services/BookServiceTests.cs
@@ -1,7 +1,8 @@
using System.IO;
using System.IO.Abstractions;
+using API.Entities.Enums;
using API.Services;
-using EasyCaching.Core;
+using API.Services.Tasks.Scanner.Parser;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
@@ -17,7 +18,7 @@ public class BookServiceTests
{
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
_bookService = new BookService(_logger, directoryService,
- new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService, Substitute.For())
+ new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService)
, Substitute.For());
}
@@ -81,4 +82,64 @@ public class BookServiceTests
Assert.Equal("Accel World", comicInfo.Series);
}
+ [Fact]
+ public void ShouldHaveComicInfoForPdf()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+ var document = Path.Join(testDirectory, "test.pdf");
+ var comicInfo = _bookService.GetComicInfo(document);
+ Assert.NotNull(comicInfo);
+ Assert.Equal("Variations Chromatiques de concert", comicInfo.Title);
+ Assert.Equal("Georges Bizet \\(1838-1875\\)", comicInfo.Writer);
+ }
+
+ //[Fact]
+ public void ShouldUsePdfInfoDict()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Library/Books/PDFs");
+ var document = Path.Join(testDirectory, "Rollo at Work SP01.pdf");
+ var comicInfo = _bookService.GetComicInfo(document);
+ Assert.NotNull(comicInfo);
+ Assert.Equal("Rollo at Work", comicInfo.Title);
+ Assert.Equal("Jacob Abbott", comicInfo.Writer);
+ Assert.Equal(2008, comicInfo.Year);
+ }
+
+ [Fact]
+ public void ShouldHandleIndirectPdfObjects()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+ var document = Path.Join(testDirectory, "indirect.pdf");
+ var comicInfo = _bookService.GetComicInfo(document);
+ Assert.NotNull(comicInfo);
+ Assert.Equal(2018, comicInfo.Year);
+ Assert.Equal(8, comicInfo.Month);
+ }
+
+ [Fact]
+ public void FailGracefullyWithEncryptedPdf()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+ var document = Path.Join(testDirectory, "encrypted.pdf");
+ var comicInfo = _bookService.GetComicInfo(document);
+ Assert.Null(comicInfo);
+ }
+
+ [Fact]
+ public void SeriesFallBackToMetadataTitle()
+ {
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
+ var pdfParser = new PdfParser(ds);
+
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+ var filePath = Path.Join(testDirectory, "Bizet-Variations_Chromatiques_de_concert_Theme_A4.pdf");
+
+ var comicInfo = _bookService.GetComicInfo(filePath);
+ Assert.NotNull(comicInfo);
+
+ var parserInfo = pdfParser.Parse(filePath, testDirectory, ds.GetParentDirectoryName(testDirectory), LibraryType.Book, comicInfo);
+ Assert.NotNull(parserInfo);
+ Assert.Equal(parserInfo.Title, comicInfo.Title);
+ Assert.Equal(parserInfo.Series, comicInfo.Title);
+ }
}
diff --git a/API.Tests/Services/BookmarkServiceTests.cs b/API.Tests/Services/BookmarkServiceTests.cs
index 6a82f457d..596fbbc4d 100644
--- a/API.Tests/Services/BookmarkServiceTests.cs
+++ b/API.Tests/Services/BookmarkServiceTests.cs
@@ -9,12 +9,9 @@ using API.Data.Repositories;
using API.DTOs.Reader;
using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
-using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using API.Services;
-using API.SignalR;
using AutoMapper;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
@@ -25,17 +22,12 @@ using Xunit;
namespace API.Tests.Services;
-public class BookmarkServiceTests
+public class BookmarkServiceTests: AbstractFsTest
{
private readonly IUnitOfWork _unitOfWork;
private readonly DbConnection _connection;
private readonly DataContext _context;
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
-
public BookmarkServiceTests()
{
@@ -88,7 +80,7 @@ Substitute.For());
_context.ServerSetting.Update(setting);
_context.Library.Add(new LibraryBuilder("Manga")
- .WithFolderPath(new FolderPathBuilder("C:/data/").Build())
+ .WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
return await _context.SaveChangesAsync() > 0;
}
@@ -102,20 +94,6 @@ Substitute.For());
await _context.SaveChangesAsync();
}
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(BookmarkDirectory);
- fileSystem.AddDirectory("C:/data/");
-
- return fileSystem;
- }
-
#endregion
#region BookmarkPage
@@ -132,7 +110,7 @@ Substitute.For());
var series = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.Build())
.Build())
@@ -181,7 +159,7 @@ Substitute.For());
.WithFormat(MangaFormat.Epub)
.WithVolume(new VolumeBuilder("1")
.WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.Build())
.Build())
.Build();
diff --git a/API.Tests/Services/CacheServiceTests.cs b/API.Tests/Services/CacheServiceTests.cs
index e1419e052..5c1752cd8 100644
--- a/API.Tests/Services/CacheServiceTests.cs
+++ b/API.Tests/Services/CacheServiceTests.cs
@@ -1,12 +1,10 @@
-using System.Collections.Generic;
-using System.Data.Common;
+using System.Data.Common;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
-using API.Entities;
using API.Entities.Enums;
using API.Helpers.Builders;
using API.Services;
@@ -52,17 +50,17 @@ internal class MockReadingItemServiceForCacheService : IReadingItemService
throw new System.NotImplementedException();
}
- public ParserInfo Parse(string path, string rootPath, LibraryType type)
+ public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type)
{
throw new System.NotImplementedException();
}
- public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
+ public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
{
throw new System.NotImplementedException();
}
}
-public class CacheServiceTests
+public class CacheServiceTests: AbstractFsTest
{
private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
private readonly IUnitOfWork _unitOfWork;
@@ -71,11 +69,6 @@ public class CacheServiceTests
private readonly DbConnection _connection;
private readonly DataContext _context;
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string DataDirectory = "C:/data/";
-
public CacheServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
@@ -118,7 +111,7 @@ public class CacheServiceTests
_context.ServerSetting.Update(setting);
_context.Library.Add(new LibraryBuilder("Manga")
- .WithFolderPath(new FolderPathBuilder("C:/data/").Build())
+ .WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
return await _context.SaveChangesAsync() > 0;
}
@@ -130,19 +123,6 @@ public class CacheServiceTests
await _context.SaveChangesAsync();
}
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(DataDirectory);
-
- return fileSystem;
- }
-
#endregion
#region Ensure
@@ -156,7 +136,9 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(),
+ Substitute.For(), ds, Substitute.For<ILogger<ReadingItemService>>()),
+ Substitute.For());
await ResetDB();
var s = new SeriesBuilder("Test").Build();
@@ -231,7 +213,8 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For<ILogger<ReadingItemService>>()),
+ Substitute.For());
cleanupService.CleanupChapters(new []{1, 3});
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
@@ -252,14 +235,15 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For<ILogger<ReadingItemService>>()),
+ Substitute.For());
var c = new ChapterBuilder("1")
.WithFile(new MangaFileBuilder($"{DataDirectory}1.epub", MangaFormat.Epub).Build())
.WithFile(new MangaFileBuilder($"{DataDirectory}2.epub", MangaFormat.Epub).Build())
.Build();
cs.GetCachedFile(c);
- Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c));
+ Assert.Equal($"{DataDirectory}1.epub", cs.GetCachedFile(c));
}
#endregion
@@ -292,7 +276,8 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For<ILogger<ReadingItemService>>()),
+ Substitute.For());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
@@ -335,7 +320,8 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For<ILogger<ReadingItemService>>()),
+ Substitute.For());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
@@ -375,7 +361,8 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For<ILogger<ReadingItemService>>()),
+ Substitute.For());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
@@ -419,7 +406,8 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For<ILogger<ReadingItemService>>()),
+ Substitute.For());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
diff --git a/API.Tests/Services/CleanupServiceTests.cs b/API.Tests/Services/CleanupServiceTests.cs
index 8c29c5c18..0f1e9e9da 100644
--- a/API.Tests/Services/CleanupServiceTests.cs
+++ b/API.Tests/Services/CleanupServiceTests.cs
@@ -1,16 +1,13 @@
using System;
using System.Collections.Generic;
using System.IO;
-using System.IO.Abstractions;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
-using API.Data;
using API.Data.Repositories;
using API.DTOs.Filtering;
using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
@@ -30,11 +27,10 @@ public class CleanupServiceTests : AbstractDbTest
private readonly IEventHub _messageHub = Substitute.For();
private readonly IReaderService _readerService;
-
public CleanupServiceTests() : base()
{
_context.Library.Add(new LibraryBuilder("Manga")
- .WithFolderPath(new FolderPathBuilder("C:/data/").Build())
+ .WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(), Substitute.For(),
@@ -139,7 +135,7 @@ public class CleanupServiceTests : AbstractDbTest
// Add 2 series with cover images
_context.Series.Add(new SeriesBuilder("Test 1")
.WithVolume(new VolumeBuilder("1")
- .WithChapter(new ChapterBuilder("0").WithCoverImage("v01_c01.jpg").Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c01.jpg").Build())
.WithCoverImage("v01_c01.jpg")
.Build())
.WithCoverImage("series_01.jpg")
@@ -148,7 +144,7 @@ public class CleanupServiceTests : AbstractDbTest
_context.Series.Add(new SeriesBuilder("Test 2")
.WithVolume(new VolumeBuilder("1")
- .WithChapter(new ChapterBuilder("0").WithCoverImage("v01_c03.jpg").Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c03.jpg").Build())
.WithCoverImage("v01_c03.jpg")
.Build())
.WithCoverImage("series_03.jpg")
@@ -167,53 +163,53 @@ public class CleanupServiceTests : AbstractDbTest
}
#endregion
- #region DeleteTagCoverImages
-
- [Fact]
- public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
- {
- var filesystem = CreateFileSystem();
- filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1)}.jpg", new MockFileData(""));
- filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(2)}.jpg", new MockFileData(""));
- filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
-
- // Delete all Series to reset state
- await ResetDb();
-
- // Add 2 series with cover images
-
- _context.Series.Add(new SeriesBuilder("Test 1")
- .WithMetadata(new SeriesMetadataBuilder()
- .WithCollectionTag(new CollectionTagBuilder("Something")
- .WithCoverImage($"{ImageService.GetCollectionTagFormat(1)}.jpg")
- .Build())
- .Build())
- .WithCoverImage($"{ImageService.GetSeriesFormat(1)}.jpg")
- .WithLibraryId(1)
- .Build());
-
- _context.Series.Add(new SeriesBuilder("Test 2")
- .WithMetadata(new SeriesMetadataBuilder()
- .WithCollectionTag(new CollectionTagBuilder("Something")
- .WithCoverImage($"{ImageService.GetCollectionTagFormat(2)}.jpg")
- .Build())
- .Build())
- .WithCoverImage($"{ImageService.GetSeriesFormat(3)}.jpg")
- .WithLibraryId(1)
- .Build());
-
-
- await _context.SaveChangesAsync();
- var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
- ds);
-
- await cleanupService.DeleteTagCoverImages();
-
- Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
- }
-
- #endregion
+ // #region DeleteTagCoverImages
+ //
+ // [Fact]
+ // public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
+ // {
+ // var filesystem = CreateFileSystem();
+ // filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1)}.jpg", new MockFileData(""));
+ // filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(2)}.jpg", new MockFileData(""));
+ // filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
+ //
+ // // Delete all Series to reset state
+ // await ResetDb();
+ //
+ // // Add 2 series with cover images
+ //
+ // _context.Series.Add(new SeriesBuilder("Test 1")
+ // .WithMetadata(new SeriesMetadataBuilder()
+ // .WithCollectionTag(new AppUserCollectionBuilder("Something")
+ // .WithCoverImage($"{ImageService.GetCollectionTagFormat(1)}.jpg")
+ // .Build())
+ // .Build())
+ // .WithCoverImage($"{ImageService.GetSeriesFormat(1)}.jpg")
+ // .WithLibraryId(1)
+ // .Build());
+ //
+ // _context.Series.Add(new SeriesBuilder("Test 2")
+ // .WithMetadata(new SeriesMetadataBuilder()
+ // .WithCollectionTag(new AppUserCollectionBuilder("Something")
+ // .WithCoverImage($"{ImageService.GetCollectionTagFormat(2)}.jpg")
+ // .Build())
+ // .Build())
+ // .WithCoverImage($"{ImageService.GetSeriesFormat(3)}.jpg")
+ // .WithLibraryId(1)
+ // .Build());
+ //
+ //
+ // await _context.SaveChangesAsync();
+ // var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
+ // var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ // ds);
+ //
+ // await cleanupService.DeleteTagCoverImages();
+ //
+ // Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
+ // }
+ //
+ // #endregion
#region DeleteReadingListCoverImages
[Fact]
@@ -389,13 +385,12 @@ public class CleanupServiceTests : AbstractDbTest
[Fact]
public async Task CleanupDbEntries_CleanupAbandonedChapters()
{
- var c = new ChapterBuilder("0")
+ var c = new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(1)
.Build();
var series = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(1)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(c)
.Build())
.Build();
@@ -436,24 +431,26 @@ public class CleanupServiceTests : AbstractDbTest
[Fact]
public async Task CleanupDbEntries_RemoveTagsWithoutSeries()
{
- var c = new CollectionTag()
+ var s = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithMetadata(new SeriesMetadataBuilder().Build())
+ .Build();
+ s.Library = new LibraryBuilder("Test LIb").Build();
+ _context.Series.Add(s);
+
+ var c = new AppUserCollection()
{
Title = "Test Tag",
NormalizedTitle = "Test Tag".ToNormalized(),
+ AgeRating = AgeRating.Unknown,
+ Items = new List() {s}
};
- var s = new SeriesBuilder("Test")
- .WithFormat(MangaFormat.Epub)
- .WithMetadata(new SeriesMetadataBuilder().WithCollectionTag(c).Build())
- .Build();
- s.Library = new LibraryBuilder("Test LIb").Build();
-
- _context.Series.Add(s);
_context.AppUser.Add(new AppUser()
{
- UserName = "majora2007"
+ UserName = "majora2007",
+ Collections = new List() {c}
});
-
await _context.SaveChangesAsync();
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
@@ -466,7 +463,7 @@ public class CleanupServiceTests : AbstractDbTest
await cleanupService.CleanupDbEntries();
- Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
+ Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllCollectionsAsync());
}
#endregion
@@ -520,6 +517,71 @@ public class CleanupServiceTests : AbstractDbTest
}
#endregion
+ #region ConsolidateProgress
+
+ [Fact]
+ public async Task ConsolidateProgress_ShouldRemoveDuplicates()
+ {
+ await ResetDb();
+
+ var s = new SeriesBuilder("Test ConsolidateProgress_ShouldRemoveDuplicates")
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithPages(3)
+ .Build())
+ .Build())
+ .Build();
+
+ s.Library = new LibraryBuilder("Test Lib").Build();
+ _context.Series.Add(s);
+
+ var user = new AppUser()
+ {
+ UserName = "ConsolidateProgress_ShouldRemoveDuplicates",
+ };
+ _context.AppUser.Add(user);
+
+ await _unitOfWork.CommitAsync();
+
+ // Add 2 progress events
+ user.Progresses ??= [];
+ user.Progresses.Add(new AppUserProgress()
+ {
+ ChapterId = 1,
+ VolumeId = 1,
+ SeriesId = 1,
+ LibraryId = s.LibraryId,
+ PagesRead = 1,
+ });
+ await _unitOfWork.CommitAsync();
+
+ // Add a duplicate with higher page number
+ user.Progresses.Add(new AppUserProgress()
+ {
+ ChapterId = 1,
+ VolumeId = 1,
+ SeriesId = 1,
+ LibraryId = s.LibraryId,
+ PagesRead = 3,
+ });
+ await _unitOfWork.CommitAsync();
+
+ Assert.Equal(2, (await _unitOfWork.AppUserProgressRepository.GetAllProgress()).Count());
+
+ var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
+ Substitute.For(),
+ new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
+
+
+ await cleanupService.ConsolidateProgress();
+
+ var progress = await _unitOfWork.AppUserProgressRepository.GetAllProgress();
+
+ Assert.Single(progress);
+ Assert.True(progress.First().PagesRead == 3);
+ }
+ #endregion
+
#region EnsureChapterProgressIsCapped
@@ -537,7 +599,7 @@ public class CleanupServiceTests : AbstractDbTest
c.UserProgress = new List();
s.Volumes = new List()
{
- new VolumeBuilder("0").WithChapter(c).Build()
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume).WithChapter(c).Build()
};
_context.Series.Add(s);
@@ -586,7 +648,7 @@ public class CleanupServiceTests : AbstractDbTest
}
#endregion
- // #region CleanupBookmarks
+ #region CleanupBookmarks
//
// [Fact]
// public async Task CleanupBookmarks_LeaveAllFiles()
@@ -723,5 +785,5 @@ public class CleanupServiceTests : AbstractDbTest
// Assert.Equal(1, ds.FileSystem.Directory.GetDirectories($"{BookmarkDirectory}1/1/").Length);
// }
//
- // #endregion
+ #endregion
}
diff --git a/API.Tests/Services/CollectionTagServiceTests.cs b/API.Tests/Services/CollectionTagServiceTests.cs
index c06767ed1..14ce131d8 100644
--- a/API.Tests/Services/CollectionTagServiceTests.cs
+++ b/API.Tests/Services/CollectionTagServiceTests.cs
@@ -1,15 +1,18 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
+using API.Constants;
using API.Data;
using API.Data.Repositories;
-using API.DTOs.CollectionTags;
+using API.DTOs.Collection;
using API.Entities;
using API.Entities.Enums;
using API.Helpers.Builders;
using API.Services;
+using API.Services.Plus;
using API.SignalR;
-using API.Tests.Helpers;
+using Kavita.Common;
using NSubstitute;
using Xunit;
@@ -25,7 +28,7 @@ public class CollectionTagServiceTests : AbstractDbTest
protected override async Task ResetDb()
{
- _context.CollectionTag.RemoveRange(_context.CollectionTag.ToList());
+ _context.AppUserCollection.RemoveRange(_context.AppUserCollection.ToList());
_context.Library.RemoveRange(_context.Library.ToList());
await _unitOfWork.CommitAsync();
@@ -33,119 +36,494 @@ public class CollectionTagServiceTests : AbstractDbTest
private async Task SeedSeries()
{
- if (_context.CollectionTag.Any()) return;
+ if (_context.AppUserCollection.Any()) return;
+ var s1 = new SeriesBuilder("Series 1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Mature).Build()).Build();
+ var s2 = new SeriesBuilder("Series 2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.G).Build()).Build();
_context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
- .WithSeries(new SeriesBuilder("Series 1").Build())
- .WithSeries(new SeriesBuilder("Series 2").Build())
+ .WithSeries(s1)
+ .WithSeries(s2)
.Build());
- _context.CollectionTag.Add(new CollectionTagBuilder("Tag 1").Build());
- _context.CollectionTag.Add(new CollectionTagBuilder("Tag 2").WithIsPromoted(true).Build());
+ var user = new AppUserBuilder("majora2007", "majora2007", Seed.DefaultThemes.First()).Build();
+ user.Collections = new List<AppUserCollection>()
+ {
+ new AppUserCollectionBuilder("Tag 1").WithItems(new []{s1}).Build(),
+ new AppUserCollectionBuilder("Tag 2").WithItems(new []{s1, s2}).WithIsPromoted(true).Build()
+ };
+ _unitOfWork.UserRepository.Add(user);
+
await _unitOfWork.CommitAsync();
}
+ #region DeleteTag
[Fact]
- public async Task TagExistsByName_ShouldFindTag()
+ public async Task DeleteTag_ShouldDeleteTag_WhenTagExists()
{
+ // Arrange
await SeedSeries();
- Assert.True(await _service.TagExistsByName("Tag 1"));
- Assert.True(await _service.TagExistsByName("tag 1"));
- Assert.False(await _service.TagExistsByName("tag5"));
+
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Act
+ var result = await _service.DeleteTag(1, user);
+
+ // Assert
+ Assert.True(result);
+ var deletedTag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.Null(deletedTag);
+ Assert.Single(user.Collections); // Only one collection should remain
}
+ [Fact]
+ public async Task DeleteTag_ShouldReturnTrue_WhenTagDoesNotExist()
+ {
+ // Arrange
+ await SeedSeries();
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Act - Try to delete a non-existent tag
+ var result = await _service.DeleteTag(999, user);
+
+ // Assert
+ Assert.True(result); // Should return true because the tag is already "deleted"
+ Assert.Equal(2, user.Collections.Count); // Both collections should remain
+ }
+
+ [Fact]
+ public async Task DeleteTag_ShouldNotAffectOtherTags()
+ {
+ // Arrange
+ await SeedSeries();
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Act
+ var result = await _service.DeleteTag(1, user);
+
+ // Assert
+ Assert.True(result);
+ var remainingTag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(remainingTag);
+ Assert.Equal("Tag 2", remainingTag.Title);
+ Assert.True(remainingTag.Promoted);
+ }
+
+ #endregion
+
+ #region UpdateTag
+
[Fact]
public async Task UpdateTag_ShouldUpdateFields()
{
await SeedSeries();
- _context.CollectionTag.Add(new CollectionTagBuilder("UpdateTag_ShouldUpdateFields").WithId(3).WithIsPromoted(true).Build());
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldUpdateFields").WithIsPromoted(true).Build());
+ _unitOfWork.UserRepository.Update(user);
await _unitOfWork.CommitAsync();
- await _service.UpdateTag(new CollectionTagDto()
+ await _service.UpdateTag(new AppUserCollectionDto()
{
Title = "UpdateTag_ShouldUpdateFields",
Id = 3,
Promoted = true,
Summary = "Test Summary",
- });
+ AgeRating = AgeRating.Unknown
+ }, 1);
- var tag = await _unitOfWork.CollectionTagRepository.GetTagAsync(3);
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(3);
Assert.NotNull(tag);
Assert.True(tag.Promoted);
- Assert.True(!string.IsNullOrEmpty(tag.Summary));
+ Assert.False(string.IsNullOrEmpty(tag.Summary));
+ }
+
+ /// <summary>
+ /// UpdateTag should not change any title if non-Kavita source
+ /// </summary>
+ [Fact]
+ public async Task UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource()
+ {
+ await SeedSeries();
+
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource").WithSource(ScrobbleProvider.Mal).Build());
+ _unitOfWork.UserRepository.Update(user);
+ await _unitOfWork.CommitAsync();
+
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "New Title",
+ Id = 3,
+ Promoted = true,
+ Summary = "Test Summary",
+ AgeRating = AgeRating.Unknown
+ }, 1);
+
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(3);
+ Assert.NotNull(tag);
+ Assert.Equal("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource", tag.Title);
+ Assert.False(string.IsNullOrEmpty(tag.Summary));
}
[Fact]
- public async Task AddTagToSeries_ShouldAddTagToAllSeries()
+ public async Task UpdateTag_ShouldThrowException_WhenTagDoesNotExist()
{
+ // Arrange
await SeedSeries();
- var ids = new[] {1, 2};
- await _service.AddTagToSeries(await _unitOfWork.CollectionTagRepository.GetTagAsync(1, CollectionTagIncludes.SeriesMetadata), ids);
- var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(ids);
- Assert.Contains(metadatas.ElementAt(0).CollectionTags, t => t.Title.Equals("Tag 1"));
- Assert.Contains(metadatas.ElementAt(1).CollectionTags, t => t.Title.Equals("Tag 1"));
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Non-existent Tag",
+ Id = 999, // Non-existent ID
+ Promoted = false
+ }, 1));
+
+ Assert.Equal("collection-doesnt-exist", exception.Message);
}
[Fact]
- public async Task RemoveTagFromSeries_ShouldRemoveMultiple()
+ public async Task UpdateTag_ShouldThrowException_WhenUserDoesNotOwnTag()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Create a second user
+ var user2 = new AppUserBuilder("user2", "user2", Seed.DefaultThemes.First()).Build();
+ _unitOfWork.UserRepository.Add(user2);
+ await _unitOfWork.CommitAsync();
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1, // This belongs to user1
+ Promoted = false
+ }, 2)); // User with ID 2
+
+ Assert.Equal("access-denied", exception.Message);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldThrowException_WhenTitleIsEmpty()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = " ", // Empty after trimming
+ Id = 1,
+ Promoted = false
+ }, 1));
+
+ Assert.Equal("collection-tag-title-required", exception.Message);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldThrowException_WhenTitleAlreadyExists()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 2", // Already exists
+ Id = 1, // Trying to rename Tag 1 to Tag 2
+ Promoted = false
+ }, 1));
+
+ Assert.Equal("collection-tag-duplicate", exception.Message);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldUpdateCoverImageSettings()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Act
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ CoverImageLocked = true
+ }, 1);
+
+ // Assert
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.True(tag.CoverImageLocked);
+
+ // Now test unlocking the cover image
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ CoverImageLocked = false
+ }, 1);
+
+ tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.False(tag.CoverImageLocked);
+ Assert.Equal(string.Empty, tag.CoverImage);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldAllowPromoteForAdminRole()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Setup a user with admin role
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+ await AddUserWithRole(user.Id, PolicyConstants.AdminRole);
+
+
+ // Act - Try to promote a tag that wasn't previously promoted
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ Promoted = true
+ }, 1);
+
+ // Assert
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.True(tag.Promoted);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldAllowPromoteForPromoteRole()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Setup a user with promote role
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Mock to return promote role for the user
+ await AddUserWithRole(user.Id, PolicyConstants.PromoteRole);
+
+ // Act - Try to promote a tag that wasn't previously promoted
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ Promoted = true
+ }, 1);
+
+ // Assert
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.True(tag.Promoted);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldNotChangePromotion_WhenUserHasNoPermission()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Setup a user with no special roles
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Act - Try to promote a tag without proper role
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ Promoted = true
+ }, 1);
+
+ // Assert
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.False(tag.Promoted); // Should remain unpromoted
+ }
+ #endregion
+
+
+ #region RemoveTagFromSeries
+
+ [Fact]
+ public async Task RemoveTagFromSeries_RemoveSeriesFromTag()
{
await SeedSeries();
- var ids = new[] {1, 2};
- var tag = await _unitOfWork.CollectionTagRepository.GetTagAsync(2, CollectionTagIncludes.SeriesMetadata);
- await _service.AddTagToSeries(tag, ids);
+
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Tag 2 has 2 series
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(tag);
+
+ await _service.RemoveTagFromSeries(tag, new[] {1});
+ var userCollections = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.Equal(2, userCollections!.Collections.Count);
+ Assert.Single(tag.Items);
+ Assert.Equal(2, tag.Items.First().Id);
+ }
+
+ /// <summary>
+ /// Ensure the rating of the tag updates after a series change
+ /// </summary>
+ [Fact]
+ public async Task RemoveTagFromSeries_RemoveSeriesFromTag_UpdatesRating()
+ {
+ await SeedSeries();
+
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Tag 2 has 2 series
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(tag);
await _service.RemoveTagFromSeries(tag, new[] {1});
- var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {1});
-
- Assert.Single(metadatas);
- Assert.Empty(metadatas.First().CollectionTags);
- Assert.NotEmpty(await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {2}));
+ Assert.Equal(AgeRating.G, tag.AgeRating);
}
+ /// <summary>
+ /// Should remove the tag when there are no items left on the tag
+ /// </summary>
[Fact]
- public async Task GetTagOrCreate_ShouldReturnNewTag()
+ public async Task RemoveTagFromSeries_RemoveSeriesFromTag_DeleteTagWhenNoSeriesLeft()
{
await SeedSeries();
- var tag = await _service.GetTagOrCreate(0, "GetTagOrCreate_ShouldReturnNewTag");
+
+ var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Tag 1 has 1 series
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
- Assert.Equal(0, tag.Id);
- }
-
- [Fact]
- public async Task GetTagOrCreate_ShouldReturnExistingTag()
- {
- await SeedSeries();
- var tag = await _service.GetTagOrCreate(1, "Some new tag");
- Assert.NotNull(tag);
- Assert.Equal(1, tag.Id);
- Assert.Equal("Tag 1", tag.Title);
- }
-
- [Fact]
- public async Task RemoveTagsWithoutSeries_ShouldRemoveAbandonedEntries()
- {
- await SeedSeries();
- // Setup a tag with one series
- var tag = await _service.GetTagOrCreate(0, "Tag with a series");
- await _unitOfWork.CommitAsync();
-
- var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {1});
- tag.SeriesMetadatas.Add(metadatas.First());
- var tagId = tag.Id;
- await _unitOfWork.CommitAsync();
-
- // Validate it doesn't remove tags it shouldn't
- await _service.RemoveTagsWithoutSeries();
- Assert.NotNull(await _unitOfWork.CollectionTagRepository.GetTagAsync(tagId));
await _service.RemoveTagFromSeries(tag, new[] {1});
-
- // Validate it does remove tags it should
- await _service.RemoveTagsWithoutSeries();
- Assert.Null(await _unitOfWork.CollectionTagRepository.GetTagAsync(tagId));
+ var tag2 = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.Null(tag2);
}
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldReturnFalse_WhenTagIsNull()
+ {
+ // Act
+ var result = await _service.RemoveTagFromSeries(null, [1]);
+
+ // Assert
+ Assert.False(result);
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldHandleEmptySeriesIdsList()
+ {
+ // Arrange
+ await SeedSeries();
+
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ var initialItemCount = tag.Items.Count;
+
+ // Act
+ var result = await _service.RemoveTagFromSeries(tag, Array.Empty<int>());
+
+ // Assert
+ Assert.True(result);
+ tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldHandleNonExistentSeriesIds()
+ {
+ // Arrange
+ await SeedSeries();
+
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ var initialItemCount = tag.Items.Count;
+
+ // Act - Try to remove a series that doesn't exist in the tag
+ var result = await _service.RemoveTagFromSeries(tag, [999]);
+
+ // Assert
+ Assert.True(result);
+ tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldHandleNullItemsList()
+ {
+ // Arrange
+ await SeedSeries();
+
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+
+ // Force null items list
+ tag.Items = null;
+ _unitOfWork.CollectionTagRepository.Update(tag);
+ await _unitOfWork.CommitAsync();
+
+ // Act
+ var result = await _service.RemoveTagFromSeries(tag, [1]);
+
+ // Assert
+ Assert.True(result);
+ // The tag should not be removed since the items list was null, not empty
+ var tagAfter = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.Null(tagAfter);
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldUpdateAgeRating_WhenMultipleSeriesRemain()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Add a third series with a different age rating
+ var s3 = new SeriesBuilder("Series 3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.PG).Build()).Build();
+ _context.Library.First().Series.Add(s3);
+ await _unitOfWork.CommitAsync();
+
+ // Add series 3 to tag 2
+ var tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(tag);
+ tag.Items.Add(s3);
+ _unitOfWork.CollectionTagRepository.Update(tag);
+ await _unitOfWork.CommitAsync();
+
+ // Act - Remove the series with Mature rating
+ await _service.RemoveTagFromSeries(tag, new[] {1});
+
+ // Assert
+ tag = await _unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(tag);
+ Assert.Equal(2, tag.Items.Count);
+
+ // The age rating should be updated to the highest remaining rating (PG)
+ Assert.Equal(AgeRating.PG, tag.AgeRating);
+ }
+
+
+ #endregion
+
}
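
For context on the RemoveTagFromSeries tests added above: they assert that once series are removed, a collection's age rating is recalculated to the highest rating among the remaining series (G when only Series 2 remains, PG when PG and G remain). A minimal sketch of that rule under those assumptions follows; the enum values and helper are illustrative, not the actual CollectionTagService implementation.

using System.Collections.Generic;
using System.Linq;

// Placeholder ordering only; the real API.Entities.Enums.AgeRating has its own values
public enum AgeRating { Unknown = 0, G = 1, PG = 2, Teen = 3, Mature = 4 }

public static class CollectionRatingHelper
{
    // Highest rating among the remaining series wins; an empty collection carries no rating.
    public static AgeRating RecalculateRating(IReadOnlyCollection<AgeRating> remainingSeriesRatings) =>
        remainingSeriesRatings.Count == 0 ? AgeRating.Unknown : remainingSeriesRatings.Max();
}
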
diff --git a/API.Tests/Services/DirectoryServiceTests.cs b/API.Tests/Services/DirectoryServiceTests.cs
index 0de244cac..c5216bebf 100644
--- a/API.Tests/Services/DirectoryServiceTests.cs
+++ b/API.Tests/Services/DirectoryServiceTests.cs
@@ -1,20 +1,30 @@
using System;
using System.Collections.Generic;
+using System.Globalization;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
+using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using API.Services;
+using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
+using Xunit.Abstractions;
namespace API.Tests.Services;
-public class DirectoryServiceTests
+public class DirectoryServiceTests: AbstractFsTest
{
private readonly ILogger<DirectoryService> _logger = Substitute.For<ILogger<DirectoryService>>();
+ private readonly ITestOutputHelper _testOutputHelper;
+
+ public DirectoryServiceTests(ITestOutputHelper testOutputHelper)
+ {
+ _testOutputHelper = testOutputHelper;
+ }
#region TraverseTreeParallelForEach
@@ -372,9 +382,16 @@ public class DirectoryServiceTests
#endregion
#region IsDriveMounted
+ // The root directory (/) is always mounted on non windows
[Fact]
public void IsDriveMounted_DriveIsNotMounted()
{
+ if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+ {
+ _testOutputHelper.WriteLine("Skipping test on non Windows platform");
+ return;
+ }
+
const string testDirectory = "c:/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
@@ -386,6 +403,12 @@ public class DirectoryServiceTests
[Fact]
public void IsDriveMounted_DriveIsMounted()
{
+ if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+ {
+ _testOutputHelper.WriteLine("Skipping test on non Windows platform");
+ return;
+ }
+
const string testDirectory = "c:/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
@@ -721,6 +744,54 @@ public class DirectoryServiceTests
#endregion
+ #region FindLowestDirectoriesFromFiles
+
+ [Theory]
+ [InlineData(new [] {"C:/Manga/"},
+ new [] {"C:/Manga/Love Hina/Vol. 01.cbz"},
+ "C:/Manga/Love Hina")]
+ [InlineData(new [] {"C:/Manga/"},
+ new [] {"C:/Manga/Romance/Love Hina/Vol. 01.cbz"},
+ "C:/Manga/Romance/Love Hina")]
+ [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"},
+ new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"},
+ "C:/Manga/Dir 1/Love Hina")]
+ [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"},
+ new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 01.cbz"},
+ null)]
+ [InlineData(new [] {@"C:\mount\drive\Library\Test Library\Comics\"},
+ new [] {@"C:\mount\drive\Library\Test Library\Comics\Bruce Lee (1994)\Bruce Lee #001 (1994).cbz"},
+ @"C:/mount/drive/Library/Test Library/Comics/Bruce Lee (1994)")]
+ [InlineData(new [] {"C:/Manga/"},
+ new [] {"C:/Manga/Love Hina/Vol. 01.cbz", "C:/Manga/Love Hina/Specials/Sp01.cbz"},
+ "C:/Manga/Love Hina")]
+ [InlineData(new [] {"/manga"},
+ new [] {"/manga/Love Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
+ "/manga/Love Hina")]
+ [InlineData(new [] {"/manga"},
+ new [] {"/manga/Love Hina/Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
+ "/manga/Love Hina")]
+ [InlineData(new [] {"/manga"},
+ new [] {"/manga/Dress Up Darling/Dress Up Darling Ch 01.cbz", "/manga/Dress Up Darling/Dress Up Darling/Dress Up Darling Vol 01.cbz"},
+ "/manga/Dress Up Darling")]
+ public void FindLowestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory)
+ {
+ var fileSystem = new MockFileSystem();
+ foreach (var directory in rootDirectories)
+ {
+ fileSystem.AddDirectory(directory);
+ }
+ foreach (var f in files)
+ {
+ fileSystem.AddFile(f, new MockFileData(""));
+ }
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
+
+ var actual = ds.FindLowestDirectoriesFromFiles(rootDirectories, files);
+ Assert.Equal(expectedDirectory, actual);
+ }
+
+ #endregion
#region GetFoldersTillRoot
[Theory]
@@ -851,12 +922,14 @@ public class DirectoryServiceTests
#region GetHumanReadableBytes
[Theory]
- [InlineData(1200, "1.17 KB")]
- [InlineData(1, "1 B")]
- [InlineData(10000000, "9.54 MB")]
- [InlineData(10000000000, "9.31 GB")]
- public void GetHumanReadableBytesTest(long bytes, string expected)
+ [InlineData(1200, 1.17, " KB")]
+ [InlineData(1, 1, " B")]
+ [InlineData(10000000, 9.54, " MB")]
+ [InlineData(10000000000, 9.31, " GB")]
+ public void GetHumanReadableBytesTest(long bytes, float number, string suffix)
{
+ // GetHumanReadableBytes is user facing, should be in CultureInfo.CurrentCulture
+ var expected = number.ToString(CultureInfo.CurrentCulture) + suffix;
Assert.Equal(expected, DirectoryService.GetHumanReadableBytes(bytes));
}
#endregion
@@ -878,8 +951,9 @@ public class DirectoryServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
-
- var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
+ var globMatcher = new GlobMatcher();
+ globMatcher.AddExclude("*.*");
+ var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
Assert.Empty(allFiles);
@@ -903,7 +977,9 @@ public class DirectoryServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
- var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
+ var globMatcher = new GlobMatcher();
+ globMatcher.AddExclude("**/Accel World/*");
+ var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
Assert.Single(allFiles); // Ignore files are not counted in files, only valid extensions
@@ -932,7 +1008,10 @@ public class DirectoryServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
- var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
+ var globMatcher = new GlobMatcher();
+ globMatcher.AddExclude("**/Accel World/*");
+ globMatcher.AddExclude("**/ArtBooks/*");
+ var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
Assert.Equal(2, allFiles.Count); // Ignore files are not counted in files, only valid extensions
@@ -986,11 +1065,14 @@ public class DirectoryServiceTests
#region GetParentDirectory
[Theory]
- [InlineData(@"C:/file.txt", "C:/")]
- [InlineData(@"C:/folder/file.txt", "C:/folder")]
- [InlineData(@"C:/folder/subfolder/file.txt", "C:/folder/subfolder")]
+ [InlineData(@"file.txt", "")]
+ [InlineData(@"folder/file.txt", "folder")]
+ [InlineData(@"folder/subfolder/file.txt", "folder/subfolder")]
public void GetParentDirectoryName_ShouldFindParentOfFiles(string path, string expected)
{
+ path = Root + path;
+ expected = Root + expected;
+
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
{ path, new MockFileData(string.Empty)}
@@ -1000,11 +1082,14 @@ public class DirectoryServiceTests
Assert.Equal(expected, ds.GetParentDirectoryName(path));
}
[Theory]
- [InlineData(@"C:/folder", "C:/")]
- [InlineData(@"C:/folder/subfolder", "C:/folder")]
- [InlineData(@"C:/folder/subfolder/another", "C:/folder/subfolder")]
+ [InlineData(@"folder", "")]
+ [InlineData(@"folder/subfolder", "folder")]
+ [InlineData(@"folder/subfolder/another", "folder/subfolder")]
public void GetParentDirectoryName_ShouldFindParentOfDirectories(string path, string expected)
{
+ path = Root + path;
+ expected = Root + expected;
+
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory(path);
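
For context on the FindLowestDirectoriesFromFiles theory added above: the expected values amount to returning the deepest directory shared by every file that sits under one of the library roots, and null when no file does. A rough sketch under that assumption follows; it is not the DirectoryService implementation and skips edge cases such as the doubled separators in the c:// inline data.

using System;
using System.Collections.Generic;
using System.Linq;

public static class LowestDirectoryFinder
{
    // Deepest directory common to all files under the given roots, or null if none qualify.
    public static string? Find(IEnumerable<string> roots, IEnumerable<string> files)
    {
        static string Normalize(string p) => p.Replace('\\', '/').TrimEnd('/');

        var normalizedRoots = roots.Select(Normalize).ToList();
        var parentDirs = files.Select(Normalize)
            .Where(f => normalizedRoots.Any(r => f.StartsWith(r + "/", StringComparison.OrdinalIgnoreCase)))
            .Select(f => f[..f.LastIndexOf('/')])
            .ToList();
        if (parentDirs.Count == 0) return null;

        // Walk path segments to keep the longest prefix shared by every parent directory
        var common = parentDirs[0].Split('/');
        foreach (var segments in parentDirs.Skip(1).Select(d => d.Split('/')))
        {
            var i = 0;
            while (i < common.Length && i < segments.Length &&
                   string.Equals(common[i], segments[i], StringComparison.OrdinalIgnoreCase)) i++;
            common = common[..i];
        }
        return common.Length == 0 ? null : string.Join('/', common);
    }
}
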
diff --git a/API.Tests/Services/ExternalMetadataServiceTests.cs b/API.Tests/Services/ExternalMetadataServiceTests.cs
new file mode 100644
index 000000000..127bceb7a
--- /dev/null
+++ b/API.Tests/Services/ExternalMetadataServiceTests.cs
@@ -0,0 +1,2860 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using API.Constants;
+using API.Data.Repositories;
+using API.DTOs.KavitaPlus.Metadata;
+using API.DTOs.Recommendation;
+using API.DTOs.Scrobbling;
+using API.Entities;
+using API.Entities.Enums;
+using API.Entities.Metadata;
+using API.Entities.MetadataMatching;
+using API.Entities.Person;
+using API.Helpers.Builders;
+using API.Services.Plus;
+using API.Services.Tasks.Metadata;
+using API.SignalR;
+using Hangfire;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Services;
+
+/// <summary>
+/// Given these rely on Kavita+, this will not have any [Fact]/[Theory] on them and must be manually checked
+/// </summary>
+public class ExternalMetadataServiceTests : AbstractDbTest
+{
+ private readonly ExternalMetadataService _externalMetadataService;
+ private readonly Dictionary<string, Genre> _genreLookup = new Dictionary<string, Genre>();
+ private readonly Dictionary<string, Tag> _tagLookup = new Dictionary<string, Tag>();
+ private readonly Dictionary<string, Person> _personLookup = new Dictionary<string, Person>();
+
+
+ public ExternalMetadataServiceTests()
+ {
+ // Set up Hangfire to use in-memory storage for testing
+ GlobalConfiguration.Configuration.UseInMemoryStorage();
+
+ _externalMetadataService = new ExternalMetadataService(_unitOfWork, Substitute.For<ILogger<ExternalMetadataService>>(),
+ _mapper, Substitute.For(), Substitute.For(), Substitute.For(),
+ Substitute.For());
+ }
+
+ #region Global
+
+ [Fact]
+ public async Task Off_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Summary";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = false;
+ metadataSettings.EnableSummary = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Summary = "Test"
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(string.Empty, postSeries.Metadata.Summary);
+ }
+
+ #endregion
+
+ #region Summary
+
+ [Fact]
+ public async Task Summary_NoExisting_Off_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Summary";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableSummary = false;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Summary = "Test"
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(string.Empty, postSeries.Metadata.Summary);
+ }
+
+ [Fact]
+ public async Task Summary_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Summary";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableSummary = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Summary = "Test"
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.False(string.IsNullOrEmpty(postSeries.Metadata.Summary));
+ Assert.Equal(series.Metadata.Summary, postSeries.Metadata.Summary);
+ }
+
+ [Fact]
+ public async Task Summary_Existing_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Summary";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithSummary("This summary is not locked")
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableSummary = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Summary = "This should not write"
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.False(string.IsNullOrEmpty(postSeries.Metadata.Summary));
+ Assert.Equal("This summary is not locked", postSeries.Metadata.Summary);
+ }
+
+ [Fact]
+ public async Task Summary_Existing_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Summary";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithSummary("This summary is not locked", true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableSummary = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Summary = "This should not write"
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.False(string.IsNullOrEmpty(postSeries.Metadata.Summary));
+ Assert.Equal("This summary is not locked", postSeries.Metadata.Summary);
+ }
+
+ [Fact]
+ public async Task Summary_Existing_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Summary";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithSummary("This summary is not locked", true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableSummary = true;
+ metadataSettings.Overrides = [MetadataSettingField.Summary];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Summary = "This should write"
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.False(string.IsNullOrEmpty(postSeries.Metadata.Summary));
+ Assert.Equal("This should write", postSeries.Metadata.Summary);
+ }
+
+
+ #endregion
+
+ #region Release Year
+
+ [Fact]
+ public async Task ReleaseYear_NoExisting_Off_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Release Year";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableStartDate = false;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ StartDate = DateTime.UtcNow
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(0, postSeries.Metadata.ReleaseYear);
+ }
+
+ [Fact]
+ public async Task ReleaseYear_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Release Year";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableStartDate = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ StartDate = DateTime.UtcNow
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(DateTime.UtcNow.Year, postSeries.Metadata.ReleaseYear);
+ }
+
+ [Fact]
+ public async Task ReleaseYear_Existing_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Release Year";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithReleaseYear(1990)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableStartDate = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ StartDate = DateTime.UtcNow
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(1990, postSeries.Metadata.ReleaseYear);
+ }
+
+ [Fact]
+ public async Task ReleaseYear_Existing_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Release Year";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithReleaseYear(1990, true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableStartDate = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ StartDate = DateTime.UtcNow
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(1990, postSeries.Metadata.ReleaseYear);
+ }
+
+ [Fact]
+ public async Task ReleaseYear_Existing_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Release Year";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithReleaseYear(1990, true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableStartDate = true;
+ metadataSettings.Overrides = [MetadataSettingField.StartDate];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ StartDate = DateTime.UtcNow
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(DateTime.UtcNow.Year, postSeries.Metadata.ReleaseYear);
+ }
+
+ #endregion
+
+ #region LocalizedName
+
+ [Fact]
+ public async Task LocalizedName_NoExisting_Off_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Localized Name";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithLocalizedNameAllowEmpty(string.Empty)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableLocalizedName = false;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Synonyms = [seriesName, "設定しないでください", "Kimchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(string.Empty, postSeries.LocalizedName);
+ }
+
+ [Fact]
+ public async Task LocalizedName_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Localized Name";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithLocalizedNameAllowEmpty(string.Empty)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableLocalizedName = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Synonyms = [seriesName, "設定しないでください", "Kimchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal("Kimchi", postSeries.LocalizedName);
+ }
+
+ [Fact]
+ public async Task LocalizedName_Existing_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Localized Name";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithLocalizedName("Localized Name here")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableLocalizedName = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Synonyms = [seriesName, "設定しないでください", "Kimchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal("Localized Name here", postSeries.LocalizedName);
+ }
+
+ [Fact]
+ public async Task LocalizedName_Existing_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Localized Name";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithLocalizedName("Localized Name here", true)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableLocalizedName = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Synonyms = [seriesName, "設定しないでください", "Kimchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal("Localized Name here", postSeries.LocalizedName);
+ }
+
+ [Fact]
+ public async Task LocalizedName_Existing_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Localized Name";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithLocalizedName("Localized Name here", true)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableLocalizedName = true;
+ metadataSettings.Overrides = [MetadataSettingField.LocalizedName];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Synonyms = [seriesName, "設定しないでください", "Kimchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal("Kimchi", postSeries.LocalizedName);
+ }
+
+ [Fact]
+ public async Task LocalizedName_OnlyNonEnglishSynonyms_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Localized Name";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithLocalizedNameAllowEmpty(string.Empty)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableLocalizedName = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Synonyms = [seriesName, "設定しないでください"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.True(string.IsNullOrEmpty(postSeries.LocalizedName));
+ }
+
+ #endregion
+
+ #region Publication Status
+
+ [Fact]
+ public async Task PublicationStatus_NoExisting_Off_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Publication Status";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("2")
+ .WithChapter(new ChapterBuilder("2").Build())
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePublicationStatus = false;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Volumes = 2
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(PublicationStatus.OnGoing, postSeries.Metadata.PublicationStatus);
+ }
+
+ [Fact]
+ public async Task PublicationStatus_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Publication Status";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("2")
+ .WithChapter(new ChapterBuilder("2").Build())
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePublicationStatus = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Volumes = 2
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(PublicationStatus.Completed, postSeries.Metadata.PublicationStatus);
+ }
+
+ [Fact]
+ public async Task PublicationStatus_Existing_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Publication Status";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPublicationStatus(PublicationStatus.Hiatus)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("2")
+ .WithChapter(new ChapterBuilder("2").Build())
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePublicationStatus = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Volumes = 2
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(PublicationStatus.Completed, postSeries.Metadata.PublicationStatus);
+ }
+
+ [Fact]
+ public async Task PublicationStatus_Existing_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Publication Status";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPublicationStatus(PublicationStatus.Hiatus, true)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("2")
+ .WithChapter(new ChapterBuilder("2").Build())
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePublicationStatus = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Volumes = 2
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(PublicationStatus.Hiatus, postSeries.Metadata.PublicationStatus);
+ }
+
+ [Fact]
+ public async Task PublicationStatus_Existing_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Publication Status";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPublicationStatus(PublicationStatus.Hiatus, true)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("2")
+ .WithChapter(new ChapterBuilder("2").Build())
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePublicationStatus = true;
+ metadataSettings.Overrides = [MetadataSettingField.PublicationStatus];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Volumes = 2
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(PublicationStatus.Completed, postSeries.Metadata.PublicationStatus);
+ }
+
+ [Fact]
+ public async Task PublicationStatus_Existing_CorrectState_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Publication Status";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPublicationStatus(PublicationStatus.Hiatus)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePublicationStatus = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Volumes = 2
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(PublicationStatus.Ended, postSeries.Metadata.PublicationStatus);
+ }
+
+
+
+ #endregion
+
+ #region Age Rating
+
+ [Fact]
+ public async Task AgeRating_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Age Rating";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.AgeRatingMappings = new Dictionary<string, AgeRating>()
+ {
+ {"Ecchi", AgeRating.Teen}, // Genre
+ {"H", AgeRating.R18Plus}, // Tag
+ };
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(AgeRating.Teen, postSeries.Metadata.AgeRating);
+ }
+
+ [Fact]
+ public async Task AgeRating_ExistingHigher_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Age Rating";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithAgeRating(AgeRating.Mature)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.AgeRatingMappings = new Dictionary<string, AgeRating>()
+ {
+ {"Ecchi", AgeRating.Teen}, // Genre
+ {"H", AgeRating.R18Plus}, // Tag
+ };
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(AgeRating.Mature, postSeries.Metadata.AgeRating);
+ }
+
+ [Fact]
+ public async Task AgeRating_ExistingLower_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Age Rating";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithAgeRating(AgeRating.Everyone)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.AgeRatingMappings = new Dictionary<string, AgeRating>()
+ {
+ {"Ecchi", AgeRating.Teen}, // Genre
+ {"H", AgeRating.R18Plus}, // Tag
+ };
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(AgeRating.Teen, postSeries.Metadata.AgeRating);
+ }
+
+ [Fact]
+ public async Task AgeRating_Existing_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Age Rating";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithAgeRating(AgeRating.Everyone, true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.AgeRatingMappings = new Dictionary<string, AgeRating>()
+ {
+ {"Ecchi", AgeRating.Teen}, // Genre
+ {"H", AgeRating.R18Plus}, // Tag
+ };
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(AgeRating.Everyone, postSeries.Metadata.AgeRating);
+ }
+
+ [Fact]
+ public async Task AgeRating_Existing_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Age Rating";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithAgeRating(AgeRating.Everyone, true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.Overrides = [MetadataSettingField.AgeRating];
+ metadataSettings.AgeRatingMappings = new Dictionary<string, AgeRating>()
+ {
+ {"Ecchi", AgeRating.Teen}, // Genre
+ {"H", AgeRating.R18Plus}, // Tag
+ };
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(AgeRating.Teen, postSeries.Metadata.AgeRating);
+ }
+
+ #endregion
+
+ #region Genres
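+ // Covers the genre write path: feature disabled, no existing genres, existing genres, locked genres, and locked genres with an explicit override.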
+
+ [Fact]
+ public async Task Genres_NoExisting_Off_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = false;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal([], postSeries.Metadata.Genres);
+ }
+
+ [Fact]
+ public async Task Genres_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["Ecchi"], postSeries.Metadata.Genres.Select(g => g.Title));
+ }
+
+ [Fact]
+ public async Task Genres_Existing_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithGenre(_genreLookup["Action"])
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["Ecchi"], postSeries.Metadata.Genres.Select(g => g.Title));
+ }
+
+ [Fact]
+ public async Task Genres_Existing_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithGenre(_genreLookup["Action"], true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["Action"], postSeries.Metadata.Genres.Select(g => g.Title));
+ }
+
+ [Fact]
+ public async Task Genres_Existing_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithGenre(_genreLookup["Action"], true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = true;
+ metadataSettings.Overrides = [MetadataSettingField.Genres];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["Ecchi"], postSeries.Metadata.Genres.Select(g => g.Title));
+ }
+
+ #endregion
+
+ #region Tags
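+ // Covers the tag write path: feature disabled, no existing tags, locked tags, and locked tags with an explicit override.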
+
+ [Fact]
+ public async Task Tags_NoExisting_Off_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Tags";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = false;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Boxing"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal([], postSeries.Metadata.Tags);
+ }
+
+ [Fact]
+ public async Task Tags_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Tags";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Boxing"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["Boxing"], postSeries.Metadata.Tags.Select(t => t.Title));
+ }
+
+ [Fact]
+ public async Task Tags_Existing_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Tags";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithTag(_tagLookup["H"], true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Boxing"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["H"], postSeries.Metadata.Tags.Select(t => t.Title));
+ }
+
+ [Fact]
+ public async Task Tags_Existing_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Tags";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithTag(_tagLookup["H"], true)
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.Overrides = [MetadataSettingField.Tags];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Boxing"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["Boxing"], postSeries.Metadata.Tags.Select(t => t.Title));
+ }
+
+ #endregion
+
+ #region People - Writers/Artists
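+ // Staff writing is gated by EnablePeople and PersonRoles, honors WriterLocked unless People is overridden, and formats names per FirstLastPeopleNaming.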
+
+ [Fact]
+ public async Task People_Writer_NoExisting_Off_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Writer/Artists";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = false;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Staff = [CreateStaff("John", "Doe", "Story")]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal([], postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer));
+ }
+
+ [Fact]
+ public async Task People_Writer_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Writer/Artists";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Staff = [CreateStaff("John", "Doe", "Story")]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["John Doe"], postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer).Select(p => p.Person.Name));
+ }
+
+ [Fact]
+ public async Task People_Writer_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Writer/Artists";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(_personLookup["Johnny Twowheeler"], PersonRole.Writer)
+ .Build())
+ .Build();
+ series.Metadata.WriterLocked = true;
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Staff = [CreateStaff("John", "Doe", "Story")]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"Johnny Twowheeler"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ }
+
+ [Fact]
+ public async Task People_Writer_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Writer/Artists";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(_personLookup["Johnny Twowheeler"], PersonRole.Writer)
+ .Build())
+ .Build();
+ series.Metadata.WriterLocked = true;
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ metadataSettings.Overrides = [MetadataSettingField.People];
+ metadataSettings.PersonRoles = [PersonRole.Writer];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Staff = [CreateStaff("John", "Doe", "Story")]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"John Doe", "Johnny Twowheeler"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ Assert.True( postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer)
+ .FirstOrDefault(p => p.Person.Name == "John Doe")!.KavitaPlusConnection);
+ }
+
+ [Fact]
+ public async Task People_Writer_Locked_Override_ReverseNamingMatch_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Writer/Artists";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(_personLookup["Johnny Twowheeler"], PersonRole.Writer)
+ .Build())
+ .Build();
+ series.Metadata.WriterLocked = true;
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = false;
+ metadataSettings.Overrides = [MetadataSettingField.People];
+ metadataSettings.PersonRoles = [PersonRole.Writer];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Staff = [CreateStaff("Twowheeler", "Johnny", "Story")]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"Johnny Twowheeler"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ }
+
+ [Fact]
+ public async Task People_Writer_Locked_Override_PersonRoleNotSet_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Writer/Artists";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(_personLookup["Johnny Twowheeler"], PersonRole.Writer)
+ .Build())
+ .Build();
+ series.Metadata.WriterLocked = true;
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ metadataSettings.Overrides = [MetadataSettingField.People];
+ metadataSettings.PersonRoles = [];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Staff = [CreateStaff("John", "Doe", "Story")]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"Johnny Twowheeler"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ }
+
+
+ [Fact]
+ public async Task People_Writer_OverrideReMatchDeletesOld_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Writer/Artists";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ metadataSettings.Overrides = [MetadataSettingField.People];
+ metadataSettings.PersonRoles = [PersonRole.Writer];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Staff = [CreateStaff("John", "Doe", "Story")]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"John Doe"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Staff = [CreateStaff("John", "Doe 2", "Story")]
+ }, 1);
+
+ postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"John Doe 2"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Writer)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ }
+
+ #endregion
+
+ #region People - Characters
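+ // Mirrors the writer tests for the Character role, including the locked, override, reverse-naming and re-match scenarios.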
+
+ [Fact]
+ public async Task People_Character_NoExisting_Off_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Character";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = false;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Characters = [CreateCharacter("John", "Doe", CharacterRole.Main)]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal([], postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character));
+ }
+
+ [Fact]
+ public async Task People_Character_NoExisting_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Character";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Characters = [CreateCharacter("John", "Doe", CharacterRole.Main)]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["John Doe"], postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character).Select(p => p.Person.Name));
+ }
+
+ [Fact]
+ public async Task People_Character_Locked_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Character";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(_personLookup["Johnny Twowheeler"], PersonRole.Character)
+ .Build())
+ .Build();
+ series.Metadata.CharacterLocked = true;
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Characters = [CreateCharacter("John", "Doe", CharacterRole.Main)]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"Johnny Twowheeler"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ }
+
+ [Fact]
+ public async Task People_Character_Locked_Override_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Character";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(_personLookup["Johnny Twowheeler"], PersonRole.Character)
+ .Build())
+ .Build();
+ series.Metadata.CharacterLocked = true;
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ metadataSettings.Overrides = [MetadataSettingField.People];
+ metadataSettings.PersonRoles = [PersonRole.Character];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Characters = [CreateCharacter("John", "Doe", CharacterRole.Main)]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"John Doe", "Johnny Twowheeler"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ Assert.True( postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character)
+ .FirstOrDefault(p => p.Person.Name == "John Doe")!.KavitaPlusConnection);
+ }
+
+ [Fact]
+ public async Task People_Character_Locked_Override_ReverseNamingNoMatch_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Character";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(_personLookup["Johnny Twowheeler"], PersonRole.Character)
+ .Build())
+ .Build();
+ series.Metadata.CharacterLocked = true;
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = false;
+ metadataSettings.Overrides = [MetadataSettingField.People];
+ metadataSettings.PersonRoles = [PersonRole.Character];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Characters = [CreateCharacter("Twowheeler", "Johnny", CharacterRole.Main)]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"Johnny Twowheeler", "Twowheeler Johnny"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ }
+
+ [Fact]
+ public async Task People_Character_Locked_Override_PersonRoleNotSet_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Character";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(_personLookup["Johnny Twowheeler"], PersonRole.Character)
+ .Build())
+ .Build();
+ series.Metadata.CharacterLocked = true;
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ metadataSettings.Overrides = [MetadataSettingField.People];
+ metadataSettings.PersonRoles = [];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Characters = [CreateCharacter("John", "Doe", CharacterRole.Main)]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"Johnny Twowheeler"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ }
+
+
+ [Fact]
+ public async Task People_Character_OverrideReMatchDeletesOld_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - People - Character";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnablePeople = true;
+ metadataSettings.FirstLastPeopleNaming = true;
+ metadataSettings.Overrides = [MetadataSettingField.People];
+ metadataSettings.PersonRoles = [PersonRole.Character];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Characters = [CreateCharacter("John", "Doe", CharacterRole.Main)]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"John Doe"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Characters = [CreateCharacter("John", "Doe 2", CharacterRole.Main)]
+ }, 1);
+
+ postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[]{"John Doe 2"}.OrderBy(s => s),
+ postSeries.Metadata.People.Where(p => p.Role == PersonRole.Character)
+ .Select(p => p.Person.Name)
+ .OrderBy(s => s));
+ }
+
+ #endregion
+
+ #region Series Cover
+ // Not sure how to test this
+ #endregion
+
+ #region Relationships
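+ // Relationship targets are resolved against existing series by title/localized name and media format; Sequel/Prequel links are mirrored on the target series.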
+
+ // Not enabled
+
+ // Non-Sequel
+
+ [Fact]
+ public async Task Relationships_NonSequel()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Relationships Side Story";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+
+ var series2 = new SeriesBuilder("Test - Relationships Side Story - Target")
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .WithExternalMetadata(new ExternalSeriesMetadata()
+ {
+ AniListId = 10
+ })
+ .Build();
+ _context.Series.Attach(series2);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableRelationships = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Relations = [new SeriesRelationship()
+ {
+ Relation = RelationKind.SideStory,
+ SeriesName = new ALMediaTitle()
+ {
+ PreferredTitle = series2.Name,
+ EnglishTitle = null,
+ NativeTitle = series2.Name,
+ RomajiTitle = series2.Name,
+ },
+ AniListId = 10,
+ PlusMediaFormat = PlusMediaFormat.Manga
+ }]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var sourceSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata | SeriesIncludes.Related);
+ Assert.NotNull(sourceSeries);
+ Assert.Single(sourceSeries.Relations);
+ Assert.Equal(series2.Name, sourceSeries.Relations.First().TargetSeries.Name);
+ }
+
+ [Fact]
+ public async Task Relationships_NonSequel_LocalizedName()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Relationships Side Story";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+
+ var series2 = new SeriesBuilder("Test - Relationships Side Story - Target")
+ .WithLibraryId(1)
+ .WithLocalizedName("School bus")
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series2);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableRelationships = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Relations = [new SeriesRelationship()
+ {
+ Relation = RelationKind.SideStory,
+ SeriesName = new ALMediaTitle()
+ {
+ PreferredTitle = "School bus",
+ EnglishTitle = null,
+ NativeTitle = series2.Name,
+ RomajiTitle = series2.Name,
+ },
+ AniListId = 10,
+ PlusMediaFormat = PlusMediaFormat.Manga
+ }]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var sourceSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata | SeriesIncludes.Related);
+ Assert.NotNull(sourceSeries);
+ Assert.Single(sourceSeries.Relations);
+ Assert.Equal(series2.Name, sourceSeries.Relations.First().TargetSeries.Name);
+ }
+
+ // Non-Sequel with no match due to Format difference
+ [Fact]
+ public async Task Relationships_NonSequel_FormatDifference()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Relationships Side Story";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+
+ var series2 = new SeriesBuilder("Test - Relationships Side Story - Target")
+ .WithLibraryId(1)
+ .WithLocalizedName("School bus")
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series2);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableRelationships = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Relations = [new SeriesRelationship()
+ {
+ Relation = RelationKind.SideStory,
+ SeriesName = new ALMediaTitle()
+ {
+ PreferredTitle = "School bus",
+ EnglishTitle = null,
+ NativeTitle = series2.Name,
+ RomajiTitle = series2.Name,
+ },
+ AniListId = 10,
+ PlusMediaFormat = PlusMediaFormat.Book
+ }]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var sourceSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata | SeriesIncludes.Related);
+ Assert.NotNull(sourceSeries);
+ Assert.Empty(sourceSeries.Relations);
+ }
+
+ // Non-Sequel existing relationship with new link, both exist
+ [Fact]
+ public async Task Relationships_NonSequel_ExistingLink_DifferentType_BothExist()
+ {
+ await ResetDb();
+
+ var existingRelationshipSeries = new SeriesBuilder("Existing")
+ .WithLibraryId(1)
+ .Build();
+ _context.Series.Attach(existingRelationshipSeries);
+ await _context.SaveChangesAsync();
+
+ const string seriesName = "Test - Relationships Side Story";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithRelationship(existingRelationshipSeries.Id, RelationKind.Annual)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+
+ var series2 = new SeriesBuilder("Test - Relationships Side Story - Target")
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .WithExternalMetadata(new ExternalSeriesMetadata()
+ {
+ AniListId = 10
+ })
+ .Build();
+ _context.Series.Attach(series2);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableRelationships = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Relations = [new SeriesRelationship()
+ {
+ Relation = RelationKind.SideStory,
+ SeriesName = new ALMediaTitle()
+ {
+ PreferredTitle = series2.Name,
+ EnglishTitle = null,
+ NativeTitle = series2.Name,
+ RomajiTitle = series2.Name,
+ },
+ PlusMediaFormat = PlusMediaFormat.Manga
+ }]
+ }, 2);
+
+ // Repull Series and validate what is overwritten
+ var sourceSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(2, SeriesIncludes.Metadata | SeriesIncludes.Related);
+ Assert.NotNull(sourceSeries);
+ Assert.Equal(seriesName, sourceSeries.Name);
+
+ Assert.Contains(sourceSeries.Relations, r => r.RelationKind == RelationKind.Annual && r.TargetSeriesId == existingRelationshipSeries.Id);
+ Assert.Contains(sourceSeries.Relations, r => r.RelationKind == RelationKind.SideStory && r.TargetSeriesId == series2.Id);
+ }
+
+
+
+ // Sequel/Prequel
+ [Fact]
+ public async Task Relationships_Sequel_CreatesPrequel()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Relationships Source";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+
+ var series2 = new SeriesBuilder("Test - Relationships Target")
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series2);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableRelationships = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Relations = [new SeriesRelationship()
+ {
+ Relation = RelationKind.Sequel,
+ SeriesName = new ALMediaTitle()
+ {
+ PreferredTitle = series2.Name,
+ EnglishTitle = null,
+ NativeTitle = series2.Name,
+ RomajiTitle = series2.Name,
+ },
+ PlusMediaFormat = PlusMediaFormat.Manga
+ }]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var sourceSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata | SeriesIncludes.Related);
+ Assert.NotNull(sourceSeries);
+ Assert.Single(sourceSeries.Relations);
+ Assert.Equal(series2.Name, sourceSeries.Relations.First().TargetSeries.Name);
+
+ var sequel = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(2, SeriesIncludes.Metadata | SeriesIncludes.Related);
+ Assert.NotNull(sequel);
+ Assert.Equal(seriesName, sequel.Relations.First().TargetSeries.Name);
+ }
+
+ [Fact]
+ public async Task Relationships_Prequel_CreatesSequel()
+ {
+ await ResetDb();
+
+ // ID 1: Blue Lock - Episode Nagi
+ var series = new SeriesBuilder("Blue Lock - Episode Nagi")
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+
+ // ID 2: Blue Lock
+ var series2 = new SeriesBuilder("Blue Lock")
+ .WithLibraryId(1)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series2);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableRelationships = true;
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+ // Apply to Blue Lock - Episode Nagi (ID 1), setting Blue Lock (ID 2) as its prequel
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = "Blue Lock - Episode Nagi", // The series we're updating metadata for
+ Relations = [new SeriesRelationship()
+ {
+ Relation = RelationKind.Prequel, // Blue Lock is the prequel to Nagi
+ SeriesName = new ALMediaTitle()
+ {
+ PreferredTitle = "Blue Lock",
+ EnglishTitle = "Blue Lock",
+ NativeTitle = "ブルーロック",
+ RomajiTitle = "Blue Lock",
+ },
+ PlusMediaFormat = PlusMediaFormat.Manga,
+ AniListId = 106130,
+ MalId = 114745,
+ Provider = ScrobbleProvider.AniList
+ }]
+ }, 1); // Apply to series ID 1 (Nagi)
+
+ // Verify Blue Lock - Episode Nagi has Blue Lock as prequel
+ var nagiSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata | SeriesIncludes.Related);
+ Assert.NotNull(nagiSeries);
+ Assert.Single(nagiSeries.Relations);
+ Assert.Equal("Blue Lock", nagiSeries.Relations.First().TargetSeries.Name);
+ Assert.Equal(RelationKind.Prequel, nagiSeries.Relations.First().RelationKind);
+
+ // Verify Blue Lock has Blue Lock - Episode Nagi as sequel
+ var blueLockSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(2, SeriesIncludes.Metadata | SeriesIncludes.Related);
+ Assert.NotNull(blueLockSeries);
+ Assert.Single(blueLockSeries.Relations);
+ Assert.Equal("Blue Lock - Episode Nagi", blueLockSeries.Relations.First().TargetSeries.Name);
+ Assert.Equal(RelationKind.Sequel, blueLockSeries.Relations.First().RelationKind);
+ }
+
+
+ #endregion
+
+ #region Blacklist
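+ // Blacklisted names are stripped from incoming genres and tags before they are written to the series.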
+
+ [Fact]
+ public async Task Blacklist_Genres()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Blacklist Genres";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.EnableGenres = true;
+ metadataSettings.Blacklist = ["Sports", "Action"];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Boxing", "Sports", "Action"],
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[] {"Boxing"}.OrderBy(s => s), postSeries.Metadata.Genres.Select(t => t.Title).OrderBy(s => s));
+ }
+
+
+ [Fact]
+ public async Task Blacklist_Tags()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Blacklist Tags";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.EnableGenres = true;
+ metadataSettings.Blacklist = ["Sports", "Action"];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Boxing"}, new MetadataTagDto() {Name = "Sports"}, new MetadataTagDto() {Name = "Action"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[] {"Boxing"}.OrderBy(s => s), postSeries.Metadata.Tags.Select(t => t.Title).OrderBy(s => s));
+ }
+
+ // Blacklist Tag
+
+ // Field Map then Blacklist Genre
+
+ // Field Map then Blacklist Tag
+
+ #endregion
+
+ #region Whitelist
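+ // With a whitelist set, only whitelisted tags survive; field mappings are applied before the whitelist is enforced.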
+
+ [Fact]
+ public async Task Whitelist_Tags()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Whitelist Tags";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.Whitelist = ["Sports", "Action"];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Boxing"}, new MetadataTagDto() {Name = "Sports"}, new MetadataTagDto() {Name = "Action"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[] {"Sports", "Action"}.OrderBy(s => s), postSeries.Metadata.Tags.Select(t => t.Title).OrderBy(s => s));
+ }
+
+ [Fact]
+ public async Task Whitelist_WithFieldMap_Tags()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Whitelist Tags";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.FieldMappings = [new MetadataFieldMapping()
+ {
+ SourceType = MetadataFieldType.Tag,
+ SourceValue = "Boxing",
+ DestinationType = MetadataFieldType.Tag,
+ DestinationValue = "Sports",
+ ExcludeFromSource = false
+
+ }];
+ metadataSettings.Whitelist = ["Sports", "Action"];
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Boxing"}, new MetadataTagDto() {Name = "Action"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(new[] {"Sports", "Action"}.OrderBy(s => s), postSeries.Metadata.Tags.Select(t => t.Title).OrderBy(s => s));
+ }
+
+ #endregion
+
+ #region Field Mapping
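+ // Field mappings translate an incoming genre/tag value into another genre or tag, optionally dropping the source value (ExcludeFromSource).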
+
+ [Fact]
+ public async Task FieldMap_GenreToGenre_KeepSource_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres Field Mapping";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = true;
+ metadataSettings.Overrides = [MetadataSettingField.Genres];
+ metadataSettings.FieldMappings = [new MetadataFieldMapping()
+ {
+ SourceType = MetadataFieldType.Genre,
+ SourceValue = "Ecchi",
+ DestinationType = MetadataFieldType.Genre,
+ DestinationValue = "Fanservice",
+ ExcludeFromSource = false
+
+ }];
+
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(
+ new[] { "Ecchi", "Fanservice" }.OrderBy(s => s),
+ postSeries.Metadata.Genres.Select(g => g.Title).OrderBy(s => s)
+ );
+ }
+
+ [Fact]
+ public async Task FieldMap_GenreToGenre_RemoveSource_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres Field Mapping";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = true;
+ metadataSettings.Overrides = [MetadataSettingField.Genres];
+ metadataSettings.FieldMappings = [new MetadataFieldMapping()
+ {
+ SourceType = MetadataFieldType.Genre,
+ SourceValue = "Ecchi",
+ DestinationType = MetadataFieldType.Genre,
+ DestinationValue = "Fanservice",
+ ExcludeFromSource = true
+
+ }];
+
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["Fanservice"], postSeries.Metadata.Genres.Select(g => g.Title));
+ }
+
+ [Fact]
+ public async Task FieldMap_TagToTag_KeepSource_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Tag Field Mapping";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.FieldMappings = [new MetadataFieldMapping()
+ {
+ SourceType = MetadataFieldType.Tag,
+ SourceValue = "Ecchi",
+ DestinationType = MetadataFieldType.Tag,
+ DestinationValue = "Fanservice",
+ ExcludeFromSource = false
+
+ }];
+
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Ecchi"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(
+ new[] { "Ecchi", "Fanservice" }.OrderBy(s => s),
+ postSeries.Metadata.Tags.Select(g => g.Title).OrderBy(s => s)
+ );
+ }
+
+ [Fact]
+ public async Task FieldMap_TagToTag_RemoveSource_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Tag Field Mapping";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.Overrides = [MetadataSettingField.Genres];
+ metadataSettings.FieldMappings = [new MetadataFieldMapping()
+ {
+ SourceType = MetadataFieldType.Tag,
+ SourceValue = "Ecchi",
+ DestinationType = MetadataFieldType.Tag,
+ DestinationValue = "Fanservice",
+ ExcludeFromSource = true
+
+ }];
+
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Tags = [new MetadataTagDto() {Name = "Ecchi"}]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(["Fanservice"], postSeries.Metadata.Tags.Select(g => g.Title));
+ }
+
+ [Fact]
+ public async Task FieldMap_GenreToTag_KeepSource_Modification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres Field Mapping";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.Overrides = [MetadataSettingField.Genres, MetadataSettingField.Tags];
+ metadataSettings.FieldMappings = [new MetadataFieldMapping()
+ {
+ SourceType = MetadataFieldType.Genre,
+ SourceValue = "Ecchi",
+ DestinationType = MetadataFieldType.Tag,
+ DestinationValue = "Fanservice",
+ ExcludeFromSource = false
+
+ }];
+
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ Genres = ["Ecchi"]
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(
+ new[] {"Ecchi"}.OrderBy(s => s),
+ postSeries.Metadata.Genres.Select(g => g.Title).OrderBy(s => s)
+ );
+ Assert.Equal(
+ new[] {"Fanservice"}.OrderBy(s => s),
+ postSeries.Metadata.Tags.Select(g => g.Title).OrderBy(s => s)
+ );
+ }
+
+
+
+ [Fact]
+ public async Task FieldMap_GenreToGenre_RemoveSource_NoExternalGenre_NoModification()
+ {
+ await ResetDb();
+
+ const string seriesName = "Test - Genres Field Mapping";
+ var series = new SeriesBuilder(seriesName)
+ .WithLibraryId(1)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithGenre(_genreLookup["Action"])
+ .Build())
+ .Build();
+ _context.Series.Attach(series);
+ await _context.SaveChangesAsync();
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = true;
+ metadataSettings.EnableGenres = true;
+ metadataSettings.EnableTags = true;
+ metadataSettings.Overrides = [MetadataSettingField.Genres, MetadataSettingField.Tags];
+ metadataSettings.FieldMappings = [new MetadataFieldMapping()
+ {
+ SourceType = MetadataFieldType.Genre,
+ SourceValue = "Action",
+ DestinationType = MetadataFieldType.Genre,
+ DestinationValue = "Adventure",
+ ExcludeFromSource = true
+
+ }];
+
+ _context.MetadataSettings.Update(metadataSettings);
+ await _context.SaveChangesAsync();
+
+
+ await _externalMetadataService.WriteExternalMetadataToSeries(new ExternalSeriesDetailDto()
+ {
+ Name = seriesName,
+ }, 1);
+
+ // Repull Series and validate what is overwritten
+ var postSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Metadata);
+ Assert.NotNull(postSeries);
+ Assert.Equal(
+ new[] {"Action"}.OrderBy(s => s),
+ postSeries.Metadata.Genres.Select(g => g.Title).OrderBy(s => s)
+ );
+ }
+
+ #endregion
+
+
+
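+ // Clears the Series/AppUser/Genre/Tag/Person tables, disables every metadata setting, and reseeds the lookup entities used across these field-mapping tests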
+ protected override async Task ResetDb()
+ {
+ _context.Series.RemoveRange(_context.Series);
+ _context.AppUser.RemoveRange(_context.AppUser);
+ _context.Genre.RemoveRange(_context.Genre);
+ _context.Tag.RemoveRange(_context.Tag);
+ _context.Person.RemoveRange(_context.Person);
+
+ var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettings();
+ metadataSettings.Enabled = false;
+ metadataSettings.EnableSummary = false;
+ metadataSettings.EnableCoverImage = false;
+ metadataSettings.EnableLocalizedName = false;
+ metadataSettings.EnableGenres = false;
+ metadataSettings.EnablePeople = false;
+ metadataSettings.EnableRelationships = false;
+ metadataSettings.EnableTags = false;
+ metadataSettings.EnablePublicationStatus = false;
+ metadataSettings.EnableStartDate = false;
+ _context.MetadataSettings.Update(metadataSettings);
+
+ await _context.SaveChangesAsync();
+
+ _context.AppUser.Add(new AppUserBuilder("Joe", "Joe")
+ .WithRole(PolicyConstants.AdminRole)
+ .WithLibrary(await _context.Library.FirstAsync(l => l.Id == 1))
+ .Build());
+
+ // Create a set of Genres for this test and store them in _genreLookup keyed by name
+ _genreLookup.Clear();
+ var g1 = new GenreBuilder("Action").Build();
+ var g2 = new GenreBuilder("Ecchi").Build();
+ _context.Genre.Add(g1);
+ _context.Genre.Add(g2);
+ _genreLookup.Add("Action", g1);
+ _genreLookup.Add("Ecchi", g2);
+
+ _tagLookup.Clear();
+ var t1 = new TagBuilder("H").Build();
+ var t2 = new TagBuilder("Boxing").Build();
+ _context.Tag.Add(t1);
+ _context.Tag.Add(t2);
+ _tagLookup.Add("H", t1);
+ _tagLookup.Add("Boxing", t2);
+
+ _personLookup.Clear();
+ var p1 = new PersonBuilder("Johnny Twowheeler").Build();
+ var p2 = new PersonBuilder("Boxing").Build();
+ _context.Person.Add(p1);
+ _context.Person.Add(p2);
+ _personLookup.Add("Johnny Twowheeler", p1);
+ _personLookup.Add("Batman Robin", p2);
+
+ await _context.SaveChangesAsync();
+ }
+
+ private static SeriesStaffDto CreateStaff(string first, string last, string role)
+ {
+ return new SeriesStaffDto() {Name = $"{first} {last}", Role = role, Url = "", FirstName = first, LastName = last};
+ }
+
+ private static SeriesCharacter CreateCharacter(string first, string last, CharacterRole role)
+ {
+ return new SeriesCharacter() {Name = $"{first} {last}", Description = "", Url = "", ImageUrl = "", Role = role};
+ }
+}
diff --git a/API.Tests/Services/ImageServiceTests.cs b/API.Tests/Services/ImageServiceTests.cs
new file mode 100644
index 000000000..a1073a55b
--- /dev/null
+++ b/API.Tests/Services/ImageServiceTests.cs
@@ -0,0 +1,221 @@
+using System.IO;
+using System.Linq;
+using System.Text;
+using API.Entities.Enums;
+using API.Services;
+using NetVips;
+using Xunit;
+using Image = NetVips.Image;
+
+namespace API.Tests.Services;
+
+public class ImageServiceTests
+{
+ private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageService/Covers");
+ private readonly string _testDirectoryColorScapes = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageService/ColorScapes");
+ private const string OutputPattern = "_output";
+ private const string BaselinePattern = "_baseline";
+
+ /// <summary>
+ /// Run this once to get the baseline generation
+ /// </summary>
+ [Fact]
+ public void GenerateBaseline()
+ {
+ GenerateFiles(BaselinePattern);
+ Assert.True(true);
+ }
+
+ /// <summary>
+ /// Change the Scaling/Crop code then run this continuously
+ /// </summary>
+ [Fact]
+ public void TestScaling()
+ {
+ GenerateFiles(OutputPattern);
+ GenerateHtmlFile();
+ Assert.True(true);
+ }
+
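+ // Regenerates a thumbnail for every source cover in the test directory using the current scaling/crop logic, writing files with the given suffix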
+ private void GenerateFiles(string outputExtension)
+ {
+ // Step 1: Delete any images that have _output in the name
+ var outputFiles = Directory.GetFiles(_testDirectory, "*_output.*");
+ foreach (var file in outputFiles)
+ {
+ File.Delete(file);
+ }
+
+ // Step 2: Scan the _testDirectory for images
+ var imageFiles = Directory.GetFiles(_testDirectory, "*.*")
+ .Where(file => !file.EndsWith("html"))
+ .Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
+ .ToList();
+
+ // Step 3: Process each image
+ foreach (var imagePath in imageFiles)
+ {
+ var fileName = Path.GetFileNameWithoutExtension(imagePath);
+ var dims = CoverImageSize.Default.GetDimensions();
+ using var sourceImage = Image.NewFromFile(imagePath, false, Enums.Access.SequentialUnbuffered);
+
+ var size = ImageService.GetSizeForDimensions(sourceImage, dims.Width, dims.Height);
+ var crop = ImageService.GetCropForDimensions(sourceImage, dims.Width, dims.Height);
+
+ using var thumbnail = Image.Thumbnail(imagePath, dims.Width, dims.Height,
+ size: size,
+ crop: crop);
+
+ var outputFileName = fileName + outputExtension + ".png";
+ thumbnail.WriteToFile(Path.Join(_testDirectory, outputFileName));
+ }
+ }
+
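+ // Writes an index.html that places each source cover beside its baseline and output thumbnails for manual visual comparison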
+ private void GenerateHtmlFile()
+ {
+ var imageFiles = Directory.GetFiles(_testDirectory, "*.*")
+ .Where(file => !file.EndsWith("html"))
+ .Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
+ .ToList();
+
+ var htmlBuilder = new StringBuilder();
+ htmlBuilder.AppendLine("<!DOCTYPE html>");
+ htmlBuilder.AppendLine("<html lang=\"en\">");
+ htmlBuilder.AppendLine("<head>");
+ htmlBuilder.AppendLine("<meta charset=\"UTF-8\">");
+ htmlBuilder.AppendLine("<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">");
+ htmlBuilder.AppendLine("<title>Image Comparison</title>");
+ htmlBuilder.AppendLine("<style>.image-row { display: flex; align-items: center; margin-bottom: 20px; } .image-row img { margin-right: 10px; }</style>");
+ htmlBuilder.AppendLine("</head>");
+ htmlBuilder.AppendLine("<body>");
+ htmlBuilder.AppendLine("<h1>Image Comparison</h1>");
+
+ foreach (var imagePath in imageFiles)
+ {
+ var fileName = Path.GetFileNameWithoutExtension(imagePath);
+ var baselinePath = Path.Combine(_testDirectory, fileName + "_baseline.png");
+ var outputPath = Path.Combine(_testDirectory, fileName + "_output.png");
+ var dims = CoverImageSize.Default.GetDimensions();
+
+ using var sourceImage = Image.NewFromFile(imagePath, false, Enums.Access.SequentialUnbuffered);
+ htmlBuilder.AppendLine("<div class=\"image-row\">");
+ htmlBuilder.AppendLine($"<p>{fileName} ({((double) sourceImage.Width / sourceImage.Height).ToString("F2")}) - {ImageService.WillScaleWell(sourceImage, dims.Width, dims.Height)}</p>");
+ htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(imagePath)}\" alt=\"{fileName} original\"/>");
+ if (File.Exists(baselinePath))
+ {
+ htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(baselinePath)}\" alt=\"{fileName} baseline\"/>");
+ }
+ if (File.Exists(outputPath))
+ {
+ htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(outputPath)}\" alt=\"{fileName} output\"/>");
+ }
+ htmlBuilder.AppendLine("</div>");
+ }
+
+ htmlBuilder.AppendLine("</body>");
+ htmlBuilder.AppendLine("</html>");
+
+ File.WriteAllText(Path.Combine(_testDirectory, "index.html"), htmlBuilder.ToString());
+ }
+
+
+ [Fact]
+ public void TestColorScapes()
+ {
+ // Step 1: Delete any images that have _output in the name
+ var outputFiles = Directory.GetFiles(_testDirectoryColorScapes, "*_output.*");
+ foreach (var file in outputFiles)
+ {
+ File.Delete(file);
+ }
+
+ // Step 2: Scan the _testDirectory for images
+ var imageFiles = Directory.GetFiles(_testDirectoryColorScapes, "*.*")
+ .Where(file => !file.EndsWith("html"))
+ .Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
+ .ToList();
+
+ // Step 3: Process each image
+ foreach (var imagePath in imageFiles)
+ {
+ var fileName = Path.GetFileNameWithoutExtension(imagePath);
+ var colors = ImageService.CalculateColorScape(imagePath);
+
+ // Generate primary color image
+ GenerateColorImage(colors.Primary, Path.Combine(_testDirectoryColorScapes, $"{fileName}_primary_output.png"));
+
+ // Generate secondary color image
+ GenerateColorImage(colors.Secondary, Path.Combine(_testDirectoryColorScapes, $"{fileName}_secondary_output.png"));
+ }
+
+ // Step 4: Generate HTML file
+ GenerateHtmlFileForColorScape();
+ Assert.True(true);
+ }
+
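+ // Renders a flat 200x100 swatch of the given hex color so the computed ColorScape values can be inspected next to their source image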
+ private static void GenerateColorImage(string hexColor, string outputPath)
+ {
+ var color = ImageService.HexToRgb(hexColor);
+ using var colorImage = Image.Black(200, 100);
+ using var output = colorImage + new[] { color.R / 255.0, color.G / 255.0, color.B / 255.0 };
+ output.WriteToFile(outputPath);
+ }
+
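+ // Writes colorscape_index.html showing each source image beside its generated primary and secondary color swatches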
+ private void GenerateHtmlFileForColorScape()
+ {
+ var imageFiles = Directory.GetFiles(_testDirectoryColorScapes, "*.*")
+ .Where(file => !file.EndsWith("html"))
+ .Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
+ .ToList();
+
+ var htmlBuilder = new StringBuilder();
+ htmlBuilder.AppendLine("<!DOCTYPE html>");
+ htmlBuilder.AppendLine("<html lang=\"en\">");
+ htmlBuilder.AppendLine("<head>");
+ htmlBuilder.AppendLine("<meta charset=\"UTF-8\">");
+ htmlBuilder.AppendLine("<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">");
+ htmlBuilder.AppendLine("<title>Color Scape Comparison</title>");
+ htmlBuilder.AppendLine("<style>.color-row { display: flex; align-items: center; margin-bottom: 20px; } .color-row img { margin-right: 10px; }</style>");
+ htmlBuilder.AppendLine("</head>");
+ htmlBuilder.AppendLine("<body>");
+ htmlBuilder.AppendLine("<h1>Color Scape Comparison</h1>");
+
+ foreach (var imagePath in imageFiles)
+ {
+ var fileName = Path.GetFileNameWithoutExtension(imagePath);
+ var primaryPath = Path.Combine(_testDirectoryColorScapes, $"{fileName}_primary_output.png");
+ var secondaryPath = Path.Combine(_testDirectoryColorScapes, $"{fileName}_secondary_output.png");
+
+ htmlBuilder.AppendLine("<div class=\"color-row\">");
+ htmlBuilder.AppendLine($"<p>{fileName}</p>");
+ htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(imagePath)}\" alt=\"{fileName}\"/>");
+ if (File.Exists(primaryPath))
+ {
+ htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(primaryPath)}\" alt=\"{fileName} primary color\"/>");
+ }
+ if (File.Exists(secondaryPath))
+ {
+ htmlBuilder.AppendLine($"<img src=\"./{Path.GetFileName(secondaryPath)}\" alt=\"{fileName} secondary color\"/>");
+ }
+ htmlBuilder.AppendLine("</div>");
+ }
+
+ htmlBuilder.AppendLine("</body>");
+ htmlBuilder.AppendLine("</html>");
+
+ File.WriteAllText(Path.Combine(_testDirectoryColorScapes, "colorscape_index.html"), htmlBuilder.ToString());
+ }
+}
diff --git a/API.Tests/Services/ParseScannedFilesTests.cs b/API.Tests/Services/ParseScannedFilesTests.cs
index a0f5aa90b..f81ebd3c4 100644
--- a/API.Tests/Services/ParseScannedFilesTests.cs
+++ b/API.Tests/Services/ParseScannedFilesTests.cs
@@ -1,37 +1,41 @@
using System;
using System.Collections.Generic;
-using System.Data.Common;
+using System.IO;
+using System.IO.Abstractions;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
-using API.Data;
using API.Data.Metadata;
using API.Data.Repositories;
-using API.Entities;
using API.Entities.Enums;
-using API.Extensions;
-using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner;
using API.Services.Tasks.Scanner.Parser;
using API.SignalR;
-using AutoMapper;
-using Microsoft.Data.Sqlite;
-using Microsoft.EntityFrameworkCore;
-using Microsoft.EntityFrameworkCore.Infrastructure;
+using API.Tests.Helpers;
+using Hangfire;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
+using Xunit.Abstractions;
namespace API.Tests.Services;
-internal class MockReadingItemService : IReadingItemService
+public class MockReadingItemService : IReadingItemService
{
- private readonly IDefaultParser _defaultParser;
+ private readonly BasicParser _basicParser;
+ private readonly ComicVineParser _comicVineParser;
+ private readonly ImageParser _imageParser;
+ private readonly BookParser _bookParser;
+ private readonly PdfParser _pdfParser;
- public MockReadingItemService(IDefaultParser defaultParser)
+ public MockReadingItemService(IDirectoryService directoryService, IBookService bookService)
{
- _defaultParser = defaultParser;
+ _imageParser = new ImageParser(directoryService);
+ _basicParser = new BasicParser(directoryService, _imageParser);
+ _bookParser = new BookParser(directoryService, bookService, _basicParser);
+ _comicVineParser = new ComicVineParser(directoryService);
+ _pdfParser = new PdfParser(directoryService);
}
public ComicInfo GetComicInfo(string filePath)
@@ -54,99 +58,57 @@ internal class MockReadingItemService : IReadingItemService
throw new NotImplementedException();
}
- public ParserInfo Parse(string path, string rootPath, LibraryType type)
+ public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type)
{
- return _defaultParser.Parse(path, rootPath, type);
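+ // Try parsers from most specific to least specific, with BasicParser as the final fallback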
+ if (_comicVineParser.IsApplicable(path, type))
+ {
+ return _comicVineParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
+ }
+ if (_imageParser.IsApplicable(path, type))
+ {
+ return _imageParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
+ }
+ if (_bookParser.IsApplicable(path, type))
+ {
+ return _bookParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
+ }
+ if (_pdfParser.IsApplicable(path, type))
+ {
+ return _pdfParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
+ }
+ if (_basicParser.IsApplicable(path, type))
+ {
+ return _basicParser.Parse(path, rootPath, libraryRoot, type, GetComicInfo(path));
+ }
+
+ return null;
}
- public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
+ public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
{
- return _defaultParser.Parse(path, rootPath, type);
+ return Parse(path, rootPath, libraryRoot, type);
}
}
-public class ParseScannedFilesTests
+public class ParseScannedFilesTests : AbstractDbTest
{
 private readonly ILogger<ParseScannedFiles> _logger = Substitute.For<ILogger<ParseScannedFiles>>();
- private readonly IUnitOfWork _unitOfWork;
+ private readonly ScannerHelper _scannerHelper;
- private readonly DbConnection _connection;
- private readonly DataContext _context;
-
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string DataDirectory = "C:/data/";
-
- public ParseScannedFilesTests()
+ public ParseScannedFilesTests(ITestOutputHelper testOutputHelper)
{
- var contextOptions = new DbContextOptionsBuilder<DataContext>()
- .UseSqlite(CreateInMemoryDatabase())
- .Options;
- _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
-
- _context = new DataContext(contextOptions);
- Task.Run(SeedDb).GetAwaiter().GetResult();
-
- _unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
-
// Since ProcessFile relies on _readingItemService, we can implement our own versions of _readingItemService so we have control over how the calls work
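+ // Hangfire is backed by in-memory storage here so any jobs enqueued during scans don't require a real store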
+ GlobalConfiguration.Configuration.UseInMemoryStorage();
+ _scannerHelper = new ScannerHelper(_unitOfWork, testOutputHelper);
}
- #region Setup
-
- private static DbConnection CreateInMemoryDatabase()
- {
- var connection = new SqliteConnection("Filename=:memory:");
-
- connection.Open();
-
- return connection;
- }
-
- private async Task SeedDb()
- {
- await _context.Database.MigrateAsync();
- var filesystem = CreateFileSystem();
-
- await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
-
- var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
- setting.Value = CacheDirectory;
-
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
- setting.Value = BackupDirectory;
-
- _context.ServerSetting.Update(setting);
-
- _context.Library.Add(new LibraryBuilder("Manga")
- .WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
- .Build());
- return await _context.SaveChangesAsync() > 0;
- }
-
- private async Task ResetDB()
+ protected override async Task ResetDb()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(DataDirectory);
-
- return fileSystem;
- }
-
- #endregion
-
#region MergeName
// NOTE: I don't think I can test MergeName as it relies on Tracking Files, which is more complicated than I need
@@ -219,48 +181,45 @@ public class ParseScannedFilesTests
#region ScanLibrariesForSeries
+ /// <summary>
+ /// Test that when a folder has 2 series with a localizedSeries, they combine into one final series
+ /// </summary>
+ // [Fact]
+ // public async Task ScanLibrariesForSeries_ShouldCombineSeries()
+ // {
+ // // TODO: Implement these unit tests
+ // }
+
[Fact]
public async Task ScanLibrariesForSeries_ShouldFindFiles()
{
var fileSystem = new MockFileSystem();
- fileSystem.AddDirectory("C:/Data/");
- fileSystem.AddFile("C:/Data/Accel World v1.cbz", new MockFileData(string.Empty));
- fileSystem.AddFile("C:/Data/Accel World v2.cbz", new MockFileData(string.Empty));
- fileSystem.AddFile("C:/Data/Accel World v2.pdf", new MockFileData(string.Empty));
- fileSystem.AddFile("C:/Data/Nothing.pdf", new MockFileData(string.Empty));
+ fileSystem.AddDirectory(Root + "Data/");
+ fileSystem.AddFile(Root + "Data/Accel World v1.cbz", new MockFileData(string.Empty));
+ fileSystem.AddFile(Root + "Data/Accel World v2.cbz", new MockFileData(string.Empty));
+ fileSystem.AddFile(Root + "Data/Accel World v2.pdf", new MockFileData(string.Empty));
+ fileSystem.AddFile(Root + "Data/Nothing.pdf", new MockFileData(string.Empty));
 var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
 var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
- new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
- var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
-
- Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
- {
- var skippedScan = parsedInfo.Item1;
- var parsedFiles = parsedInfo.Item2;
- if (parsedFiles.Count == 0) return Task.CompletedTask;
-
- var foundParsedSeries = new ParsedSeries()
- {
- Name = parsedFiles.First().Series,
- NormalizedName = parsedFiles.First().Series.ToNormalized(),
- Format = parsedFiles.First().Format
- };
-
- parsedSeries.Add(foundParsedSeries, parsedFiles);
- return Task.CompletedTask;
- }
var library =
await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
+ Assert.NotNull(library);
+
library.Type = LibraryType.Manga;
- await psf.ScanLibrariesForSeries(library, new List<string>() {"C:/Data/"}, false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), TrackFiles);
+ var parsedSeries = await psf.ScanLibrariesForSeries(library, new List<string>() {Root + "Data/"}, false,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(1));
- Assert.Equal(3, parsedSeries.Values.Count);
- Assert.NotEmpty(parsedSeries.Keys.Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
+ // Assert.Equal(3, parsedSeries.Values.Count);
+ // Assert.NotEmpty(parsedSeries.Keys.Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
+
+ Assert.Equal(3, parsedSeries.Count);
+ Assert.NotEmpty(parsedSeries.Select(p => p.ParsedSeries).Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
}
#endregion
@@ -289,18 +248,16 @@ public class ParseScannedFilesTests
var fileSystem = CreateTestFilesystem();
 var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
 var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
- new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
 var directoriesSeen = new HashSet<string>();
- var library =
- await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
- await psf.ProcessFiles("C:/Data/", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),
- (files, directoryPath) =>
+ var scanResults = await psf.ScanFiles("C:/Data/", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
+ foreach (var scanResult in scanResults)
{
- directoriesSeen.Add(directoryPath);
- return Task.CompletedTask;
- }, library);
+ directoriesSeen.Add(scanResult.Folder);
+ }
Assert.Equal(2, directoriesSeen.Count);
}
@@ -311,16 +268,20 @@ public class ParseScannedFilesTests
var fileSystem = CreateTestFilesystem();
 var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
 var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
- new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
+ LibraryIncludes.Folders | LibraryIncludes.FileTypes);
+ Assert.NotNull(library);
 var directoriesSeen = new HashSet<string>();
- await psf.ProcessFiles("C:/Data/", false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),
- (files, directoryPath) =>
+ var scanResults = await psf.ScanFiles("C:/Data/", false,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
+
+ foreach (var scanResult in scanResults)
{
- directoriesSeen.Add(directoryPath);
- return Task.CompletedTask;
- }, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
- LibraryIncludes.Folders | LibraryIncludes.FileTypes));
+ directoriesSeen.Add(scanResult.Folder);
+ }
Assert.Single(directoriesSeen);
directoriesSeen.TryGetValue("C:/Data/", out var actual);
@@ -342,18 +303,14 @@ public class ParseScannedFilesTests
 var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
 var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
- new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
- var callCount = 0;
- await psf.ProcessFiles("C:/Data", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),(files, folderPath) =>
- {
- callCount++;
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
+ LibraryIncludes.Folders | LibraryIncludes.FileTypes);
+ Assert.NotNull(library);
+ var scanResults = await psf.ScanFiles("C:/Data", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
- return Task.CompletedTask;
- }, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
- LibraryIncludes.Folders | LibraryIncludes.FileTypes));
-
- Assert.Equal(2, callCount);
+ Assert.Equal(2, scanResults.Count);
}
@@ -375,18 +332,235 @@ public class ParseScannedFilesTests
 var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
 var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
- new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
- var callCount = 0;
- await psf.ProcessFiles("C:/Data", false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),(files, folderPath) =>
- {
- callCount++;
- return Task.CompletedTask;
- }, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
- LibraryIncludes.Folders | LibraryIncludes.FileTypes));
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
+ LibraryIncludes.Folders | LibraryIncludes.FileTypes);
+ Assert.NotNull(library);
+ var scanResults = await psf.ScanFiles("C:/Data", false,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
- Assert.Equal(1, callCount);
+ Assert.Single(scanResults);
}
+
+
+
#endregion
+
+ // TODO: Add back in (removed for Hotfix v0.8.5.x)
+ //[Fact]
+ public async Task HasSeriesFolderNotChangedSinceLastScan_AllSeriesFoldersHaveChanges()
+ {
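+ // Generate a library from the JSON testcase, scan it, then add a chapter file and verify only that series' folders report changes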
+ const string testcase = "Subfolders always scanning all series changes - Manga.json";
+ var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var fs = new FileSystem();
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+ var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var scanner = _scannerHelper.CreateServices(ds, fs);
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Equal(4, postLib.Series.Count);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(2, spiceAndWolf.Volumes.Count);
+
+ var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
+ Assert.Single(frieren.Volumes);
+
+ var executionerAndHerWayOfLife = postLib.Series.First(x => x.Name == "The Executioner and Her Way of Life");
+ Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
+
+ await Task.Delay(1100); // Ensure at least one second has passed since library scan
+
+ // Add a new chapter to a volume of the series, then scan. Validate that all directories of this series,
+ // and only this series, are marked as HasChanged
+ var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "The Executioner and Her Way of Life"),
+ "The Executioner and Her Way of Life Vol. 1");
+ File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0001.cbz"),
+ Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0002.cbz"));
+
+ // 4 series, of which 2 have volumes as directories
+ var folderMap = await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id);
+ Assert.Equal(6, folderMap.Count);
+
+ var res = await psf.ScanFiles(testDirectoryPath, true, folderMap, postLib);
+ var changes = res.Where(sc => sc.HasChanged).ToList();
+ Assert.Equal(2, changes.Count);
+ // Only volumes of The Executioner and Her Way of Life should be marked as HasChanged (Spice and Wolf also has 2 volume dirs)
+ Assert.Equal(2, changes.Count(sc => sc.Folder.Contains("The Executioner and Her Way of Life")));
+ }
+
+ [Fact]
+ public async Task HasSeriesFolderNotChangedSinceLastScan_PublisherLayout()
+ {
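+ // Publisher/Series layout: adding a volume file under one series should flag only that series folder as changed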
+ const string testcase = "Subfolder always scanning fix publisher layout - Comic.json";
+ var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var fs = new FileSystem();
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+ var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var scanner = _scannerHelper.CreateServices(ds, fs);
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Equal(4, postLib.Series.Count);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(2, spiceAndWolf.Volumes.Count);
+
+ var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
+ Assert.Equal(2, frieren.Volumes.Count);
+
+ await Task.Delay(1100); // Ensure at least one second has passed since library scan
+
+ // Add a volume to a series, and scan. Ensure only this series is marked as HasChanged
+ var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "YenPress"), "The Executioner and Her Way of Life");
+ File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1.cbz"),
+ Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 2.cbz"));
+
+ var res = await psf.ScanFiles(testDirectoryPath, true,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
+ var changes = res.Count(sc => sc.HasChanged);
+ Assert.Equal(1, changes);
+ }
+
+ // TODO: Add back in (removed for Hotfix v0.8.5.x)
+ //[Fact]
+ public async Task SubFoldersNoSubFolders_SkipAll()
+ {
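+ // With no new files since the last scan, a rescan should report no changed folders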
+ const string testcase = "Subfolders and files at root - Manga.json";
+ var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var fs = new FileSystem();
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+ var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var scanner = _scannerHelper.CreateServices(ds, fs);
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(3, spiceAndWolf.Volumes.Count);
+ Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ // Needs to use the actual time since the folder write time is now; if we set LastFolderChecked in the past
+ // it'll always trigger a scan because the folder changed after the last scan.
+ await Task.Delay(1100); // Ensure at least one second has passed since library scan
+
+ var res = await psf.ScanFiles(testDirectoryPath, true,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
+ Assert.DoesNotContain(res, sc => sc.HasChanged);
+ }
+
+ [Fact]
+ public async Task SubFoldersNoSubFolders_ScanAllAfterAddInRoot()
+ {
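+ // Adding a file at the series root should cause the affected folders to be flagged on rescan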
+ const string testcase = "Subfolders and files at root - Manga.json";
+ var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var fs = new FileSystem();
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+ var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var scanner = _scannerHelper.CreateServices(ds, fs);
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(3, spiceAndWolf.Volumes.Count);
+ Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
+ _context.Series.Update(spiceAndWolf);
+ await _context.SaveChangesAsync();
+
+ // Add file at series root
+ var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
+ File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
+ Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
+
+ var res = await psf.ScanFiles(testDirectoryPath, true,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
+ var changes = res.Count(sc => sc.HasChanged);
+ Assert.Equal(2, changes);
+ }
+
+ [Fact]
+ public async Task SubFoldersNoSubFolders_ScanAllAfterAddInSubFolder()
+ {
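+ // Adding a file inside a volume subfolder should likewise flag the series for rescan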
+ const string testcase = "Subfolders and files at root - Manga.json";
+ var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var fs = new FileSystem();
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+ var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
+ new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var scanner = _scannerHelper.CreateServices(ds, fs);
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(3, spiceAndWolf.Volumes.Count);
+ Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
+ _context.Series.Update(spiceAndWolf);
+ await _context.SaveChangesAsync();
+
+ // Add file in subfolder
+ var spiceAndWolfDir = Path.Join(Path.Join(testDirectoryPath, "Spice and Wolf"), "Spice and Wolf Vol. 3");
+ File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0011.cbz"),
+ Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
+
+ var res = await psf.ScanFiles(testDirectoryPath, true,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
+ var changes = res.Count(sc => sc.HasChanged);
+ Assert.Equal(2, changes);
+ }
}
diff --git a/API.Tests/Services/ProcessSeriesTests.cs b/API.Tests/Services/ProcessSeriesTests.cs
index ef5c45007..119e1bc10 100644
--- a/API.Tests/Services/ProcessSeriesTests.cs
+++ b/API.Tests/Services/ProcessSeriesTests.cs
@@ -1,23 +1,8 @@
-using System.IO;
-using API.Data;
-using API.Data.Metadata;
-using API.Entities;
-using API.Entities.Enums;
-using API.Helpers;
-using API.Helpers.Builders;
-using API.Services;
-using API.Services.Tasks.Metadata;
-using API.Services.Tasks.Scanner;
-using API.SignalR;
-using Microsoft.Extensions.Logging;
-using NSubstitute;
-using Xunit;
-
-namespace API.Tests.Services;
+namespace API.Tests.Services;
public class ProcessSeriesTests
{
-
+ // TODO: Implement
#region UpdateSeriesMetadata
diff --git a/API.Tests/Services/ReaderServiceTests.cs b/API.Tests/Services/ReaderServiceTests.cs
index 3134997ff..102ea3b81 100644
--- a/API.Tests/Services/ReaderServiceTests.cs
+++ b/API.Tests/Services/ReaderServiceTests.cs
@@ -1,24 +1,20 @@
using System.Collections.Generic;
using System.Data.Common;
-using System.Globalization;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
-using API.DTOs;
+using API.DTOs.Progress;
using API.DTOs.Reader;
using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using API.Services;
using API.Services.Plus;
-using API.Services.Tasks;
using API.SignalR;
-using API.Tests.Helpers;
using AutoMapper;
using Hangfire;
using Hangfire.InMemory;
@@ -31,18 +27,13 @@ using Xunit.Abstractions;
namespace API.Tests.Services;
-public class ReaderServiceTests
+public class ReaderServiceTests : AbstractFsTest
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly IUnitOfWork _unitOfWork;
private readonly DataContext _context;
private readonly ReaderService _readerService;
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string DataDirectory = "C:/data/";
-
public ReaderServiceTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
@@ -100,19 +91,6 @@ public class ReaderServiceTests
await _context.SaveChangesAsync();
}
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(DataDirectory);
-
- return fileSystem;
- }
-
#endregion
#region FormatBookmarkFolderPath
@@ -135,9 +113,8 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(1)
.Build())
.Build())
@@ -165,9 +142,8 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(1)
.Build())
.Build())
@@ -204,9 +180,8 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(1)
.Build())
.Build())
@@ -259,12 +234,11 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(1)
.Build())
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(2)
.Build())
.Build())
@@ -298,12 +272,11 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(1)
.Build())
- .WithChapter(new ChapterBuilder("0")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(2)
.Build())
.Build())
@@ -347,19 +320,16 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithMinNumber(2)
.WithChapter(new ChapterBuilder("21").Build())
.WithChapter(new ChapterBuilder("22").Build())
.Build())
.WithVolume(new VolumeBuilder("3")
- .WithMinNumber(3)
.WithChapter(new ChapterBuilder("31").Build())
.WithChapter(new ChapterBuilder("32").Build())
.Build())
@@ -379,6 +349,7 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 1, 1, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("2", actualChapter.Range);
}
@@ -390,12 +361,10 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1-2")
- .WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0").Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
.Build())
.WithVolume(new VolumeBuilder("3-4")
- .WithMinNumber(2)
.WithChapter(new ChapterBuilder("1").Build())
.Build())
.Build();
@@ -412,6 +381,7 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 1, 1, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("3-4", actualChapter.Volume.Name);
Assert.Equal("1", actualChapter.Range);
}
@@ -456,6 +426,7 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 2, 2, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("31", actualChapter.Range);
}
@@ -466,19 +437,16 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithMinNumber(2)
.WithChapter(new ChapterBuilder("21").Build())
.WithChapter(new ChapterBuilder("22").Build())
.Build())
.WithVolume(new VolumeBuilder("3")
- .WithMinNumber(3)
.WithChapter(new ChapterBuilder("31").Build())
.WithChapter(new ChapterBuilder("32").Build())
.Build())
@@ -497,6 +465,7 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 1, 2, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("21", actualChapter.Range);
}
@@ -507,19 +476,16 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
.WithVolume(new VolumeBuilder("1.5")
- .WithMinNumber(2)
.WithChapter(new ChapterBuilder("21").Build())
.WithChapter(new ChapterBuilder("22").Build())
.Build())
.WithVolume(new VolumeBuilder("3")
- .WithMinNumber(3)
.WithChapter(new ChapterBuilder("31").Build())
.WithChapter(new ChapterBuilder("32").Build())
.Build())
@@ -539,6 +505,7 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 1, 2, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("21", actualChapter.Range);
}
@@ -548,16 +515,14 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("1").Build())
- .WithChapter(new ChapterBuilder("2").Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("21").Build())
+ .WithChapter(new ChapterBuilder("22").Build())
.Build())
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
- .WithChapter(new ChapterBuilder("21").Build())
- .WithChapter(new ChapterBuilder("22").Build())
+ .WithChapter(new ChapterBuilder("1").Build())
+ .WithChapter(new ChapterBuilder("2").Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -574,7 +539,8 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 2, 4, 1);
Assert.NotEqual(-1, nextChapter);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
- Assert.Equal("1", actualChapter.Range);
+ Assert.NotNull(actualChapter);
+ Assert.Equal("21", actualChapter.Range);
}
[Fact]
@@ -583,20 +549,17 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("66").Build())
.WithChapter(new ChapterBuilder("67").Build())
.Build())
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithMinNumber(2)
- .WithChapter(new ChapterBuilder("0").Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -616,7 +579,8 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 2, 3, 1);
Assert.NotEqual(-1, nextChapter);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
- Assert.Equal("0", actualChapter.Range);
+ Assert.NotNull(actualChapter);
+ Assert.Equal(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, actualChapter.Range);
}
[Fact]
@@ -626,15 +590,13 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).Build())
- .WithChapter(new ChapterBuilder("B.cbz").WithIsSpecial(true).Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber).Build())
+ .WithChapter(new ChapterBuilder("B.cbz").WithIsSpecial(true).WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -658,7 +620,6 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
@@ -683,8 +644,7 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
@@ -704,68 +664,69 @@ public class ReaderServiceTests
}
// This is commented out because, while valid, I can't solve how to make this pass (https://github.com/Kareadita/Kavita/issues/2099)
- // [Fact]
- // public async Task GetNextChapterIdAsync_ShouldFindNoNextChapterFromLastChapter_NoSpecials_FirstIsVolume()
- // {
- // await ResetDb();
- //
- // var series = new SeriesBuilder("Test")
- // .WithVolume(new VolumeBuilder("0")
- // .WithMinNumber(0)
- // .WithChapter(new ChapterBuilder("1").Build())
- // .WithChapter(new ChapterBuilder("2").Build())
- // .Build())
- // .WithVolume(new VolumeBuilder("1")
- // .WithMinNumber(1)
- // .WithChapter(new ChapterBuilder("0").Build())
- // .Build())
- // .Build();
- // series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
- //
- // _context.Series.Add(series);
- // _context.AppUser.Add(new AppUser()
- // {
- // UserName = "majora2007"
- // });
- //
- // await _context.SaveChangesAsync();
- //
- // var nextChapter = await _readerService.GetNextChapterIdAsync(1, 2, 3, 1);
- // Assert.Equal(-1, nextChapter);
- // }
+ [Fact]
+ public async Task GetNextChapterIdAsync_ShouldFindNoNextChapterFromLastChapter_NoSpecials_FirstIsVolume()
+ {
+ await ResetDb();
- // This is commented out because, while valid, I can't solve how to make this pass
- // [Fact]
- // public async Task GetNextChapterIdAsync_ShouldFindNoNextChapterFromLastChapter_WithSpecials()
- // {
- // await ResetDb();
- //
- // var series = new SeriesBuilder("Test")
- // .WithVolume(new VolumeBuilder("0")
- // .WithMinNumber(0)
- // .WithChapter(new ChapterBuilder("1").Build())
- // .WithChapter(new ChapterBuilder("2").Build())
- // .WithChapter(new ChapterBuilder("0").WithIsSpecial(true).Build())
- // .Build())
- //
- // .WithVolume(new VolumeBuilder("1")
- // .WithMinNumber(1)
- // .WithChapter(new ChapterBuilder("2").Build())
- // .Build())
- // .Build();
- // series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
- //
- // _context.Series.Add(series);
- // _context.AppUser.Add(new AppUser()
- // {
- // UserName = "majora2007"
- // });
- //
- // await _context.SaveChangesAsync();
- //
- // var nextChapter = await _readerService.GetNextChapterIdAsync(1, 2, 4, 1);
- // Assert.Equal(-1, nextChapter);
- // }
+ var series = new SeriesBuilder("Test")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("1").Build())
+ .WithChapter(new ChapterBuilder("2").Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
+
+ _context.Series.Add(series);
+ _context.AppUser.Add(new AppUser()
+ {
+ UserName = "majora2007"
+ });
+
+ await _context.SaveChangesAsync();
+
+ var nextChapter = await _readerService.GetNextChapterIdAsync(1, 1, 2, 1);
+ Assert.Equal(-1, nextChapter);
+ }
+
+ [Fact]
+ public async Task GetNextChapterIdAsync_ShouldFindNoNextChapterFromLastChapter_WithSpecials()
+ {
+ await ResetDb();
+
+ var series = new SeriesBuilder("Test")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("1").Build())
+ .WithChapter(new ChapterBuilder("2").Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("2").Build())
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
+
+ _context.Series.Add(series);
+ _context.AppUser.Add(new AppUser()
+ {
+ UserName = "majora2007"
+ });
+
+ await _context.SaveChangesAsync();
+
+ var nextChapter = await _readerService.GetNextChapterIdAsync(1, 2, 3, 1);
+ Assert.Equal(-1, nextChapter);
+ }
@@ -776,15 +737,19 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).Build())
- .WithChapter(new ChapterBuilder("B.cbz").WithIsSpecial(true).Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("A.cbz")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .WithChapter(new ChapterBuilder("B.cbz")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 2)
+ .Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -802,6 +767,7 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 1, 2, 1);
Assert.NotEqual(-1, nextChapter);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("A.cbz", actualChapter.Range);
}
@@ -811,11 +777,17 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
- .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("A.cbz")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .WithPages(1)
+ .Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -833,6 +805,7 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 1, 2, 1);
Assert.NotEqual(-1, nextChapter);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("A.cbz", actualChapter.Range);
}
@@ -842,15 +815,21 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
- .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).Build())
.Build())
+
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0").Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("A.cbz")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .WithPages(1)
+ .Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -864,7 +843,7 @@ public class ReaderServiceTests
await _context.SaveChangesAsync();
- var nextChapter = await _readerService.GetNextChapterIdAsync(1, 1, 3, 1);
+ var nextChapter = await _readerService.GetNextChapterIdAsync(1, 3, 4, 1);
Assert.Equal(-1, nextChapter);
}
@@ -876,14 +855,18 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).Build())
- .WithChapter(new ChapterBuilder("B.cbz").WithIsSpecial(true).Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("A.cbz")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .WithChapter(new ChapterBuilder("B.cbz")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 2)
+ .Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -901,6 +884,7 @@ public class ReaderServiceTests
var nextChapter = await _readerService.GetNextChapterIdAsync(1, 2, 3, 1);
Assert.NotEqual(-1, nextChapter);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("B.cbz", actualChapter.Range);
}
@@ -911,12 +895,10 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("12").Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithMinNumber(2)
.WithChapter(new ChapterBuilder("12").Build())
.Build())
.Build();
@@ -952,19 +934,16 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithMinNumber(2)
.WithChapter(new ChapterBuilder("21").Build())
.WithChapter(new ChapterBuilder("22").Build())
.Build())
.WithVolume(new VolumeBuilder("3")
- .WithMinNumber(3)
.WithChapter(new ChapterBuilder("31").Build())
.WithChapter(new ChapterBuilder("32").Build())
.Build())
@@ -984,6 +963,7 @@ public class ReaderServiceTests
var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 1, 2, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("1", actualChapter.Range);
}
@@ -995,19 +975,16 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
.WithVolume(new VolumeBuilder("1.5")
- .WithMinNumber(2)
.WithChapter(new ChapterBuilder("21").Build())
.WithChapter(new ChapterBuilder("22").Build())
.Build())
.WithVolume(new VolumeBuilder("3")
- .WithMinNumber(3)
.WithChapter(new ChapterBuilder("31").Build())
.WithChapter(new ChapterBuilder("32").Build())
.Build())
@@ -1025,6 +1002,7 @@ public class ReaderServiceTests
var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 3, 5, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("22", actualChapter.Range);
}
@@ -1034,11 +1012,18 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("40").WithPages(1).Build())
.WithChapter(new ChapterBuilder("50").WithPages(1).Build())
.WithChapter(new ChapterBuilder("60").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Some Special Title").WithPages(1).WithIsSpecial(true).Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Some Special Title")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .WithPages(1)
+ .Build())
.Build())
.WithVolume(new VolumeBuilder("1997")
@@ -1065,7 +1050,7 @@ public class ReaderServiceTests
// prevChapter should be id from ch.21 from volume 2001
- var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 4, 7, 1);
+ var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 5, 7, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter);
Assert.NotNull(actualChapter);
@@ -1109,6 +1094,7 @@ public class ReaderServiceTests
var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 2, 3, 1);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("2", actualChapter.Range);
}
@@ -1119,15 +1105,13 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).Build())
- .WithChapter(new ChapterBuilder("B.cbz").WithIsSpecial(true).Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).Build())
+ .WithChapter(new ChapterBuilder("B.cbz").WithIsSpecial(true).WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 2).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -1147,6 +1131,7 @@ public class ReaderServiceTests
var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 2, 3, 1);
Assert.Equal(2, prevChapter);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("2", actualChapter.Range);
}
@@ -1157,7 +1142,6 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
@@ -1187,8 +1171,7 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0").Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -1215,15 +1198,13 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
- .WithChapter(new ChapterBuilder("0").Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -1237,10 +1218,7 @@ public class ReaderServiceTests
await _context.SaveChangesAsync();
-
-
-
- var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 1, 1, 1);
+ var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 2, 3, 1);
Assert.Equal(-1, prevChapter);
}
@@ -1250,23 +1228,20 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("5").Build())
.WithChapter(new ChapterBuilder("6").Build())
.WithChapter(new ChapterBuilder("7").Build())
.Build())
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
- .WithChapter(new ChapterBuilder("1").WithIsSpecial(true).Build())
- .WithChapter(new ChapterBuilder("2").WithIsSpecial(true).Build())
+ .WithChapter(new ChapterBuilder("1").Build())
+ .WithChapter(new ChapterBuilder("2").Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithMinNumber(2)
- .WithChapter(new ChapterBuilder("3").WithIsSpecial(true).Build())
- .WithChapter(new ChapterBuilder("4").WithIsSpecial(true).Build())
+ .WithChapter(new ChapterBuilder("3").Build())
+ .WithChapter(new ChapterBuilder("4").Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -1298,8 +1273,7 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
@@ -1329,14 +1303,18 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
- .WithChapter(new ChapterBuilder("A.cbz").WithIsSpecial(true).Build())
- .WithChapter(new ChapterBuilder("B.cbz").WithIsSpecial(true).Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("A.cbz")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .WithChapter(new ChapterBuilder("B.cbz")
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 2)
+ .Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -1357,6 +1335,7 @@ public class ReaderServiceTests
var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 2, 4, 1);
Assert.NotEqual(-1, prevChapter);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("A.cbz", actualChapter.Range);
}
@@ -1366,13 +1345,11 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithMinNumber(0)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("21").Build())
.WithChapter(new ChapterBuilder("22").Build())
.Build())
@@ -1389,12 +1366,10 @@ public class ReaderServiceTests
await _context.SaveChangesAsync();
-
-
-
var prevChapter = await _readerService.GetPrevChapterIdAsync(1, 1, 1, 1);
Assert.NotEqual(-1, prevChapter);
var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter);
+ Assert.NotNull(actualChapter);
Assert.Equal("22", actualChapter.Range);
}
@@ -1405,12 +1380,10 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithMinNumber(1)
.WithChapter(new ChapterBuilder("12").Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithMinNumber(2)
.WithChapter(new ChapterBuilder("12").Build())
.Build())
.Build();
@@ -1438,7 +1411,7 @@ public class ReaderServiceTests
{
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("95").Build())
.WithChapter(new ChapterBuilder("96").Build())
.Build())
@@ -1485,7 +1458,7 @@ public class ReaderServiceTests
.WithChapter(new ChapterBuilder("1").WithPages(3).Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.WithPages(4)
.Build();
@@ -1524,7 +1497,7 @@ public class ReaderServiceTests
.WithChapter(new ChapterBuilder("1", "1-11").WithPages(3).Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.WithPages(4)
.Build();
@@ -1625,16 +1598,21 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
// Loose chapters
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("45").WithPages(1).Build())
.WithChapter(new ChapterBuilder("46").WithPages(1).Build())
.WithChapter(new ChapterBuilder("47").WithPages(1).Build())
.WithChapter(new ChapterBuilder("48").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Some Special Title").WithIsSpecial(true).WithPages(1).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Some Special Title")
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .WithIsSpecial(true).WithPages(1)
+ .Build())
.Build())
// One file volume
.WithVolume(new VolumeBuilder("1")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build()) // Read
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build()) // Read
.Build())
// Chapter-based volume
.WithVolume(new VolumeBuilder("2")
@@ -1694,10 +1672,12 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
// Loose chapters
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
.WithChapter(new ChapterBuilder("2").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Prologue").WithIsSpecial(true).WithPages(1).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Prologue").WithIsSpecial(true).WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithPages(1).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -1728,7 +1708,7 @@ public class ReaderServiceTests
.WithVolume(new VolumeBuilder("2")
.WithChapter(new ChapterBuilder("21").WithPages(1).Build())
.Build())
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("31").WithPages(1).Build())
.WithChapter(new ChapterBuilder("32").WithPages(1).Build())
.Build())
@@ -1782,7 +1762,7 @@ public class ReaderServiceTests
{
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("230").WithPages(1).Build())
.WithChapter(new ChapterBuilder("231").WithPages(1).Build())
.Build())
@@ -1818,17 +1798,19 @@ public class ReaderServiceTests
{
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("100").WithPages(1).Build())
.WithChapter(new ChapterBuilder("101").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Christmas Eve").WithIsSpecial(true).WithPages(1).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Christmas Eve").WithIsSpecial(true).WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("1")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -1871,7 +1853,7 @@ public class ReaderServiceTests
{
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("100").WithPages(1).Build())
.WithChapter(new ChapterBuilder("101").WithPages(1).Build())
.WithChapter(new ChapterBuilder("102").WithPages(1).Build())
@@ -1987,7 +1969,7 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
.WithChapter(new ChapterBuilder("2").WithPages(1).Build())
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
@@ -2027,11 +2009,13 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
.WithChapter(new ChapterBuilder("2").WithPages(1).Build())
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Some Special Title").WithIsSpecial(true).WithPages(1).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Some Special Title").WithIsSpecial(true).WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithPages(1).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -2083,7 +2067,7 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("230").WithPages(1).Build())
//.WithChapter(new ChapterBuilder("231").WithPages(1).Build()) (Added later)
.Build())
@@ -2093,7 +2077,7 @@ public class ReaderServiceTests
.WithChapter(new ChapterBuilder("2").WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
//.WithChapter(new ChapterBuilder("14.9").WithPages(1).Build()) (added later)
.Build())
.Build();
@@ -2133,13 +2117,13 @@ public class ReaderServiceTests
public async Task GetContinuePoint_ShouldReturnUnreadSingleVolume_WhenThereAreSomeSingleVolumesBeforeLooseLeafChapters()
{
await ResetDb();
- var readChapter1 = new ChapterBuilder("0").WithPages(1).Build();
- var readChapter2 = new ChapterBuilder("0").WithPages(1).Build();
- var volume = new VolumeBuilder("3").WithChapter(new ChapterBuilder("0").WithPages(1).Build()).Build();
+ var readChapter1 = new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build();
+ var readChapter2 = new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build();
+ var volume = new VolumeBuilder("3").WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build()).Build();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("51").WithPages(1).Build())
.WithChapter(new ChapterBuilder("52").WithPages(1).Build())
.WithChapter(new ChapterBuilder("53").WithPages(1).Build())
@@ -2153,7 +2137,7 @@ public class ReaderServiceTests
.Build())
// 3, 4, and all loose leafs are unread should be unread
.WithVolume(new VolumeBuilder("3")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("4")
.WithChapter(new ChapterBuilder("40").WithPages(1).Build())
@@ -2207,11 +2191,13 @@ public class ReaderServiceTests
.WithChapter(new ChapterBuilder("21").WithPages(1).Build())
.WithChapter(new ChapterBuilder("22").WithPages(1).Build())
.Build())
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("51").WithPages(1).Build())
.WithChapter(new ChapterBuilder("52").WithPages(1).Build())
.WithChapter(new ChapterBuilder("91").WithPages(2).Build())
- .WithChapter(new ChapterBuilder("Special").WithIsSpecial(true).WithPages(1).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Special").WithIsSpecial(true).WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithPages(1).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -2376,11 +2362,13 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
.WithChapter(new ChapterBuilder("2").WithPages(1).Build())
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Some Special Title").WithIsSpecial(true).WithPages(1).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Some Special Title").WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithIsSpecial(true).WithPages(1).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -2413,13 +2401,15 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
.WithChapter(new ChapterBuilder("2").WithPages(1).Build())
.WithChapter(new ChapterBuilder("2.5").WithPages(1).Build())
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Some Special Title").WithIsSpecial(true).WithPages(1).Build())
.Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Some Special Title").WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithIsSpecial(true).WithPages(1).Build())
+ .Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -2453,10 +2443,10 @@ public class ReaderServiceTests
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -2486,21 +2476,23 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("45").WithPages(5).Build())
.WithChapter(new ChapterBuilder("46").WithPages(46).Build())
.WithChapter(new ChapterBuilder("47").WithPages(47).Build())
.WithChapter(new ChapterBuilder("48").WithPages(48).Build())
.WithChapter(new ChapterBuilder("49").WithPages(49).Build())
.WithChapter(new ChapterBuilder("50").WithPages(50).Build())
- .WithChapter(new ChapterBuilder("Some Special Title").WithIsSpecial(true).WithPages(10).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Some Special Title").WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithIsSpecial(true).WithPages(10).Build())
.Build())
.WithVolume(new VolumeBuilder("1")
- .WithChapter(new ChapterBuilder("0").WithPages(6).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(6).Build())
.Build())
.WithVolume(new VolumeBuilder("2")
- .WithChapter(new ChapterBuilder("0").WithPages(7).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(7).Build())
.Build())
.WithVolume(new VolumeBuilder("3")
.WithChapter(new ChapterBuilder("12").WithPages(5).Build())
@@ -2550,15 +2542,15 @@ public class ReaderServiceTests
public async Task MarkSeriesAsReadTest()
{
await ResetDb();
- // TODO: Validate this is correct, shouldn't be possible to have 2 Volume 0's in a series
+
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.WithChapter(new ChapterBuilder("1").WithPages(2).Build())
.Build())
- .WithVolume(new VolumeBuilder("0")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithVolume(new VolumeBuilder("2")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.WithChapter(new ChapterBuilder("1").WithPages(2).Build())
.Build())
.Build();
@@ -2592,8 +2584,8 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.WithChapter(new ChapterBuilder("1").WithPages(2).Build())
.Build())
.Build();
@@ -2665,22 +2657,24 @@ public class ReaderServiceTests
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("10").WithPages(1).Build())
.WithChapter(new ChapterBuilder("20").WithPages(1).Build())
.WithChapter(new ChapterBuilder("30").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Some Special Title").WithIsSpecial(true).WithPages(1).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Some Special Title").WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithIsSpecial(true).WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("1997")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("2002")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("2003")
- .WithChapter(new ChapterBuilder("0").WithPages(1).Build())
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithPages(1).Build())
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb", LibraryType.Manga).Build();
@@ -2718,11 +2712,13 @@ public class ReaderServiceTests
{
await ResetDb();
var series = new SeriesBuilder("Test")
- .WithVolume(new VolumeBuilder("0")
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("10").WithPages(1).Build())
.WithChapter(new ChapterBuilder("20").WithPages(1).Build())
.WithChapter(new ChapterBuilder("30").WithPages(1).Build())
- .WithChapter(new ChapterBuilder("Some Special Title").WithIsSpecial(true).WithPages(1).Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("Some Special Title").WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1).WithIsSpecial(true).WithPages(1).Build())
.Build())
.WithVolume(new VolumeBuilder("1997")
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
diff --git a/API.Tests/Services/ReadingListServiceTests.cs b/API.Tests/Services/ReadingListServiceTests.cs
index 23de53674..7a6ed3e0b 100644
--- a/API.Tests/Services/ReadingListServiceTests.cs
+++ b/API.Tests/Services/ReadingListServiceTests.cs
@@ -11,15 +11,11 @@ using API.DTOs.ReadingLists;
using API.DTOs.ReadingLists.CBL;
using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
-using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using API.Services;
using API.Services.Plus;
-using API.Services.Tasks;
using API.SignalR;
-using API.Tests.Helpers;
using AutoMapper;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
@@ -52,7 +48,9 @@ public class ReadingListServiceTests
var mapper = config.CreateMapper();
_unitOfWork = new UnitOfWork(_context, mapper, null!);
- _readingListService = new ReadingListService(_unitOfWork, Substitute.For<ILogger<ReadingListService>>(), Substitute.For<IEventHub>());
+ var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
+ _readingListService = new ReadingListService(_unitOfWork, Substitute.For<ILogger<ReadingListService>>(),
+ Substitute.For<IEventHub>(), Substitute.For<IImageService>(), ds);
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(),
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
@@ -128,7 +126,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.WithAgeRating(AgeRating.Everyone)
.Build()
@@ -177,7 +175,7 @@ public class ReadingListServiceTests
.WithSeries(new SeriesBuilder("Test")
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.WithAgeRating(AgeRating.Everyone)
.Build()
@@ -236,7 +234,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.WithAgeRating(AgeRating.Everyone)
.Build()
@@ -296,7 +294,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.WithAgeRating(AgeRating.Everyone)
.Build()
@@ -375,7 +373,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.WithAgeRating(AgeRating.Everyone)
.Build()
@@ -432,7 +430,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.WithAgeRating(AgeRating.Everyone)
.Build()
@@ -497,7 +495,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.Build()
)
@@ -538,7 +536,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.Build()
)
@@ -581,6 +579,93 @@ public class ReadingListServiceTests
Assert.Equal(AgeRating.G, readingList.AgeRating);
}
+ [Fact]
+ public async Task UpdateReadingListAgeRatingForSeries()
+ {
+ await ResetDb();
+ var spiceAndWolf = new SeriesBuilder("Spice and Wolf")
+ .WithMetadata(new SeriesMetadataBuilder().Build())
+ .WithVolumes([
+ new VolumeBuilder("1")
+ .WithChapters([
+ new ChapterBuilder("1").Build(),
+ new ChapterBuilder("2").Build(),
+ ]).Build()
+ ]).Build();
+ spiceAndWolf.Metadata.AgeRating = AgeRating.Everyone;
+
+ var othersidePicnic = new SeriesBuilder("Otherside Picnic ")
+ .WithMetadata(new SeriesMetadataBuilder().Build())
+ .WithVolumes([
+ new VolumeBuilder("1")
+ .WithChapters([
+ new ChapterBuilder("1").Build(),
+ new ChapterBuilder("2").Build(),
+ ]).Build()
+ ]).Build();
+ othersidePicnic.Metadata.AgeRating = AgeRating.Everyone;
+
+ _context.AppUser.Add(new AppUser()
+ {
+ UserName = "Amelia",
+ ReadingLists = new List<ReadingList>(),
+ Libraries = new List<Library>
+ {
+ new LibraryBuilder("Test Library", LibraryType.LightNovel)
+ .WithSeries(spiceAndWolf)
+ .WithSeries(othersidePicnic)
+ .Build(),
+ },
+ });
+
+ await _context.SaveChangesAsync();
+ var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("Amelia", AppUserIncludes.ReadingLists);
+ Assert.NotNull(user);
+
+ var myTestReadingList = new ReadingListBuilder("MyReadingList").Build();
+ var mySecondTestReadingList = new ReadingListBuilder("MySecondReadingList").Build();
+ var myThirdTestReadingList = new ReadingListBuilder("MyThirdReadingList").Build();
+ user.ReadingLists = new List<ReadingList>()
+ {
+ myTestReadingList,
+ mySecondTestReadingList,
+ myThirdTestReadingList,
+ };
+
+
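+ // Chapter ids 1-2 should belong to Spice and Wolf and 3-4 to Otherside Picnic (assuming insertion-order ids)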
+ await _readingListService.AddChaptersToReadingList(spiceAndWolf.Id, new List<int> {1, 2}, myTestReadingList);
+ await _readingListService.AddChaptersToReadingList(othersidePicnic.Id, new List<int> {3, 4}, myTestReadingList);
+ await _readingListService.AddChaptersToReadingList(spiceAndWolf.Id, new List<int> {1, 2}, myThirdTestReadingList);
+ await _readingListService.AddChaptersToReadingList(othersidePicnic.Id, new List<int> {3, 4}, mySecondTestReadingList);
+
+
+ _unitOfWork.UserRepository.Update(user);
+ await _unitOfWork.CommitAsync();
+
+ await _readingListService.CalculateReadingListAgeRating(myTestReadingList);
+ await _readingListService.CalculateReadingListAgeRating(mySecondTestReadingList);
+ Assert.Equal(AgeRating.Everyone, myTestReadingList.AgeRating);
+ Assert.Equal(AgeRating.Everyone, mySecondTestReadingList.AgeRating);
+ Assert.Equal(AgeRating.Everyone, myThirdTestReadingList.AgeRating);
+
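+ // Raising Otherside Picnic to Mature should only bump the lists that contain it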
+ await _readingListService.UpdateReadingListAgeRatingForSeries(othersidePicnic.Id, AgeRating.Mature);
+ await _unitOfWork.CommitAsync();
+
+ // Reading lists containing Otherside Picnic are updated
+ myTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(1);
+ Assert.NotNull(myTestReadingList);
+ Assert.Equal(AgeRating.Mature, myTestReadingList.AgeRating);
+
+ mySecondTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(2);
+ Assert.NotNull(mySecondTestReadingList);
+ Assert.Equal(AgeRating.Mature, mySecondTestReadingList.AgeRating);
+
+ // Unrelated reading list is not updated
+ myThirdTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(3);
+ Assert.NotNull(myThirdTestReadingList);
+ Assert.Equal(AgeRating.Everyone, myThirdTestReadingList.AgeRating);
+ }
+
#endregion
#region CalculateStartAndEndDates
@@ -593,7 +678,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.Build()
)
@@ -645,7 +730,7 @@ public class ReadingListServiceTests
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithVolumes(new List<Volume>()
{
- new VolumeBuilder("0")
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1")
.WithReleaseDate(new DateTime(2005, 03, 01))
.Build()
@@ -711,6 +796,9 @@ public class ReadingListServiceTests
Assert.Equal("Issue #1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", "1", "The Title")));
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", chapterTitleName: "The Title")));
Assert.Equal("The Title", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterTitleName: "The Title")));
+ var dto = CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterNumber: "The Special Title");
+ dto.IsSpecial = true;
+ Assert.Equal("The Special Title", ReadingListService.FormatTitle(dto));
// Book Library & Archive
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Book, "1")));
@@ -736,8 +824,8 @@ public class ReadingListServiceTests
}
private static ReadingListItemDto CreateListItemDto(MangaFormat seriesFormat, LibraryType libraryType,
- string volumeNumber = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
- string chapterNumber = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
+ string volumeNumber = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
+ string chapterNumber = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
string chapterTitleName = "")
{
return new ReadingListItemDto()
@@ -1205,6 +1293,65 @@ public class ReadingListServiceTests
Assert.Equal(2, createdList.Items.First(item => item.Order == 2).ChapterId);
Assert.Equal(4, createdList.Items.First(item => item.Order == 3).ChapterId);
}
+
+ /// <summary>
+ /// This test is about ensuring Annuals that are a separate series can be linked up properly (ComicVine)
+ /// </summary>
+ //[Fact]
+ public async Task CreateReadingListFromCBL_ShouldCreateList_WithAnnuals()
+ {
+ // TODO: Implement this correctly
+ await ResetDb();
+ var cblReadingList = LoadCblFromPath("Annual.cbl");
+
+ // Mock up our series
+ var fablesSeries = new SeriesBuilder("Fables")
+ .WithVolume(new VolumeBuilder("2002")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder("1").Build())
+ .WithChapter(new ChapterBuilder("2").Build())
+ .WithChapter(new ChapterBuilder("3").Build())
+ .Build())
+ .Build();
+
+ var fables2Series = new SeriesBuilder("Fables Annual")
+ .WithVolume(new VolumeBuilder("2003")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build())
+ .Build();
+
+ _context.AppUser.Add(new AppUser()
+ {
+ UserName = "majora2007",
+ ReadingLists = new List<ReadingList>(),
+ Libraries = new List<Library>()
+ {
+ new LibraryBuilder("Test LIb 2", LibraryType.Book)
+ .WithSeries(fablesSeries)
+ .WithSeries(fables2Series)
+ .Build()
+ },
+ });
+ await _unitOfWork.CommitAsync();
+
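+ // Importing the CBL should resolve the Annual entries against the separate "Fables Annual" series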
+ var importSummary = await _readingListService.CreateReadingListFromCbl(1, cblReadingList);
+
+ Assert.Equal(CblImportResult.Success, importSummary.Success);
+ Assert.NotEmpty(importSummary.Results);
+
+ var createdList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(1);
+
+ Assert.NotNull(createdList);
+ Assert.Equal("Annual", createdList.Title);
+
+ Assert.Equal(4, createdList.Items.Count);
+ Assert.Equal(1, createdList.Items.First(item => item.Order == 0).ChapterId);
+ Assert.Equal(2, createdList.Items.First(item => item.Order == 1).ChapterId);
+ Assert.Equal(4, createdList.Items.First(item => item.Order == 2).ChapterId);
+ Assert.Equal(3, createdList.Items.First(item => item.Order == 3).ChapterId);
+ }
+
#endregion
#region CreateReadingListsFromSeries
@@ -1239,7 +1386,7 @@ public class ReadingListServiceTests
var series2 = new SeriesBuilder("Series 2")
.WithFormat(MangaFormat.Archive)
- .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("1").Build())
.WithChapter(new ChapterBuilder("2").Build())
.Build())
diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs
index 0d0277e3e..4554820fb 100644
--- a/API.Tests/Services/ScannerServiceTests.cs
+++ b/API.Tests/Services/ScannerServiceTests.cs
@@ -1,71 +1,941 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
+using System.IO;
using System.Linq;
+using System.Threading.Tasks;
+using API.Data.Metadata;
+using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
using API.Extensions;
-using API.Helpers.Builders;
-using API.Services.Tasks;
-using API.Services.Tasks.Scanner;
using API.Services.Tasks.Scanner.Parser;
using API.Tests.Helpers;
+using Hangfire;
using Xunit;
+using Xunit.Abstractions;
namespace API.Tests.Services;
-public class ScannerServiceTests
+public class ScannerServiceTests : AbstractDbTest
{
- [Fact]
- public void FindSeriesNotOnDisk_Should_Remove1()
+ private readonly ITestOutputHelper _testOutputHelper;
+ private readonly ScannerHelper _scannerHelper;
+ private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests");
+
+ public ScannerServiceTests(ITestOutputHelper testOutputHelper)
{
- var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
+ _testOutputHelper = testOutputHelper;
- ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive});
- //AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub});
+ // Set up Hangfire to use in-memory storage for testing
+ GlobalConfiguration.Configuration.UseInMemoryStorage();
+ _scannerHelper = new ScannerHelper(_unitOfWork, testOutputHelper);
+ }
- var existingSeries = new List<Series>