diff --git a/.browserslistrc b/.browserslistrc
index 427441dc9..6784945a5 100644
--- a/.browserslistrc
+++ b/.browserslistrc
@@ -8,10 +8,4 @@
# You can see what browsers were selected by your queries by running:
# npx browserslist
-last 1 Chrome version
-last 1 Firefox version
-last 2 Edge major versions
-last 2 Safari major versions
-last 2 iOS major versions
-Firefox ESR
-not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
+defaults
\ No newline at end of file
diff --git a/.editorconfig b/.editorconfig
index cd6d2ec06..c82009e40 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -1,6 +1,7 @@
# Editor configuration, see https://editorconfig.org
root = true
+
[*]
charset = utf-8
indent_style = space
@@ -16,3 +17,13 @@ indent_size = 2
[*.md]
max_line_length = off
trim_trailing_whitespace = false
+
+[*.yml]
+indent_size = 2
+
+[*.csproj]
+indent_size = 2
+
+[*.cs]
+# Disable SonarLint warning S1075 (Don't use hardcoded url)
+dotnet_diagnostic.S1075.severity = none
diff --git a/.github/DISCUSSION_TEMPLATE/ideas.yml b/.github/DISCUSSION_TEMPLATE/ideas.yml
new file mode 100644
index 000000000..845d3e3f3
--- /dev/null
+++ b/.github/DISCUSSION_TEMPLATE/ideas.yml
@@ -0,0 +1,49 @@
+title: "[Kavita] Idea / Feature Submission"
+labels:
+ - "Idea Submission"
+body:
+ - type: markdown
+ attributes:
+ value: |
+ ## Idea Submission for Kavita 💡
+
+ Please fill out the details below, and let's make Kavita even better together!
+
+ - type: textarea
+ id: idea-description
+ attributes:
+ label: Idea Description
+ value: |
+ Go into as much detail as possible to explain why your idea should be added to Kavita. Try to present some use cases and examples of how it would help other users. The more detail you have the better.
+
+ - type: dropdown
+ id: idea-category
+ attributes:
+ label: Idea Category
+ options:
+ - API
+ - Feature Enhancement
+ - User Experience
+ - Performance Improvement
+ - Web UI
+ description: "What area would your idea help with?"
+ validations:
+ required: true
+
+ - type: input
+ id: duration-of-use
+ attributes:
+ label: Duration of Using Kavita
+ description: "How long have you been using Kavita?"
+
+ - type: checkboxes
+ attributes:
+ label: Before submitting
+ options:
+ - label: "I've already searched for existing ideas before posting."
+ required: true
+
+ - type: markdown
+ attributes:
+ value: |
+ ### Thank you for contributing to Kavita's future! 🚀
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index bfd2bab0a..000000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,42 +0,0 @@
----
-name: Bug report
-about: Create a report to help us improve
-title: ''
-labels: needs-triage
-assignees: ''
-
----
-
-**If this is a feature request, request [here](https://feats.kavitareader.com/) instead. Feature requests will be deleted from Github.**
-
-Please put as much information as possible to help me understand your issue. OS, browser, version are very important!
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. Go to '...'
-2. Click on '....'
-3. Scroll down to '....'
-4. See error
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
-
-**Desktop (please complete the following information):**
- - OS: [e.g. iOS, Docker]
- - Browser [e.g. chrome, safari]
- - Version [e.g. 22] (can be found on Server Settings -> System tab)
-
-**Smartphone (please complete the following information):**
- - Device: [e.g. iPhone6]
- - OS: [e.g. iOS8.1]
- - Browser [e.g. stock browser, safari]
- - Version [e.g. 22]
-
-**Additional context**
-Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 02cbdf152..805c3b61d 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -1,24 +1,17 @@
name: Bug Report
-description: Create a report to help us improve
-title: ""
+description: Help us make Kavita better for everyone by submitting issues you run into while using the program.
+title: "Put a short summary of what went wrong here"
labels: ["needs-triage"]
-assignees:
body:
- type: markdown
attributes:
- value: |
- Thanks for taking the time to fill out this bug report!
- - type: markdown
- attributes:
- value: |
- If you have a feature request, please go to our [Feature Requests](https://feats.kavitareader.com) page.
+ value: "Thanks for taking the time to fill out this bug report!"
- type: textarea
id: what-happened
attributes:
label: What happened?
- description: Also tell us, what steps you took so we can try to reproduce.
+ description: Don't forget to tell us what steps you took so we can try to reproduce.
placeholder: Tell us what you see!
- value: ""
validations:
required: true
- type: textarea
@@ -26,33 +19,35 @@ body:
attributes:
label: What did you expect?
description: What did you expect to happen?
- placeholder: Tell us what you expected to see!
- value: ""
+ placeholder: Tell us what you expected to see! Go in as much detail as possible so we can confirm if the behavior is something that is broken.
validations:
required: true
- - type: textarea
+ - type: dropdown
id: version
attributes:
- label: Version
- description: What version of our software are you running?
- placeholder: Can be found by going to Server Settings > System
- value: ""
+ label: Kavita Version Number - If you don't see your version number listed, please update Kavita and see if your issue still persists.
+ multiple: false
+ options:
+ - 0.8.7 - Stable
+ - Nightly Testing Branch
validations:
required: true
- type: dropdown
id: OS
attributes:
- label: What OS is Kavita being run on?
+ label: What operating system is Kavita being hosted from?
multiple: false
options:
- - Docker
+ - Docker (LSIO Container)
+ - Docker (Dockerhub Container)
+ - Docker (Other)
- Windows
- Linux
- Mac
- type: dropdown
id: desktop-OS
attributes:
- label: If issue being seen on Desktop, what OS are you running where you see the issue?
+ label: If the issue is being seen on Desktop, what OS are you running where you see the issue?
multiple: false
options:
- Windows
@@ -61,17 +56,18 @@ body:
- type: dropdown
id: desktop-browsers
attributes:
- label: If issue being seen on Desktop, what browsers are you seeing the problem on?
+ label: If the issue is being seen in the UI, what browsers are you seeing the problem on?
multiple: true
options:
- Firefox
- Chrome
- Safari
- Microsoft Edge
+ - Other (List in "Additional Notes" box)
- type: dropdown
id: mobile-OS
attributes:
- label: If issue being seen on Mobile, what OS are you running where you see the issue?
+ label: If the issue is being seen on Mobile, what OS are you running where you see the issue?
multiple: false
options:
- Android
@@ -79,13 +75,13 @@ body:
- type: dropdown
id: mobile-browsers
attributes:
- label: If issue being seen on Mobile, what browsers are you seeing the problem on?
+ label: If the issue is being seen on the Mobile UI, what browsers are you seeing the problem on?
multiple: true
options:
- Firefox
- Chrome
- Safari
- - Microsoft Edge
+ - Other iOS Browser
- type: textarea
id: logs
attributes:
@@ -97,7 +93,4 @@ body:
attributes:
label: Additional Notes
description: Any other information about the issue not covered in this form?
- placeholder: e.g. Running Kavita on a raspberry pi
- value: ""
- validations:
- required: true
\ No newline at end of file
+ placeholder: e.g. Running Kavita on a Raspberry Pi, updating from X version, using LSIO container, etc
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index ec4bb386b..e9be08116 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1 +1,5 @@
-blank_issues_enabled: false
\ No newline at end of file
+blank_issues_enabled: false
+contact_links:
+ - name: Feature Requests
+ url: https://github.com/Kareadita/Kavita/discussions
+ about: Suggest an idea for the Kavita project
diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml
new file mode 100644
index 000000000..044864734
--- /dev/null
+++ b/.github/workflows/build-and-test.yml
@@ -0,0 +1,35 @@
+name: Build and Test PR
+
+on:
+ pull_request:
+ branches: [ '**' ]
+
+jobs:
+ build:
+ name: Build and Test PR
+ runs-on: windows-latest
+ steps:
+ - name: Checkout Repo
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Setup .NET Core
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+
+ - name: Install Swashbuckle CLI
+ shell: powershell
+ run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
+
+ - name: Install dependencies
+ run: dotnet restore
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: csproj
+ path: Kavita.Common/Kavita.Common.csproj
+
+ - name: Test
+ run: dotnet test --no-restore --verbosity normal
diff --git a/.github/workflows/canary-workflow.yml b/.github/workflows/canary-workflow.yml
new file mode 100644
index 000000000..b919030b0
--- /dev/null
+++ b/.github/workflows/canary-workflow.yml
@@ -0,0 +1,138 @@
+name: Canary Workflow
+
+on:
+ push:
+ branches:
+ - canary
+ - '!release/**'
+
+jobs:
+ build:
+ name: Upload Kavita.Common for Version Bump
+ runs-on: ubuntu-24.04
+ steps:
+ - name: Checkout Repo
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: csproj
+ path: Kavita.Common/Kavita.Common.csproj
+
+ version:
+ name: Bump version
+ needs: [ build ]
+ runs-on: ubuntu-24.04
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Setup .NET Core
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+
+ - name: Bump versions
+ uses: SiqiLu/dotnet-bump-version@2.0.0
+ with:
+ version_files: Kavita.Common/Kavita.Common.csproj
+ github_token: ${{ secrets.REPO_GHA_PAT }}
+ version_mask: "0.0.0.1"
+
+ canary:
+ name: Build Canary Docker
+ needs: [ build, version ]
+ runs-on: ubuntu-24.04
+ permissions:
+ packages: write
+ contents: read
+ if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/canary' }}
+ steps:
+ - name: Find Current Pull Request
+ uses: jwalton/gh-find-current-pr@v1
+ id: findPr
+ with:
+ state: all
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Check Out Repo
+ uses: actions/checkout@v4
+ with:
+ ref: canary
+
+ - name: NodeJS to Compile WebUI
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ - run: |
+ cd UI/Web || exit
+ echo 'Installing web dependencies'
+ npm install --legacy-peer-deps
+
+ echo 'Building UI'
+ npm run prod
+
+ echo 'Copying back to Kavita wwwroot'
+ rsync -a dist/ ../../API/wwwroot/
+
+ cd ../ || exit
+
+ - name: Get csproj Version
+ uses: kzrnm/get-net-sdk-project-versions-action@v2
+ id: get-version
+ with:
+ proj-path: Kavita.Common/Kavita.Common.csproj
+
+ - name: Parse Version
+ run: |
+ version='${{steps.get-version.outputs.assembly-version}}'
+ echo "VERSION=$version" >> $GITHUB_OUTPUT
+ id: parse-version
+
+ - name: Echo csproj version
+ run: echo "${{steps.get-version.outputs.assembly-version}}"
+
+ - name: Compile dotnet app
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+
+ - name: Install Swashbuckle CLI
+ run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
+
+ - run: ./monorepo-build.sh
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Build and push
+ id: docker_build
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ platforms: linux/amd64,linux/arm/v7,linux/arm64
+ push: true
+ tags: jvmilazz0/kavita:canary, jvmilazz0/kavita:canary-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:canary, ghcr.io/kareadita/kavita:canary-${{ steps.parse-version.outputs.VERSION }}
+
+ - name: Image digest
+ run: echo ${{ steps.docker_build.outputs.digest }}
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 000000000..7ce4276bc
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,87 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+ push:
+ branches: [ "develop"]
+ pull_request:
+ # The branches below must be a subset of the branches above
+ branches: [ "develop" ]
+ schedule:
+ - cron: '33 12 * * 5'
+
+jobs:
+ analyze:
+ name: Analyze
+ # Runner size impacts CodeQL analysis time. To learn more, please see:
+ # - https://gh.io/recommended-hardware-resources-for-running-codeql
+ # - https://gh.io/supported-runners-and-hardware-resources
+ # - https://gh.io/using-larger-runners
+ # Consider using larger runners for possible analysis time improvements.
+ runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
+ timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
+ permissions:
+ actions: read
+ contents: read
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: [ 'csharp', 'javascript-typescript' ]
+ # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
+ # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
+ # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
+ # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
+
+ # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+ # queries: security-extended,security-and-quality
+
+
+ # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v3
+
+ # ℹ️ Command-line programs to run using the OS shell.
+ # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+
+ # If the Autobuild fails above, remove it and uncomment the following three lines.
+# and modify them (or add more) to build your code. If your project requires custom build steps, refer to the EXAMPLE below for guidance.
+
+ - run: |
+ echo "Run, Build Application using script"
+ dotnet build Kavita.sln
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v3
+ with:
+ category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/develop-workflow.yml b/.github/workflows/develop-workflow.yml
new file mode 100644
index 000000000..006127645
--- /dev/null
+++ b/.github/workflows/develop-workflow.yml
@@ -0,0 +1,191 @@
+name: Nightly Workflow
+
+on:
+ push:
+ branches: [ 'develop', '!release/**' ]
+ workflow_dispatch:
+
+jobs:
+ debug:
+ runs-on: ubuntu-24.04
+ steps:
+ - name: Debug Info
+ run: |
+ echo "Event Name: ${{ github.event_name }}"
+ echo "Ref: ${{ github.ref }}"
+ echo "Not Contains Release: ${{ !contains(github.head_ref, 'release') }}"
+ echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
+ build:
+ name: Upload Kavita.Common for Version Bump
+ runs-on: ubuntu-24.04
+ if: github.ref == 'refs/heads/develop'
+ steps:
+ - name: Checkout Repo
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: csproj
+ path: Kavita.Common/Kavita.Common.csproj
+
+ version:
+ name: Bump version
+ needs: [ build ]
+ runs-on: ubuntu-24.04
+ if: github.ref == 'refs/heads/develop'
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Setup .NET Core
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+
+ - name: Bump versions
+ uses: majora2007/dotnet-bump-version@v0.0.10
+ with:
+ version_files: Kavita.Common/Kavita.Common.csproj
+ github_token: ${{ secrets.REPO_GHA_PAT }}
+ version_mask: "0.0.0.1"
+
+ develop:
+ name: Build Nightly Docker
+ needs: [ build, version ]
+ runs-on: ubuntu-24.04
+ if: github.ref == 'refs/heads/develop'
+ permissions:
+ packages: write
+ contents: read
+ steps:
+ - name: Find Current Pull Request
+ uses: jwalton/gh-find-current-pr@v1
+ id: findPr
+ with:
+ state: all
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Parse PR body
+ id: parse-body
+ run: |
+ body="${{ steps.findPr.outputs.body }}"
+ if [[ ${#body} -gt 1870 ]] ; then
+ body=${body:0:1870}
+ body="${body}...and much more.
+
+ Read full changelog: https://github.com/Kareadita/Kavita/pull/${{ steps.findPr.outputs.pr }}"
+ fi
+
+ body=${body//\'/}
+ body=${body//'%'/'%25'}
+ body=${body//$'\n'/'%0A'}
+ body=${body//$'\r'/'%0D'}
+ body=${body//$'`'/'%60'}
+ body=${body//$'>'/'%3E'}
+ echo $body
+ echo "BODY=$body" >> $GITHUB_OUTPUT
+
+ - name: Check Out Repo
+ uses: actions/checkout@v4
+ with:
+ ref: develop
+
+ - name: NodeJS to Compile WebUI
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ - run: |
+ cd UI/Web || exit
+ echo 'Installing web dependencies'
+ npm ci
+
+ echo 'Building UI'
+ npm run prod
+
+ echo 'Copying back to Kavita wwwroot'
+ rsync -a dist/ ../../API/wwwroot/
+
+ cd ../ || exit
+
+ - name: Get csproj Version
+ uses: kzrnm/get-net-sdk-project-versions-action@v2
+ id: get-version
+ with:
+ proj-path: Kavita.Common/Kavita.Common.csproj
+
+ - name: Parse Version
+ run: |
+ version='${{steps.get-version.outputs.assembly-version}}'
+ echo "VERSION=$version" >> $GITHUB_OUTPUT
+ id: parse-version
+
+ - name: Echo csproj version
+ run: echo "${{steps.get-version.outputs.assembly-version}}"
+
+ - name: Compile dotnet app
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+
+ - name: Install Swashbuckle CLI
+ run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
+
+ - run: ./monorepo-build.sh
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v3
+ if: ${{ github.repository_owner == 'Kareadita' }}
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: docker_meta_nightly
+ uses: docker/metadata-action@v5
+ with:
+ tags: |
+ type=raw,value=nightly
+ type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
+ images: |
+ name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
+ name=ghcr.io/${{ github.repository }}
+
+ - name: Build and push
+ id: docker_build
+ uses: docker/build-push-action@v6
+ with:
+ context: .
+ platforms: linux/amd64,linux/arm/v7,linux/arm64
+ push: true
+ tags: ${{ steps.docker_meta_nightly.outputs.tags }}
+ labels: ${{ steps.docker_meta_nightly.outputs.labels }}
+
+ - name: Image digest
+ run: echo ${{ steps.docker_build.outputs.digest }}
+
+ - name: Notify Discord
+ uses: rjstone/discord-webhook-notify@v1
+ if: ${{ github.repository_owner == 'Kareadita' }}
+ with:
+ severity: info
+ description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
+ details: '${{ steps.findPr.outputs.body }}'
+ text: <@&939225459156217917> <@&939225350775406643> A new nightly build has been released for docker.
+ webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
diff --git a/.github/workflows/openapi-gen.yml b/.github/workflows/openapi-gen.yml
new file mode 100644
index 000000000..45446d045
--- /dev/null
+++ b/.github/workflows/openapi-gen.yml
@@ -0,0 +1,68 @@
+name: Generate OpenAPI Documentation
+
+on:
+ push:
+ branches: [ 'develop', '!release/**' ]
+ paths:
+ - '**/*.cs'
+ - '**/*.csproj'
+ pull_request:
+ branches: [ 'develop', '!release/**' ]
+ workflow_dispatch:
+
+jobs:
+ generate-openapi:
+ runs-on: ubuntu-latest
+ # Only run on direct pushes to develop, not PRs
+ if: (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && github.repository_owner == 'Kareadita'
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+
+ - name: Install dependencies
+ run: dotnet restore
+
+ - name: Build project
+ run: dotnet build API/API.csproj --configuration Debug
+
+ - name: Get Swashbuckle version
+ id: swashbuckle-version
+ run: |
+          VERSION=$(grep -oP '<PackageReference Include="Swashbuckle.AspNetCore" Version="\K[^"]+' API/API.csproj | head -n 1)
+          echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
+ echo "Found Swashbuckle.AspNetCore version: $VERSION"
+
+ - name: Install matching Swashbuckle CLI tool
+ run: |
+ dotnet new tool-manifest --force
+ dotnet tool install Swashbuckle.AspNetCore.Cli --version ${{ steps.swashbuckle-version.outputs.VERSION }}
+
+ - name: Generate OpenAPI file
+ run: dotnet swagger tofile --output openapi.json API/bin/Debug/net9.0/API.dll v1
+
+ - name: Check for changes
+ id: git-check
+ run: |
+ git add openapi.json
+ git diff --staged --quiet openapi.json || echo "has_changes=true" >> $GITHUB_OUTPUT
+
+ - name: Commit and push if changed
+ if: steps.git-check.outputs.has_changes == 'true'
+ run: |
+ git config --local user.email "action@github.com"
+ git config --local user.name "GitHub Action"
+
+ git commit -m "Update OpenAPI documentation" openapi.json
+
+ # Pull latest changes with rebase to avoid merge commits
+ git pull --rebase origin develop
+
+ git push
+ env:
+ GITHUB_TOKEN: ${{ secrets.REPO_GHA_PAT }}
diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml
new file mode 100644
index 000000000..51589221f
--- /dev/null
+++ b/.github/workflows/pr-check.yml
@@ -0,0 +1,20 @@
+name: Validate PR Body
+
+on:
+ pull_request:
+ branches: [ main, develop, canary ]
+ types: [synchronize]
+
+jobs:
+ check_pr:
+ runs-on: ubuntu-24.04
+ steps:
+ - name: Extract branch name
+ shell: bash
+ run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
+ id: extract_branch
+ - name: Check PR Body
+ uses: JJ/github-pr-contains-action@releases/v10
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ bodyDoesNotContain: "[\"|`]"
diff --git a/.github/workflows/release-workflow.yml b/.github/workflows/release-workflow.yml
new file mode 100644
index 000000000..757ce1075
--- /dev/null
+++ b/.github/workflows/release-workflow.yml
@@ -0,0 +1,182 @@
+name: Stable Workflow
+
+on:
+ push:
+ branches: ['release/**']
+ pull_request:
+ branches: [ 'develop' ]
+ types: [ closed ]
+ workflow_dispatch:
+
+jobs:
+ debug:
+ runs-on: ubuntu-24.04
+ steps:
+ - name: Debug Info
+ run: |
+ echo "Event Name: ${{ github.event_name }}"
+ echo "Ref: ${{ github.ref }}"
+ echo "Not Contains Release: ${{ !contains(github.head_ref, 'release') }}"
+ echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
+ if_merged:
+ if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
+ runs-on: ubuntu-24.04
+ steps:
+ - run: |
+ echo The PR was merged
+ build:
+ name: Upload Kavita.Common for Version Bump
+ runs-on: ubuntu-24.04
+ if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
+ steps:
+ - name: Checkout Repo
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: csproj
+ path: Kavita.Common/Kavita.Common.csproj
+
+ stable:
+ name: Build Stable and Nightly Docker if Release
+ needs: [ build ]
+ if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
+ runs-on: ubuntu-24.04
+ permissions:
+ packages: write
+ contents: read
+ steps:
+ - name: Find Current Pull Request
+ uses: jwalton/gh-find-current-pr@v1
+ id: findPr
+ with:
+ state: all
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Parse PR body
+ id: parse-body
+ run: |
+ body="Read full changelog: https://github.com/Kareadita/Kavita/releases/latest"
+
+ echo $body
+ echo "BODY=$body" >> $GITHUB_OUTPUT
+
+ - name: Check Out Repo
+ uses: actions/checkout@v4
+ with:
+ ref: develop
+
+ - name: NodeJS to Compile WebUI
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ - run: |
+
+ cd UI/Web || exit
+ echo 'Installing web dependencies'
+ npm ci
+
+ echo 'Building UI'
+ npm run prod
+
+ echo 'Copying back to Kavita wwwroot'
+ rsync -a dist/ ../../API/wwwroot/
+
+ cd ../ || exit
+
+ - name: Get csproj Version
+ uses: kzrnm/get-net-sdk-project-versions-action@v2
+ id: get-version
+ with:
+ proj-path: Kavita.Common/Kavita.Common.csproj
+
+ - name: Echo csproj version
+ run: echo "${{steps.get-version.outputs.assembly-version}}"
+
+ - name: Parse Version
+ run: |
+ version='${{steps.get-version.outputs.assembly-version}}'
+ newVersion=${version%.*}
+ echo $newVersion
+ echo "VERSION=$newVersion" >> $GITHUB_OUTPUT
+ id: parse-version
+
+ - name: Compile dotnet app
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 9.0.x
+ - name: Install Swashbuckle CLI
+ run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
+
+ - run: ./monorepo-build.sh
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v3
+ if: ${{ github.repository_owner == 'Kareadita' }}
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: docker_meta_stable
+ uses: docker/metadata-action@v5
+ with:
+ tags: |
+ type=raw,value=latest
+ type=raw,value=${{ steps.parse-version.outputs.VERSION }}
+ images: |
+ name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
+ name=ghcr.io/${{ github.repository }}
+
+ - name: Build and push stable
+ id: docker_build_stable
+ uses: docker/build-push-action@v6
+ with:
+ context: .
+ platforms: linux/amd64,linux/arm/v7,linux/arm64
+ push: true
+ tags: ${{ steps.docker_meta_stable.outputs.tags }}
+ labels: ${{ steps.docker_meta_stable.outputs.labels }}
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: docker_meta_nightly
+ uses: docker/metadata-action@v5
+ with:
+ tags: |
+ type=raw,value=nightly
+ type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
+ images: |
+ name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
+ name=ghcr.io/${{ github.repository }}
+
+ - name: Build and push nightly
+ id: docker_build_nightly
+ uses: docker/build-push-action@v6
+ with:
+ context: .
+ platforms: linux/amd64,linux/arm/v7,linux/arm64
+ push: true
+ tags: ${{ steps.docker_meta_nightly.outputs.tags }}
+ labels: ${{ steps.docker_meta_nightly.outputs.labels }}
+
+ - name: Image digest
+ run: echo ${{ steps.docker_build_stable.outputs.digest }}
+
+ - name: Image digest
+ run: echo ${{ steps.docker_build_nightly.outputs.digest }}
diff --git a/.github/workflows/sonar-scan.yml b/.github/workflows/sonar-scan.yml
deleted file mode 100644
index e0f98f393..000000000
--- a/.github/workflows/sonar-scan.yml
+++ /dev/null
@@ -1,344 +0,0 @@
-name: .NET Build Test and Sonar Scan
-
-on:
- push:
- branches: '**'
- pull_request:
- branches: [ main, develop ]
- types: [synchronize]
-
-jobs:
- build:
- name: Build .Net
- runs-on: windows-latest
- steps:
- - name: Checkout Repo
- uses: actions/checkout@v2
- with:
- fetch-depth: 0
-
- - name: Setup .NET Core
- uses: actions/setup-dotnet@v2
- with:
- dotnet-version: 6.0.x
-
- - name: Install dependencies
- run: dotnet restore
-
- - name: Set up JDK 11
- uses: actions/setup-java@v1
- with:
- java-version: 1.11
-
- - uses: actions/upload-artifact@v2
- with:
- name: csproj
- path: Kavita.Common/Kavita.Common.csproj
-
- test:
- name: Install Sonar & Test
- needs: build
- runs-on: windows-latest
- steps:
- - name: Checkout Repo
- uses: actions/checkout@v2
- with:
- fetch-depth: 0
-
- - name: Setup .NET Core
- uses: actions/setup-dotnet@v2
- with:
- dotnet-version: 6.0.x
-
- - name: Install dependencies
- run: dotnet restore
-
- - name: Set up JDK 11
- uses: actions/setup-java@v1
- with:
- java-version: 1.11
-
- - name: Cache SonarCloud packages
- uses: actions/cache@v1
- with:
- path: ~\sonar\cache
- key: ${{ runner.os }}-sonar
- restore-keys: ${{ runner.os }}-sonar
-
- - name: Cache SonarCloud scanner
- id: cache-sonar-scanner
- uses: actions/cache@v1
- with:
- path: .\.sonar\scanner
- key: ${{ runner.os }}-sonar-scanner
- restore-keys: ${{ runner.os }}-sonar-scanner
-
- - name: Install SonarCloud scanner
- if: steps.cache-sonar-scanner.outputs.cache-hit != 'true'
- shell: powershell
- run: |
- New-Item -Path .\.sonar\scanner -ItemType Directory
- dotnet tool update dotnet-sonarscanner --tool-path .\.sonar\scanner
-
- - name: Sonar Scan
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
- shell: powershell
- run: |
- .\.sonar\scanner\dotnet-sonarscanner begin /k:"Kareadita_Kavita" /o:"kareadita" /d:sonar.login="${{ secrets.SONAR_TOKEN }}" /d:sonar.host.url="https://sonarcloud.io"
- dotnet build --configuration Release
- .\.sonar\scanner\dotnet-sonarscanner end /d:sonar.login="${{ secrets.SONAR_TOKEN }}"
-
- - name: Test
- run: dotnet test --no-restore --verbosity normal
-
- version:
- name: Bump version on Develop push
- needs: [ build, test ]
- runs-on: ubuntu-latest
- if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
- steps:
- - uses: actions/checkout@v2
- with:
- fetch-depth: 0
-
- - name: Setup .NET Core
- uses: actions/setup-dotnet@v2
- with:
- dotnet-version: 6.0.x
-
- - name: Install dependencies
- run: dotnet restore
-
- - name: Build
- run: dotnet build --configuration Release --no-restore
-
- - name: Bump versions
- uses: SiqiLu/dotnet-bump-version@2.0.0
- with:
- version_files: Kavita.Common/Kavita.Common.csproj
- github_token: ${{ secrets.REPO_GHA_PAT }}
- version_mask: "0.0.0.1"
-
- develop:
- name: Build Nightly Docker if Develop push
- needs: [ build, version ]
- runs-on: ubuntu-latest
- if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }}
- steps:
- - name: Find Current Pull Request
- uses: jwalton/gh-find-current-pr@v1.0.2
- id: findPr
- with:
- github-token: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Parse PR body
- id: parse-body
- run: |
- body="${{ steps.findPr.outputs.body }}"
- if [[ ${#body} -gt 1870 ]] ; then
- body=${body:0:1870}
- body="${body}...and much more.
-
- Read full changelog: https://github.com/Kareadita/Kavita/pull/${{ steps.findPr.outputs.pr }}"
- fi
-
- body=${body//\'/}
- body=${body//'%'/'%25'}
- body=${body//$'\n'/'%0A'}
- body=${body//$'\r'/'%0D'}
- body=${body//$'`'/'%60'}
- body=${body//$'>'/'%3E'}
- echo $body
- echo "::set-output name=BODY::$body"
-
- - name: Check Out Repo
- uses: actions/checkout@v2
- with:
- ref: develop
-
- - name: NodeJS to Compile WebUI
- uses: actions/setup-node@v2.1.5
- with:
- node-version: '14'
- - run: |
- cd UI/Web || exit
- echo 'Installing web dependencies'
- npm install
-
- echo 'Building UI'
- npm run prod
-
- echo 'Copying back to Kavita wwwroot'
- rsync -a dist/ ../../API/wwwroot/
-
- cd ../ || exit
-
- - name: Get csproj Version
- uses: naminodarie/get-net-sdk-project-versions-action@v1
- id: get-version
- with:
- proj-path: Kavita.Common/Kavita.Common.csproj
-
- - name: Parse Version
- run: |
- version='${{steps.get-version.outputs.assembly-version}}'
- echo "::set-output name=VERSION::$version"
- id: parse-version
-
- - name: Echo csproj version
- run: echo "${{steps.get-version.outputs.assembly-version}}"
-
- - name: Compile dotnet app
- uses: actions/setup-dotnet@v2
- with:
- dotnet-version: 6.0.x
- - run: ./monorepo-build.sh
-
- - name: Login to Docker Hub
- uses: docker/login-action@v1
- with:
- username: ${{ secrets.DOCKER_HUB_USERNAME }}
- password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v1
-
- - name: Set up Docker Buildx
- id: buildx
- uses: docker/setup-buildx-action@v1
-
- - name: Build and push
- id: docker_build
- uses: docker/build-push-action@v2
- with:
- context: .
- platforms: linux/amd64,linux/arm/v7,linux/arm64
- push: true
- tags: kizaing/kavita:nightly, kizaing/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
-
- - name: Image digest
- run: echo ${{ steps.docker_build.outputs.digest }}
-
- - name: Notify Discord
- uses: rjstone/discord-webhook-notify@v1
- with:
- severity: info
- description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
- details: '${{ steps.parse-body.outputs.BODY }}'
- text: <@&939225459156217917> <@&939225350775406643> A new nightly build has been released for docker.
- webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
-
- stable:
- name: Build Stable Docker if Main push
- needs: [ build ]
- runs-on: ubuntu-latest
- if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
- steps:
-
- - name: Find Current Pull Request
- uses: jwalton/gh-find-current-pr@v1.0.2
- id: findPr
- with:
- github-token: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Parse PR body
- id: parse-body
- run: |
- body="${{ steps.findPr.outputs.body }}"
- if [[ ${#body} -gt 1870 ]] ; then
- body=${body:0:1870}
- body="${body}...and much more.
-
- Read full changelog: https://github.com/Kareadita/Kavita/releases/latest"
- fi
-
- body=${body//\'/}
- body=${body//'%'/'%25'}
- body=${body//$'\n'/'%0A'}
- body=${body//$'\r'/'%0D'}
- body=${body//$'`'/'%60'}
- body=${body//$'>'/'%3E'}
- echo $body
- echo "::set-output name=BODY::$body"
-
- - name: Check Out Repo
- uses: actions/checkout@v2
- with:
- ref: main
-
- - name: NodeJS to Compile WebUI
- uses: actions/setup-node@v2.1.5
- with:
- node-version: '14'
- - run: |
-
- cd UI/Web || exit
- echo 'Installing web dependencies'
- npm install
-
- echo 'Building UI'
- npm run prod
-
- echo 'Copying back to Kavita wwwroot'
- rsync -a dist/ ../../API/wwwroot/
-
- cd ../ || exit
-
- - name: Get csproj Version
- uses: naminodarie/get-net-sdk-project-versions-action@v1
- id: get-version
- with:
- proj-path: Kavita.Common/Kavita.Common.csproj
-
- - name: Echo csproj version
- run: echo "${{steps.get-version.outputs.assembly-version}}"
-
- - name: Parse Version
- run: |
- version='${{steps.get-version.outputs.assembly-version}}'
- newVersion=${version%.*}
- echo $newVersion
- echo "::set-output name=VERSION::$newVersion"
- id: parse-version
-
- - name: Compile dotnet app
- uses: actions/setup-dotnet@v2
- with:
- dotnet-version: 6.0.x
- - run: ./monorepo-build.sh
-
- - name: Login to Docker Hub
- uses: docker/login-action@v1
- with:
- username: ${{ secrets.DOCKER_HUB_USERNAME }}
- password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v1
-
- - name: Set up Docker Buildx
- id: buildx
- uses: docker/setup-buildx-action@v1
-
- - name: Build and push
- id: docker_build
- uses: docker/build-push-action@v2
- with:
- context: .
- platforms: linux/amd64,linux/arm/v7,linux/arm64
- push: true
- tags: kizaing/kavita:latest, kizaing/kavita:${{ steps.parse-version.outputs.VERSION }}
-
- - name: Image digest
- run: echo ${{ steps.docker_build.outputs.digest }}
-
- - name: Notify Discord
- uses: rjstone/discord-webhook-notify@v1
- with:
- severity: info
- description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
- details: '${{ steps.parse-body.outputs.BODY }}'
- text: <@&939225192553644133> A new stable build has been released.
- webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
diff --git a/.gitignore b/.gitignore
index 078b6108c..1cffb441d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -512,6 +512,8 @@ UI/Web/dist/
/API/config/themes/
/API/config/stats/
/API/config/bookmarks/
+/API/config/favicons/
+/API/config/cache-long/
/API/config/kavita.db
/API/config/kavita.db-shm
/API/config/kavita.db-wal
@@ -519,15 +521,26 @@ UI/Web/dist/
/API/config/*.db
/API/config/*.bak
/API/config/*.backup
+/API/config/*.csv
/API/config/Hangfire.db
/API/config/Hangfire-log.db
API/config/covers/
+API/config/images/*
API/config/stats/*
API/config/stats/app_stats.json
API/config/pre-metadata/
API/config/post-metadata/
+API/config/*.csv
API.Tests/TestResults/
UI/Web/.vscode/settings.json
/API.Tests/Services/Test Data/ArchiveService/CoverImages/output/*
UI/Web/.angular/
-BenchmarkDotNet.Artifacts
\ No newline at end of file
+BenchmarkDotNet.Artifacts
+
+
+API.Tests/Services/Test Data/ImageService/**/*_output*
+API.Tests/Services/Test Data/ImageService/**/*_baseline*
+API.Tests/Services/Test Data/ImageService/**/*.html
+
+
+API.Tests/Services/Test Data/ScannerService/ScanTests/**/*
diff --git a/.sonarcloud.properties b/.sonarcloud.properties
new file mode 100644
index 000000000..1876ac55a
--- /dev/null
+++ b/.sonarcloud.properties
@@ -0,0 +1,15 @@
+# Path to sources
+sonar.sources=.
+sonar.exclusions=API.Benchmark
+#sonar.inclusions=
+
+# Path to tests
+sonar.tests=API.Tests
+#sonar.test.exclusions=
+#sonar.test.inclusions=
+
+# Source encoding
+sonar.sourceEncoding=UTF-8
+
+# Exclusions for copy-paste detection
+#sonar.cpd.exclusions=
diff --git a/API.Benchmark/API.Benchmark.csproj b/API.Benchmark/API.Benchmark.csproj
index 11ef151a2..ec9c1884f 100644
--- a/API.Benchmark/API.Benchmark.csproj
+++ b/API.Benchmark/API.Benchmark.csproj
@@ -1,7 +1,7 @@
- net6.0
+ net9.0
Exe
@@ -10,9 +10,9 @@
-
-
-
+
+
+
@@ -26,5 +26,10 @@
Always
+
+
+ PreserveNewest
+
+
diff --git a/API.Benchmark/ArchiveServiceBenchmark.cs b/API.Benchmark/ArchiveServiceBenchmark.cs
index d8418ee26..ccb44d517 100644
--- a/API.Benchmark/ArchiveServiceBenchmark.cs
+++ b/API.Benchmark/ArchiveServiceBenchmark.cs
@@ -1,9 +1,16 @@
using System;
+using System.IO;
using System.IO.Abstractions;
using Microsoft.Extensions.Logging.Abstractions;
using API.Services;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Order;
+using EasyCaching.Core;
+using NSubstitute;
+using SixLabors.ImageSharp;
+using SixLabors.ImageSharp.Formats.Png;
+using SixLabors.ImageSharp.Formats.Webp;
+using SixLabors.ImageSharp.Processing;
namespace API.Benchmark;
@@ -11,18 +18,22 @@ namespace API.Benchmark;
[MemoryDiagnoser]
[RankColumn]
[Orderer(SummaryOrderPolicy.FastestToSlowest)]
-[SimpleJob(launchCount: 1, warmupCount: 5, targetCount: 20)]
+[SimpleJob(launchCount: 1, warmupCount: 5, invocationCount: 20)]
public class ArchiveServiceBenchmark
{
private readonly ArchiveService _archiveService;
private readonly IDirectoryService _directoryService;
private readonly IImageService _imageService;
+ private readonly PngEncoder _pngEncoder = new PngEncoder();
+ private readonly WebpEncoder _webPEncoder = new WebpEncoder();
+ private const string SourceImage = "C:/Users/josep/Pictures/obey_by_grrsa-d6llkaa_colored_by_me.png";
+
public ArchiveServiceBenchmark()
{
_directoryService = new DirectoryService(null, new FileSystem());
_imageService = new ImageService(null, _directoryService);
- _archiveService = new ArchiveService(new NullLogger(), _directoryService, _imageService);
+ _archiveService = new ArchiveService(new NullLogger(), _directoryService, _imageService, Substitute.For());
}
[Benchmark(Baseline = true)]
@@ -49,6 +60,52 @@ public class ArchiveServiceBenchmark
}
}
+ [Benchmark]
+ public void ImageSharp_ExtractImage_PNG()
+ {
+ var outputDirectory = "C:/Users/josep/Pictures/imagesharp/";
+ _directoryService.ExistOrCreate(outputDirectory);
+
+ using var stream = new FileStream(SourceImage, FileMode.Open);
+ using var thumbnail2 = SixLabors.ImageSharp.Image.Load(stream);
+ thumbnail2.Mutate(x => x.Resize(320, 0));
+ thumbnail2.Save(_directoryService.FileSystem.Path.Join(outputDirectory, "imagesharp.png"), _pngEncoder);
+ }
+
+ [Benchmark]
+ public void ImageSharp_ExtractImage_WebP()
+ {
+ var outputDirectory = "C:/Users/josep/Pictures/imagesharp/";
+ _directoryService.ExistOrCreate(outputDirectory);
+
+ using var stream = new FileStream(SourceImage, FileMode.Open);
+ using var thumbnail2 = SixLabors.ImageSharp.Image.Load(stream);
+ thumbnail2.Mutate(x => x.Resize(320, 0));
+ thumbnail2.Save(_directoryService.FileSystem.Path.Join(outputDirectory, "imagesharp.webp"), _webPEncoder);
+ }
+
+ [Benchmark]
+ public void NetVips_ExtractImage_PNG()
+ {
+ var outputDirectory = "C:/Users/josep/Pictures/netvips/";
+ _directoryService.ExistOrCreate(outputDirectory);
+
+ using var stream = new FileStream(SourceImage, FileMode.Open);
+ using var thumbnail = NetVips.Image.ThumbnailStream(stream, 320);
+ thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(outputDirectory, "netvips.png"));
+ }
+
+ [Benchmark]
+ public void NetVips_ExtractImage_WebP()
+ {
+ var outputDirectory = "C:/Users/josep/Pictures/netvips/";
+ _directoryService.ExistOrCreate(outputDirectory);
+
+ using var stream = new FileStream(SourceImage, FileMode.Open);
+ using var thumbnail = NetVips.Image.ThumbnailStream(stream, 320);
+ thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(outputDirectory, "netvips.webp"));
+ }
+
// Benchmark to test default GetNumberOfPages from archive
// vs a new method where I try to open the archive and return said stream
}
diff --git a/API.Benchmark/CleanTitleBenchmark.cs b/API.Benchmark/CleanTitleBenchmark.cs
index 90310a9ef..c3a383647 100644
--- a/API.Benchmark/CleanTitleBenchmark.cs
+++ b/API.Benchmark/CleanTitleBenchmark.cs
@@ -1,9 +1,6 @@
-using System;
-using System.Collections.Generic;
+using System.Collections.Generic;
using System.IO;
-using System.Text.RegularExpressions;
using BenchmarkDotNet.Attributes;
-using BenchmarkDotNet.Order;
namespace API.Benchmark;
diff --git a/API.Benchmark/Data/AesopsFables.epub b/API.Benchmark/Data/AesopsFables.epub
new file mode 100644
index 000000000..d2ab9a8b2
Binary files /dev/null and b/API.Benchmark/Data/AesopsFables.epub differ
diff --git a/API.Benchmark/EpubBenchmark.cs b/API.Benchmark/EpubBenchmark.cs
deleted file mode 100644
index fd4fe4da4..000000000
--- a/API.Benchmark/EpubBenchmark.cs
+++ /dev/null
@@ -1,68 +0,0 @@
-using System;
-using System.Linq;
-using System.Threading.Tasks;
-using API.Services;
-using BenchmarkDotNet.Attributes;
-using BenchmarkDotNet.Order;
-using HtmlAgilityPack;
-using VersOne.Epub;
-
-namespace API.Benchmark;
-
-[MemoryDiagnoser]
-[Orderer(SummaryOrderPolicy.FastestToSlowest)]
-[RankColumn]
-[SimpleJob(launchCount: 1, warmupCount: 3, targetCount: 5, invocationCount: 100, id: "Epub"), ShortRunJob]
-public class EpubBenchmark
-{
- [Benchmark]
- public static async Task GetWordCount_PassByString()
- {
- using var book = await EpubReader.OpenBookAsync("Data/book-test.epub", BookService.BookReaderOptions);
- foreach (var bookFile in book.Content.Html.Values)
- {
- Console.WriteLine(GetBookWordCount_PassByString(await bookFile.ReadContentAsTextAsync()));
- ;
- }
- }
-
- [Benchmark]
- public static async Task GetWordCount_PassByRef()
- {
- using var book = await EpubReader.OpenBookAsync("Data/book-test.epub", BookService.BookReaderOptions);
- foreach (var bookFile in book.Content.Html.Values)
- {
- Console.WriteLine(await GetBookWordCount_PassByRef(bookFile));
- }
- }
-
- private static int GetBookWordCount_PassByString(string fileContents)
- {
- var doc = new HtmlDocument();
- doc.LoadHtml(fileContents);
- var delimiter = new char[] {' '};
-
- return doc.DocumentNode.SelectNodes("//body//text()[not(parent::script)]")
- .Select(node => node.InnerText)
- .Select(text => text.Split(delimiter, StringSplitOptions.RemoveEmptyEntries)
- .Where(s => char.IsLetter(s[0])))
- .Select(words => words.Count())
- .Where(wordCount => wordCount > 0)
- .Sum();
- }
-
- private static async Task GetBookWordCount_PassByRef(EpubContentFileRef bookFile)
- {
- var doc = new HtmlDocument();
- doc.LoadHtml(await bookFile.ReadContentAsTextAsync());
- var delimiter = new char[] {' '};
-
- return doc.DocumentNode.SelectNodes("//body//text()[not(parent::script)]")
- .Select(node => node.InnerText)
- .Select(text => text.Split(delimiter, StringSplitOptions.RemoveEmptyEntries)
- .Where(s => char.IsLetter(s[0])))
- .Select(words => words.Count())
- .Where(wordCount => wordCount > 0)
- .Sum();
- }
-}
diff --git a/API.Benchmark/KoreaderHashBenchmark.cs b/API.Benchmark/KoreaderHashBenchmark.cs
new file mode 100644
index 000000000..c0abfd2ad
--- /dev/null
+++ b/API.Benchmark/KoreaderHashBenchmark.cs
@@ -0,0 +1,41 @@
+using API.Helpers.Builders;
+using BenchmarkDotNet.Attributes;
+using BenchmarkDotNet.Order;
+using System;
+using API.Entities.Enums;
+
+namespace API.Benchmark
+{
+ [StopOnFirstError]
+ [MemoryDiagnoser]
+ [RankColumn]
+ [Orderer(SummaryOrderPolicy.FastestToSlowest)]
+ [SimpleJob(launchCount: 1, warmupCount: 5, invocationCount: 20)]
+ public class KoreaderHashBenchmark
+ {
+ private const string sourceEpub = "./Data/AesopsFables.epub";
+
+ [Benchmark(Baseline = true)]
+ public void TestBuildManga_baseline()
+ {
+ var file = new MangaFileBuilder(sourceEpub, MangaFormat.Epub)
+ .Build();
+ if (file == null)
+ {
+ throw new Exception("Failed to build manga file");
+ }
+ }
+
+ [Benchmark]
+ public void TestBuildManga_withHash()
+ {
+ var file = new MangaFileBuilder(sourceEpub, MangaFormat.Epub)
+ .WithHash()
+ .Build();
+ if (file == null)
+ {
+ throw new Exception("Failed to build manga file");
+ }
+ }
+ }
+}
diff --git a/API.Benchmark/ParserBenchmarks.cs b/API.Benchmark/ParserBenchmarks.cs
index d7706a3f4..0dabc560b 100644
--- a/API.Benchmark/ParserBenchmarks.cs
+++ b/API.Benchmark/ParserBenchmarks.cs
@@ -74,5 +74,24 @@ public class ParserBenchmarks
}
}
+ [Benchmark]
+ public void Test_CharacterReplace()
+ {
+ foreach (var name in _names)
+ {
+ var d = name.Contains('a');
+ }
+ }
+
+ [Benchmark]
+ public void Test_StringReplace()
+ {
+ foreach (var name in _names)
+ {
+
+ var d = name.Contains("a");
+ }
+ }
+
}
diff --git a/API.Benchmark/TestBenchmark.cs b/API.Benchmark/TestBenchmark.cs
index 0b4880690..511d250aa 100644
--- a/API.Benchmark/TestBenchmark.cs
+++ b/API.Benchmark/TestBenchmark.cs
@@ -25,7 +25,7 @@ public class TestBenchmark
{
list.Add(new VolumeDto()
{
- Number = random.Next(10) > 5 ? 1 : 0,
+ MinNumber = random.Next(10) > 5 ? 1 : 0,
Chapters = GenerateChapters()
});
}
@@ -49,7 +49,7 @@ public class TestBenchmark
private static void SortSpecialChapters(IEnumerable volumes)
{
- foreach (var v in volumes.Where(vDto => vDto.Number == 0))
+ foreach (var v in volumes.WhereNotLooseLeaf())
{
v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList();
}
diff --git a/API.Tests/API.Tests.csproj b/API.Tests/API.Tests.csproj
index 6380fc95f..a571a6e72 100644
--- a/API.Tests/API.Tests.csproj
+++ b/API.Tests/API.Tests.csproj
@@ -1,38 +1,45 @@
- net6.0
-
+ net9.0
false
-
-
-
-
-
-
+
+
+
+
+
+
+
runtime; build; native; contentfiles; analyzers; buildtransitive
all
-
+
runtime; build; native; contentfiles; analyzers; buildtransitive
all
-
+
-
-
+
+
+
-
+
+
+
+
+
+ PreserveNewest
+
diff --git a/API.Tests/AbstractDbTest.cs b/API.Tests/AbstractDbTest.cs
new file mode 100644
index 000000000..9c5f3e726
--- /dev/null
+++ b/API.Tests/AbstractDbTest.cs
@@ -0,0 +1,136 @@
+using System;
+using System.Data.Common;
+using System.Linq;
+using System.Threading.Tasks;
+using API.Data;
+using API.Entities;
+using API.Entities.Enums;
+using API.Helpers;
+using API.Helpers.Builders;
+using API.Services;
+using AutoMapper;
+using Hangfire;
+using Microsoft.Data.Sqlite;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+
+namespace API.Tests;
+
+public abstract class AbstractDbTest : AbstractFsTest , IDisposable
+{
+ protected readonly DataContext Context;
+ protected readonly IUnitOfWork UnitOfWork;
+ protected readonly IMapper Mapper;
+ private readonly DbConnection _connection;
+ private bool _disposed;
+
+ protected AbstractDbTest()
+ {
+ var contextOptions = new DbContextOptionsBuilder()
+ .UseSqlite(CreateInMemoryDatabase())
+ .EnableSensitiveDataLogging()
+ .Options;
+
+ _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
+
+ Context = new DataContext(contextOptions);
+
+ Context.Database.EnsureCreated(); // Ensure DB schema is created
+
+ Task.Run(SeedDb).GetAwaiter().GetResult();
+
+ var config = new MapperConfiguration(cfg => cfg.AddProfile());
+ Mapper = config.CreateMapper();
+
+ GlobalConfiguration.Configuration.UseInMemoryStorage();
+ UnitOfWork = new UnitOfWork(Context, Mapper, null);
+ }
+
+ private static DbConnection CreateInMemoryDatabase()
+ {
+ var connection = new SqliteConnection("Filename=:memory:");
+ connection.Open();
+
+ return connection;
+ }
+
+ private async Task SeedDb()
+ {
+ try
+ {
+ await Context.Database.EnsureCreatedAsync();
+ var filesystem = CreateFileSystem();
+
+ await Seed.SeedSettings(Context, new DirectoryService(Substitute.For>(), filesystem));
+
+ var setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
+ setting.Value = CacheDirectory;
+
+ setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
+ setting.Value = BackupDirectory;
+
+ setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
+ setting.Value = BookmarkDirectory;
+
+ setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
+ setting.Value = "10";
+
+ Context.ServerSetting.Update(setting);
+
+
+ Context.Library.Add(new LibraryBuilder("Manga")
+ .WithAllowMetadataMatching(true)
+ .WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
+ .Build());
+
+ await Context.SaveChangesAsync();
+
+ await Seed.SeedMetadataSettings(Context);
+
+ return true;
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine($"[SeedDb] Error: {ex.Message}");
+ return false;
+ }
+ }
+
+ protected abstract Task ResetDb();
+
+ public void Dispose()
+ {
+ Dispose(true);
+ GC.SuppressFinalize(this);
+ }
+
+ protected virtual void Dispose(bool disposing)
+ {
+ if (_disposed) return;
+
+ if (disposing)
+ {
+ Context?.Dispose();
+ _connection?.Dispose();
+ }
+
+ _disposed = true;
+ }
+
+ ///
+ /// Add a role to an existing User. Commits.
+ ///
+ ///
+ ///
+ protected async Task AddUserWithRole(int userId, string roleName)
+ {
+ var role = new AppRole { Id = userId, Name = roleName, NormalizedName = roleName.ToUpper() };
+
+ await Context.Roles.AddAsync(role);
+ await Context.UserRoles.AddAsync(new AppUserRole { UserId = userId, RoleId = userId });
+
+ await Context.SaveChangesAsync();
+ }
+}
diff --git a/API.Tests/AbstractFsTest.cs b/API.Tests/AbstractFsTest.cs
new file mode 100644
index 000000000..965a7ad78
--- /dev/null
+++ b/API.Tests/AbstractFsTest.cs
@@ -0,0 +1,44 @@
+
+
+using System.IO;
+using System.IO.Abstractions;
+using System.IO.Abstractions.TestingHelpers;
+using API.Services.Tasks.Scanner.Parser;
+
+namespace API.Tests;
+
+public abstract class AbstractFsTest
+{
+
+ protected static readonly string Root = Parser.NormalizePath(Path.GetPathRoot(Directory.GetCurrentDirectory()));
+ protected static readonly string ConfigDirectory = Root + "kavita/config/";
+ protected static readonly string CacheDirectory = ConfigDirectory + "cache/";
+ protected static readonly string CacheLongDirectory = ConfigDirectory + "cache-long/";
+ protected static readonly string CoverImageDirectory = ConfigDirectory + "covers/";
+ protected static readonly string BackupDirectory = ConfigDirectory + "backups/";
+ protected static readonly string LogDirectory = ConfigDirectory + "logs/";
+ protected static readonly string BookmarkDirectory = ConfigDirectory + "bookmarks/";
+ protected static readonly string SiteThemeDirectory = ConfigDirectory + "themes/";
+ protected static readonly string TempDirectory = ConfigDirectory + "temp/";
+ protected static readonly string ThemesDirectory = ConfigDirectory + "theme";
+ protected static readonly string DataDirectory = Root + "data/";
+
+ protected static MockFileSystem CreateFileSystem()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.Directory.SetCurrentDirectory(Root + "kavita/");
+ fileSystem.AddDirectory(Root + "kavita/config/");
+ fileSystem.AddDirectory(CacheDirectory);
+ fileSystem.AddDirectory(CacheLongDirectory);
+ fileSystem.AddDirectory(CoverImageDirectory);
+ fileSystem.AddDirectory(BackupDirectory);
+ fileSystem.AddDirectory(BookmarkDirectory);
+ fileSystem.AddDirectory(SiteThemeDirectory);
+ fileSystem.AddDirectory(LogDirectory);
+ fileSystem.AddDirectory(TempDirectory);
+ fileSystem.AddDirectory(DataDirectory);
+ fileSystem.AddDirectory(ThemesDirectory);
+
+ return fileSystem;
+ }
+}
diff --git a/API.Tests/BasicTest.cs b/API.Tests/BasicTest.cs
deleted file mode 100644
index fb2f2bbf0..000000000
--- a/API.Tests/BasicTest.cs
+++ /dev/null
@@ -1,118 +0,0 @@
-using System.Collections.Generic;
-using System.Data.Common;
-using System.IO.Abstractions.TestingHelpers;
-using System.Linq;
-using System.Threading.Tasks;
-using API.Data;
-using API.Entities;
-using API.Entities.Enums;
-using API.Helpers;
-using API.Services;
-using AutoMapper;
-using Microsoft.Data.Sqlite;
-using Microsoft.EntityFrameworkCore;
-using Microsoft.EntityFrameworkCore.Infrastructure;
-using Microsoft.Extensions.Logging;
-using NSubstitute;
-
-namespace API.Tests;
-
-public abstract class BasicTest
-{
- private readonly DbConnection _connection;
- protected readonly DataContext _context;
- protected readonly IUnitOfWork _unitOfWork;
-
-
- protected const string CacheDirectory = "C:/kavita/config/cache/";
- protected const string CoverImageDirectory = "C:/kavita/config/covers/";
- protected const string BackupDirectory = "C:/kavita/config/backups/";
- protected const string LogDirectory = "C:/kavita/config/logs/";
- protected const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
- protected const string TempDirectory = "C:/kavita/config/temp/";
-
- protected BasicTest()
- {
- var contextOptions = new DbContextOptionsBuilder()
- .UseSqlite(CreateInMemoryDatabase())
- .Options;
- _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
-
- _context = new DataContext(contextOptions);
- Task.Run(SeedDb).GetAwaiter().GetResult();
-
- var config = new MapperConfiguration(cfg => cfg.AddProfile());
- var mapper = config.CreateMapper();
-
- _unitOfWork = new UnitOfWork(_context, mapper, null);
- }
-
- private static DbConnection CreateInMemoryDatabase()
- {
- var connection = new SqliteConnection("Filename=:memory:");
-
- connection.Open();
-
- return connection;
- }
-
- private async Task SeedDb()
- {
- await _context.Database.MigrateAsync();
- var filesystem = CreateFileSystem();
-
- await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem));
-
- var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
- setting.Value = CacheDirectory;
-
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
- setting.Value = BackupDirectory;
-
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
- setting.Value = BookmarkDirectory;
-
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
- setting.Value = "10";
-
- _context.ServerSetting.Update(setting);
-
- _context.Library.Add(new Library()
- {
- Name = "Manga",
- Folders = new List()
- {
- new FolderPath()
- {
- Path = "C:/data/"
- }
- }
- });
- return await _context.SaveChangesAsync() > 0;
- }
-
- protected async Task ResetDb()
- {
- _context.Series.RemoveRange(_context.Series.ToList());
- _context.Users.RemoveRange(_context.Users.ToList());
- _context.AppUserBookmark.RemoveRange(_context.AppUserBookmark.ToList());
-
- await _context.SaveChangesAsync();
- }
-
- protected static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(BookmarkDirectory);
- fileSystem.AddDirectory(LogDirectory);
- fileSystem.AddDirectory(TempDirectory);
- fileSystem.AddDirectory("C:/data/");
-
- return fileSystem;
- }
-}
diff --git a/API.Tests/Comparers/ChapterSortComparerTest.cs b/API.Tests/Comparers/ChapterSortComparerTest.cs
index 220be052d..39a68b3b0 100644
--- a/API.Tests/Comparers/ChapterSortComparerTest.cs
+++ b/API.Tests/Comparers/ChapterSortComparerTest.cs
@@ -4,15 +4,16 @@ using Xunit;
namespace API.Tests.Comparers;
-public class ChapterSortComparerTest
+public class ChapterSortComparerDefaultLastTest
{
[Theory]
- [InlineData(new[] {1, 2, 0}, new[] {1, 2, 0})]
+ [InlineData(new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber}, new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
- [InlineData(new[] {1, 0, 0}, new[] {1, 0, 0})]
+ [InlineData(new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
+ [InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
public void ChapterSortTest(int[] input, int[] expected)
{
- Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparer()).ToArray());
+ Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultLast()).ToArray());
}
}
diff --git a/API.Tests/Comparers/ChapterSortComparerZeroFirstTests.cs b/API.Tests/Comparers/ChapterSortComparerZeroFirstTests.cs
index df3934884..fbae46b59 100644
--- a/API.Tests/Comparers/ChapterSortComparerZeroFirstTests.cs
+++ b/API.Tests/Comparers/ChapterSortComparerZeroFirstTests.cs
@@ -4,7 +4,7 @@ using Xunit;
namespace API.Tests.Comparers;
-public class ChapterSortComparerZeroFirstTests
+public class ChapterSortComparerDefaultFirstTests
{
[Theory]
[InlineData(new[] {1, 2, 0}, new[] {0, 1, 2,})]
@@ -12,13 +12,13 @@ public class ChapterSortComparerZeroFirstTests
[InlineData(new[] {1, 0, 0}, new[] {0, 0, 1})]
public void ChapterSortComparerZeroFirstTest(int[] input, int[] expected)
{
- Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerZeroFirst()).ToArray());
+ Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultFirst()).ToArray());
}
[Theory]
- [InlineData(new[] {1.0, 0.5, 0.3}, new[] {0.3, 0.5, 1.0})]
- public void ChapterSortComparerZeroFirstTest_Doubles(double[] input, double[] expected)
+ [InlineData(new [] {1.0f, 0.5f, 0.3f}, new [] {0.3f, 0.5f, 1.0f})]
+ public void ChapterSortComparerZeroFirstTest_Doubles(float[] input, float[] expected)
{
- Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerZeroFirst()).ToArray());
+ Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultFirst()).ToArray());
}
}
diff --git a/API.Tests/Comparers/SortComparerZeroLastTests.cs b/API.Tests/Comparers/SortComparerZeroLastTests.cs
index 37699d110..9a0722984 100644
--- a/API.Tests/Comparers/SortComparerZeroLastTests.cs
+++ b/API.Tests/Comparers/SortComparerZeroLastTests.cs
@@ -7,11 +7,11 @@ namespace API.Tests.Comparers;
public class SortComparerZeroLastTests
{
[Theory]
- [InlineData(new[] {0, 1, 2,}, new[] {1, 2, 0})]
+ [InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1, 2,}, new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
- [InlineData(new[] {0, 0, 1}, new[] {1, 0, 0})]
+ [InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
public void SortComparerZeroLastTest(int[] input, int[] expected)
{
- Assert.Equal(expected, input.OrderBy(f => f, new SortComparerZeroLast()).ToArray());
+ Assert.Equal(expected, input.OrderBy(f => f, ChapterSortComparerDefaultLast.Default).ToArray());
}
}
diff --git a/API.Tests/Converters/CronConverterTests.cs b/API.Tests/Converters/CronConverterTests.cs
index 4d26edef7..5568c89d0 100644
--- a/API.Tests/Converters/CronConverterTests.cs
+++ b/API.Tests/Converters/CronConverterTests.cs
@@ -2,16 +2,18 @@
using Xunit;
namespace API.Tests.Converters;
-
+#nullable enable
public class CronConverterTests
{
[Theory]
[InlineData("daily", "0 0 * * *")]
[InlineData("disabled", "0 0 31 2 *")]
[InlineData("weekly", "0 0 * * 1")]
- [InlineData("", "0 0 31 2 *")]
- [InlineData("sdfgdf", "")]
- public void ConvertTest(string input, string expected)
+ [InlineData("0 0 31 2 *", "0 0 31 2 *")]
+ [InlineData("sdfgdf", "sdfgdf")]
+ [InlineData("* * * * *", "* * * * *")]
+ [InlineData(null, "0 0 * * *")] // daily
+ public void ConvertTest(string? input, string expected)
{
Assert.Equal(expected, CronConverter.ConvertToCronNotation(input));
}
diff --git a/API.Tests/Data/AesopsFables.epub b/API.Tests/Data/AesopsFables.epub
new file mode 100644
index 000000000..d2ab9a8b2
Binary files /dev/null and b/API.Tests/Data/AesopsFables.epub differ
diff --git a/API.Tests/Entities/ComicInfoTests.cs b/API.Tests/Entities/ComicInfoTests.cs
index ea8b0187d..783248a3b 100644
--- a/API.Tests/Entities/ComicInfoTests.cs
+++ b/API.Tests/Entities/ComicInfoTests.cs
@@ -36,7 +36,6 @@ public class ComicInfoTests
}
#endregion
-
#region CalculatedCount
[Fact]
diff --git a/API.Tests/Entities/SeriesTest.cs b/API.Tests/Entities/SeriesTest.cs
deleted file mode 100644
index 0b49bd3dd..000000000
--- a/API.Tests/Entities/SeriesTest.cs
+++ /dev/null
@@ -1,26 +0,0 @@
-using API.Data;
-using Xunit;
-
-namespace API.Tests.Entities;
-
-///
-/// Tests for
-///
-public class SeriesTest
-{
- [Theory]
- [InlineData("Darker than Black")]
- public void CreateSeries(string name)
- {
- var key = API.Services.Tasks.Scanner.Parser.Parser.Normalize(name);
- var series = DbFactory.Series(name);
- Assert.Equal(0, series.Id);
- Assert.Equal(0, series.Pages);
- Assert.Equal(name, series.Name);
- Assert.Null(series.CoverImage);
- Assert.Equal(name, series.LocalizedName);
- Assert.Equal(name, series.SortName);
- Assert.Equal(name, series.OriginalName);
- Assert.Equal(key, series.NormalizedName);
- }
-}
diff --git a/API.Tests/Extensions/ChapterListExtensionsTests.cs b/API.Tests/Extensions/ChapterListExtensionsTests.cs
index f6ea62408..f19a0cede 100644
--- a/API.Tests/Extensions/ChapterListExtensionsTests.cs
+++ b/API.Tests/Extensions/ChapterListExtensionsTests.cs
@@ -4,7 +4,8 @@ using System.Linq;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
-using API.Parser;
+using API.Helpers.Builders;
+using API.Services.Tasks.Scanner.Parser;
using Xunit;
namespace API.Tests.Extensions;
@@ -13,22 +14,15 @@ public class ChapterListExtensionsTests
{
private static Chapter CreateChapter(string range, string number, MangaFile file, bool isSpecial)
{
- return new Chapter()
- {
- Range = range,
- Number = number,
- Files = new List() {file},
- IsSpecial = isSpecial
- };
+ return new ChapterBuilder(number, range)
+ .WithIsSpecial(isSpecial)
+ .WithFile(file)
+ .Build();
}
private static MangaFile CreateFile(string file, MangaFormat format)
{
- return new MangaFile()
- {
- FilePath = file,
- Format = format
- };
+ return new MangaFileBuilder(file, format).Build();
}
[Fact]
@@ -36,7 +30,7 @@ public class ChapterListExtensionsTests
{
var info = new ParserInfo()
{
- Chapters = "0",
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/darker than black.cbz",
@@ -44,12 +38,12 @@ public class ChapterListExtensionsTests
IsSpecial = false,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
};
var chapterList = new List()
{
- CreateChapter("darker than black - Some special", "0", CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
+ CreateChapter("darker than black - Some special", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
};
var actualChapter = chapterList.GetChapterByRange(info);
@@ -63,7 +57,7 @@ public class ChapterListExtensionsTests
{
var info = new ParserInfo()
{
- Chapters = "0",
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/darker than black.cbz",
@@ -71,12 +65,12 @@ public class ChapterListExtensionsTests
IsSpecial = true,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
};
var chapterList = new List()
{
- CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
+ CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
};
var actualChapter = chapterList.GetChapterByRange(info);
@@ -89,7 +83,7 @@ public class ChapterListExtensionsTests
{
var info = new ParserInfo()
{
- Chapters = "0",
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/detective comics #001.cbz",
@@ -97,13 +91,39 @@ public class ChapterListExtensionsTests
IsSpecial = true,
Series = "detective comics",
Title = "detective comics",
- Volumes = "0"
+ Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
};
var chapterList = new List()
{
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
+ };
+
+ var actualChapter = chapterList.GetChapterByRange(info);
+
+ Assert.Equal(chapterList[0], actualChapter);
+ }
+
+ [Fact]
+ public void GetChapterByRange_On_FilenameChange_ShouldGetChapter()
+ {
+ var info = new ParserInfo()
+ {
+ Chapters = "1",
+ Edition = "",
+ Format = MangaFormat.Archive,
+ FullFilePath = "/manga/detective comics #001.cbz",
+ Filename = "detective comics #001.cbz",
+ IsSpecial = false,
+ Series = "detective comics",
+ Title = "detective comics",
+ Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
+ };
+
+ var chapterList = new List()
+ {
+ CreateChapter("1", "1", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), false),
};
var actualChapter = chapterList.GetChapterByRange(info);
@@ -118,11 +138,11 @@ public class ChapterListExtensionsTests
{
var chapterList = new List()
{
- CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
+ CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
};
- Assert.Equal(chapterList.First(), chapterList.GetFirstChapterWithFiles());
+ Assert.Equal(chapterList[0], chapterList.GetFirstChapterWithFiles());
}
[Fact]
@@ -130,13 +150,13 @@ public class ChapterListExtensionsTests
{
var chapterList = new List()
{
- CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
+ CreateChapter("darker than black", Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
};
- chapterList.First().Files = new List();
+ chapterList[0].Files = new List();
- Assert.Equal(chapterList.Last(), chapterList.GetFirstChapterWithFiles());
+ Assert.Equal(chapterList[^1], chapterList.GetFirstChapterWithFiles());
}
@@ -157,11 +177,11 @@ public class ChapterListExtensionsTests
{
var chapterList = new List()
{
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
};
- chapterList[0].ReleaseDate = new DateTime(10, 1, 1);
+ chapterList[0].ReleaseDate = new DateTime(10, 1, 1, 0, 0, 0, DateTimeKind.Utc);
chapterList[1].ReleaseDate = DateTime.MinValue;
Assert.Equal(0, chapterList.MinimumReleaseYear());
@@ -172,12 +192,12 @@ public class ChapterListExtensionsTests
{
var chapterList = new List()
{
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
- CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
+ CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
};
- chapterList[0].ReleaseDate = new DateTime(2002, 1, 1);
- chapterList[1].ReleaseDate = new DateTime(2012, 2, 1);
+ chapterList[0].ReleaseDate = new DateTime(2002, 1, 1, 0, 0, 0, DateTimeKind.Utc);
+ chapterList[1].ReleaseDate = new DateTime(2012, 2, 1, 0, 0, 0, DateTimeKind.Utc);
Assert.Equal(2002, chapterList.MinimumReleaseYear());
}
diff --git a/API.Tests/Extensions/EncodeFormatExtensionsTests.cs b/API.Tests/Extensions/EncodeFormatExtensionsTests.cs
new file mode 100644
index 000000000..a02de84aa
--- /dev/null
+++ b/API.Tests/Extensions/EncodeFormatExtensionsTests.cs
@@ -0,0 +1,31 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using API.Entities.Enums;
+using API.Extensions;
+using Xunit;
+
+namespace API.Tests.Extensions;
+
+public class EncodeFormatExtensionsTests
+{
+ [Fact]
+ public void GetExtension_ShouldReturnCorrectExtensionForAllValues()
+ {
+ // Arrange
+ var expectedExtensions = new Dictionary
+ {
+ { EncodeFormat.PNG, ".png" },
+ { EncodeFormat.WEBP, ".webp" },
+ { EncodeFormat.AVIF, ".avif" }
+ };
+
+ // Act & Assert
+ foreach (var format in Enum.GetValues(typeof(EncodeFormat)).Cast())
+ {
+ var extension = format.GetExtension();
+ Assert.Equal(expectedExtensions[format], extension);
+ }
+ }
+
+}
diff --git a/API.Tests/Extensions/EnumerableExtensionsTests.cs b/API.Tests/Extensions/EnumerableExtensionsTests.cs
index e115d45f3..bdd3433ae 100644
--- a/API.Tests/Extensions/EnumerableExtensionsTests.cs
+++ b/API.Tests/Extensions/EnumerableExtensionsTests.cs
@@ -74,10 +74,10 @@ public class EnumerableExtensionsTests
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"},
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"}
)]
- [InlineData(
- new[] {"01/001.jpg", "001.jpg"},
- new[] {"001.jpg", "01/001.jpg"}
- )]
+ [InlineData(
+ new[] {"01/001.jpg", "001.jpg"},
+ new[] {"001.jpg", "01/001.jpg"}
+ )]
public void TestNaturalSort(string[] input, string[] expected)
{
Assert.Equal(expected, input.OrderByNatural(x => x).ToArray());
diff --git a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs
index b6a5ca362..227dd2b32 100644
--- a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs
+++ b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs
@@ -1,12 +1,12 @@
using System.Collections.Generic;
+using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using API.Entities.Enums;
using API.Extensions;
-using API.Parser;
+using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
-using API.Tests.Helpers;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
@@ -18,22 +18,21 @@ public class ParserInfoListExtensions
private readonly IDefaultParser _defaultParser;
public ParserInfoListExtensions()
{
- _defaultParser =
- new DefaultParser(new DirectoryService(Substitute.For>(),
- new MockFileSystem()));
+ var ds = new DirectoryService(Substitute.For>(), new MockFileSystem());
+ _defaultParser = new BasicParser(ds, new ImageParser(ds));
}
[Theory]
[InlineData(new[] {"1", "1", "3-5", "5", "8", "0", "0"}, new[] {"1", "3-5", "5", "8", "0"})]
public void DistinctVolumesTest(string[] volumeNumbers, string[] expectedNumbers)
{
- var infos = volumeNumbers.Select(n => new ParserInfo() {Volumes = n}).ToList();
+ var infos = volumeNumbers.Select(n => new ParserInfo() {Series = "", Volumes = n}).ToList();
Assert.Equal(expectedNumbers, infos.DistinctVolumes());
}
[Theory]
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
- [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
+ [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
[InlineData(new[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo)
{
@@ -41,13 +40,37 @@ public class ParserInfoListExtensions
foreach (var filename in inputInfos)
{
infos.Add(_defaultParser.Parse(
- filename,
- string.Empty));
+ Path.Join("E:/Manga/Cynthia the Mission/", filename),
+ "E:/Manga/", "E:/Manga/", LibraryType.Manga));
}
- var files = inputChapters.Select(s => EntityFactory.CreateMangaFile(s, MangaFormat.Archive, 199)).ToList();
- var chapter = EntityFactory.CreateChapter("0-6", false, files);
+ var files = inputChapters.Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build()).ToList();
+ var chapter = new ChapterBuilder("0-6")
+ .WithFiles(files)
+ .Build();
Assert.Equal(expectedHasInfo, infos.HasInfo(chapter));
}
+
+ [Fact]
+ public void HasInfoTest_SuccessWhenSpecial()
+ {
+ var infos = new[]
+ {
+ _defaultParser.Parse(
+ "E:/Manga/Cynthia the Mission/Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip",
+ "E:/Manga/", "E:/Manga/", LibraryType.Manga)
+ };
+
+ var files = new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip"}
+ .Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build())
+ .ToList();
+ var chapter = new ChapterBuilder("Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip")
+ .WithRange("Cynthia The Mission The Special SP01 [Desudesu&Brolen]")
+ .WithFiles(files)
+ .WithIsSpecial(true)
+ .Build();
+
+ Assert.True(infos.HasInfo(chapter));
+ }
}
diff --git a/API.Tests/Extensions/QueryableExtensionsTests.cs b/API.Tests/Extensions/QueryableExtensionsTests.cs
index ee1ada416..96d74b46d 100644
--- a/API.Tests/Extensions/QueryableExtensionsTests.cs
+++ b/API.Tests/Extensions/QueryableExtensionsTests.cs
@@ -3,8 +3,9 @@ using System.Linq;
using API.Data.Misc;
using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
-using API.Extensions;
+using API.Entities.Person;
+using API.Extensions.QueryExtensions;
+using API.Helpers.Builders;
using Xunit;
namespace API.Tests.Extensions;
@@ -18,27 +19,15 @@ public class QueryableExtensionsTests
{
var items = new List()
{
- new Series()
- {
- Metadata = new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- },
- new Series()
- {
- Metadata = new SeriesMetadata()
- {
- AgeRating = AgeRating.Unknown,
- }
- },
- new Series()
- {
- Metadata = new SeriesMetadata()
- {
- AgeRating = AgeRating.X18Plus,
- }
- },
+ new SeriesBuilder("Test 1")
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
+ .Build(),
+ new SeriesBuilder("Test 2")
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
+ .Build(),
+ new SeriesBuilder("Test 3")
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
+ .Build()
};
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
@@ -54,42 +43,18 @@ public class QueryableExtensionsTests
[InlineData(false, 1)]
public void RestrictAgainstAgeRestriction_CollectionTag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
- var items = new List()
+ var items = new List()
{
- new CollectionTag()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- }
- },
- new CollectionTag()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Unknown,
- },
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- }
- },
- new CollectionTag()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.X18Plus,
- }
- }
- },
+ new AppUserCollectionBuilder("Test")
+ .WithItem(new SeriesBuilder("S1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build()).Build())
+ .Build(),
+ new AppUserCollectionBuilder("Test 2")
+ .WithItem(new SeriesBuilder("S2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build()).Build())
+ .WithItem(new SeriesBuilder("S1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build()).Build())
+ .Build(),
+ new AppUserCollectionBuilder("Test 3")
+ .WithItem(new SeriesBuilder("S3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build()).Build())
+ .Build(),
};
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
@@ -102,45 +67,21 @@ public class QueryableExtensionsTests
[Theory]
[InlineData(true, 2)]
- [InlineData(false, 1)]
+ [InlineData(false, 2)]
public void RestrictAgainstAgeRestriction_Genre_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
var items = new List()
{
- new Genre()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- }
- },
- new Genre()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Unknown,
- },
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- }
- },
- new Genre()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.X18Plus,
- }
- }
- },
+ new GenreBuilder("A")
+ .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
+ .Build(),
+ new GenreBuilder("B")
+ .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
+ .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
+ .Build(),
+ new GenreBuilder("C")
+ .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
+ .Build(),
};
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
@@ -153,45 +94,21 @@ public class QueryableExtensionsTests
[Theory]
[InlineData(true, 2)]
- [InlineData(false, 1)]
+ [InlineData(false, 2)]
public void RestrictAgainstAgeRestriction_Tag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
var items = new List()
{
- new Tag()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- }
- },
- new Tag()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Unknown,
- },
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- }
- },
- new Tag()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.X18Plus,
- }
- }
- },
+ new TagBuilder("Test 1")
+ .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
+ .Build(),
+ new TagBuilder("Test 2")
+ .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
+ .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
+ .Build(),
+ new TagBuilder("Test 3")
+ .WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
+ .Build(),
};
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
@@ -204,53 +121,46 @@ public class QueryableExtensionsTests
[Theory]
[InlineData(true, 2)]
- [InlineData(false, 1)]
- public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
+ [InlineData(false, 2)]
+ public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedPeopleCount)
{
- var items = new List()
+ // Arrange
+ var items = new List
{
- new Person()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- }
- },
- new Person()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Unknown,
- },
- new SeriesMetadata()
- {
- AgeRating = AgeRating.Teen,
- }
- }
- },
- new Person()
- {
- SeriesMetadatas = new List()
- {
- new SeriesMetadata()
- {
- AgeRating = AgeRating.X18Plus,
- }
- }
- },
+ CreatePersonWithSeriesMetadata("Test1", AgeRating.Teen),
+ CreatePersonWithSeriesMetadata("Test2", AgeRating.Unknown, AgeRating.Teen), // 2 series on this person, restrict will still allow access
+ CreatePersonWithSeriesMetadata("Test3", AgeRating.X18Plus)
};
- var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
+ var ageRestriction = new AgeRestriction
{
AgeRating = AgeRating.Teen,
IncludeUnknowns = includeUnknowns
- });
- Assert.Equal(expectedCount, filtered.Count());
+ };
+
+ // Act
+ var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(ageRestriction);
+
+ // Assert
+ Assert.Equal(expectedPeopleCount, filtered.Count());
+ }
+
+ private static Person CreatePersonWithSeriesMetadata(string name, params AgeRating[] ageRatings)
+ {
+ var person = new PersonBuilder(name).Build();
+
+ foreach (var ageRating in ageRatings)
+ {
+ var seriesMetadata = new SeriesMetadataBuilder().WithAgeRating(ageRating).Build();
+ person.SeriesMetadataPeople.Add(new SeriesMetadataPeople
+ {
+ SeriesMetadata = seriesMetadata,
+ Person = person,
+ Role = PersonRole.Character // Role is now part of the relationship
+ });
+ }
+
+ return person;
}
[Theory]
@@ -258,20 +168,12 @@ public class QueryableExtensionsTests
[InlineData(false, 1)]
public void RestrictAgainstAgeRestriction_ReadingList_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
+
var items = new List()
{
- new ReadingList()
- {
- AgeRating = AgeRating.Teen,
- },
- new ReadingList()
- {
- AgeRating = AgeRating.Unknown,
- },
- new ReadingList()
- {
- AgeRating = AgeRating.X18Plus
- },
+ new ReadingListBuilder("Test List").WithRating(AgeRating.Teen).Build(),
+ new ReadingListBuilder("Test List").WithRating(AgeRating.Unknown).Build(),
+ new ReadingListBuilder("Test List").WithRating(AgeRating.X18Plus).Build(),
};
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
diff --git a/API.Tests/Extensions/SeriesExtensionsTests.cs b/API.Tests/Extensions/SeriesExtensionsTests.cs
index f8dce8876..adaecfba5 100644
--- a/API.Tests/Extensions/SeriesExtensionsTests.cs
+++ b/API.Tests/Extensions/SeriesExtensionsTests.cs
@@ -1,368 +1,503 @@
-using System.Collections.Generic;
-using System.Linq;
+using System.Linq;
using API.Comparators;
-using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
using API.Extensions;
-using API.Parser;
-using API.Services.Tasks.Scanner;
+using API.Helpers.Builders;
+using API.Services.Tasks.Scanner.Parser;
using Xunit;
namespace API.Tests.Extensions;
public class SeriesExtensionsTests
{
- [Theory]
- [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, true)]
- [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, true)]
- [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, false)]
- [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"Salem's Lot"}, true)]
- [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salems lot"}, true)]
- [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salem's lot"}, true)]
- // Different normalizations pass as we check normalization against an on-the-fly calculation so we don't delete series just because we change how normalization works
- [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, true)]
- [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, new [] {"Kanojo, Okarishimasu"}, true)]
- public void NameInListTest(string[] seriesInput, string[] list, bool expected)
- {
- var series = new Series()
- {
- Name = seriesInput[0],
- LocalizedName = seriesInput[1],
- OriginalName = seriesInput[2],
- NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]),
- Metadata = new SeriesMetadata()
- };
-
- Assert.Equal(expected, series.NameInList(list));
- }
-
- [Theory]
- [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, MangaFormat.Archive, true)]
- [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, MangaFormat.Archive, true)]
- [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, MangaFormat.Archive, false)]
- [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"Salem's Lot"}, MangaFormat.Archive, true)]
- [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salems lot"}, MangaFormat.Archive, true)]
- [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salem's lot"}, MangaFormat.Archive, true)]
- // Different normalizations pass as we check normalization against an on-the-fly calculation so we don't delete series just because we change how normalization works
- [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, MangaFormat.Archive, true)]
- [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, new [] {"Kanojo, Okarishimasu"}, MangaFormat.Archive, true)]
- public void NameInListParserInfoTest(string[] seriesInput, string[] list, MangaFormat format, bool expected)
- {
- var series = new Series()
- {
- Name = seriesInput[0],
- LocalizedName = seriesInput[1],
- OriginalName = seriesInput[2],
- NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]),
- Metadata = new SeriesMetadata(),
- };
-
- var parserInfos = list.Select(s => new ParsedSeries()
- {
- Name = s,
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(s),
- }).ToList();
-
- // This doesn't do any checks against format
- Assert.Equal(expected, series.NameInList(parserInfos));
- }
-
-
- [Theory]
- [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, "Darker than Black", true)]
- [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, "Kanojo, Okarishimasu", true)]
- [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, "Rent", false)]
- public void NameInParserInfoTest(string[] seriesInput, string parserSeries, bool expected)
- {
- var series = new Series()
- {
- Name = seriesInput[0],
- LocalizedName = seriesInput[1],
- OriginalName = seriesInput[2],
- NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]),
- Metadata = new SeriesMetadata()
- };
- var info = new ParserInfo
- {
- Series = parserSeries
- };
-
- Assert.Equal(expected, series.NameInParserInfo(info));
- }
-
[Fact]
- public void GetCoverImage_MultipleSpecials_Comics()
+ public void GetCoverImage_MultipleSpecials()
{
- var series = new Series()
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Special 1")
+ .WithIsSpecial(true)
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Special 2")
+ .WithIsSpecial(true)
+ .WithSortOrder(Parser.SpecialVolumeNumber + 2)
+ .Build())
+ .Build())
+ .Build();
+
+ foreach (var vol in series.Volumes)
{
- Format = MangaFormat.Archive,
- Volumes = new List()
- {
- new Volume()
- {
- Number = 0,
- Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
- Chapters = new List()
- {
- new Chapter()
- {
- IsSpecial = true,
- Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
- CoverImage = "Special 1",
- },
- new Chapter()
- {
- IsSpecial = true,
- Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
- CoverImage = "Special 2",
- }
- },
- }
- }
- };
-
- Assert.Equal("Special 1", series.GetCoverImage());
-
- }
-
- [Fact]
- public void GetCoverImage_MultipleSpecials_Books()
- {
- var series = new Series()
- {
- Format = MangaFormat.Epub,
- Volumes = new List()
- {
- new Volume()
- {
- Number = 0,
- Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
- Chapters = new List()
- {
- new Chapter()
- {
- IsSpecial = true,
- Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
- CoverImage = "Special 1",
- },
- new Chapter()
- {
- IsSpecial = true,
- Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
- CoverImage = "Special 2",
- }
- },
- }
- }
- };
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
+ }
Assert.Equal("Special 1", series.GetCoverImage());
}
[Fact]
- public void GetCoverImage_JustChapters_Comics()
+ public void GetCoverImage_Volume1Chapter1_Volume2_AndLooseChapters()
{
- var series = new Series()
- {
- Format = MangaFormat.Archive,
- Volumes = new List()
- {
- new Volume()
- {
- Number = 0,
- Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
- Chapters = new List()
- {
- new Chapter()
- {
- IsSpecial = false,
- Number = "2.5",
- CoverImage = "Special 1",
- },
- new Chapter()
- {
- IsSpecial = false,
- Number = "2",
- CoverImage = "Special 2",
- }
- },
- }
- }
- };
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("13")
+ .WithCoverImage("Chapter 13")
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder("1")
+ .WithName("Volume 1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithCoverImage("Volume 1 Chapter 1")
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder("2")
+ .WithName("Volume 2")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Volume 2")
+ .Build())
+ .Build())
+ .Build();
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
- Assert.Equal("Special 2", series.GetCoverImage());
+ Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
}
[Fact]
- public void GetCoverImage_JustChaptersAndSpecials_Comics()
+ public void GetCoverImage_LooseChapters_WithSub1_Chapter()
{
- var series = new Series()
- {
- Format = MangaFormat.Archive,
- Volumes = new List()
- {
- new Volume()
- {
- Number = 0,
- Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
- Chapters = new List()
- {
- new Chapter()
- {
- IsSpecial = false,
- Number = "2.5",
- CoverImage = "Special 1",
- },
- new Chapter()
- {
- IsSpecial = false,
- Number = "2",
- CoverImage = "Special 2",
- },
- new Chapter()
- {
- IsSpecial = true,
- Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
- CoverImage = "Special 3",
- }
- },
- }
- }
- };
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("-1")
+ .WithCoverImage("Chapter -1")
+ .Build())
+ .WithChapter(new ChapterBuilder("0.5")
+ .WithCoverImage("Chapter 0.5")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithCoverImage("Chapter 2")
+ .Build())
+ .WithChapter(new ChapterBuilder("1")
+ .WithCoverImage("Chapter 1")
+ .Build())
+ .WithChapter(new ChapterBuilder("3")
+ .WithCoverImage("Chapter 3")
+ .Build())
+ .WithChapter(new ChapterBuilder("4AU")
+ .WithCoverImage("Chapter 4AU")
+ .Build())
+ .Build())
- foreach (var vol in series.Volumes)
- {
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default)?.CoverImage;
- }
+ .Build();
- Assert.Equal("Special 2", series.GetCoverImage());
+
+ Assert.Equal("Chapter 1", series.GetCoverImage());
+ }
+
+ /// <summary>
+ /// Checks the case where there are both specials and loose leaf chapters; the loose leaf chapters should be preferred for the cover
+ /// </summary>
+ [Fact]
+ public void GetCoverImage_LooseChapters_WithSub1_Chapter_WithSpecials()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithName(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder("I am a Special")
+ .WithCoverImage("I am a Special")
+ .Build())
+ .WithChapter(new ChapterBuilder("I am a Special 2")
+ .WithCoverImage("I am a Special 2")
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("0.5")
+ .WithCoverImage("Chapter 0.5")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithCoverImage("Chapter 2")
+ .Build())
+ .WithChapter(new ChapterBuilder("1")
+ .WithCoverImage("Chapter 1")
+ .Build())
+ .Build())
+
+ .Build();
+
+
+ Assert.Equal("Chapter 1", series.GetCoverImage());
}
[Fact]
- public void GetCoverImage_VolumesChapters_Comics()
+ public void GetCoverImage_JustVolumes()
{
- var series = new Series()
- {
- Format = MangaFormat.Archive,
- Volumes = new List()
- {
- new Volume()
- {
- Number = 0,
- Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
- Chapters = new List()
- {
- new Chapter()
- {
- IsSpecial = false,
- Number = "2.5",
- CoverImage = "Special 1",
- },
- new Chapter()
- {
- IsSpecial = false,
- Number = "2",
- CoverImage = "Special 2",
- },
- new Chapter()
- {
- IsSpecial = true,
- Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
- CoverImage = "Special 3",
- }
- },
- },
- new Volume()
- {
- Number = 1,
- Name = "1",
- Chapters = new List()
- {
- new Chapter()
- {
- IsSpecial = false,
- Number = "0",
- CoverImage = "Volume 1",
- },
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
- },
- }
- }
- };
+ .WithVolume(new VolumeBuilder("1")
+ .WithName("Volume 1")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Volume 1 Chapter 1")
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder("2")
+ .WithName("Volume 2")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Volume 2")
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder("3")
+ .WithName("Volume 3")
+ .WithChapter(new ChapterBuilder("10")
+ .WithCoverImage("Volume 3 Chapter 10")
+ .Build())
+ .WithChapter(new ChapterBuilder("11")
+ .WithCoverImage("Volume 3 Chapter 11")
+ .Build())
+ .WithChapter(new ChapterBuilder("12")
+ .WithCoverImage("Volume 3 Chapter 12")
+ .Build())
+ .Build())
+ .Build();
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
+ }
+
+ Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
+ }
+
+ [Fact]
+ public void GetCoverImage_JustVolumes_ButVolume0()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+
+ .WithVolume(new VolumeBuilder("0")
+ .WithName("Volume 0")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Volume 0")
+ .Build())
+ .Build())
+
+ .WithVolume(new VolumeBuilder("1")
+ .WithName("Volume 1")
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithCoverImage("Volume 1")
+ .Build())
+ .Build())
+ .Build();
+
+ foreach (var vol in series.Volumes)
+ {
+ vol.CoverImage = vol.Chapters.MinBy(x => x.SortOrder, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1", series.GetCoverImage());
}
[Fact]
- public void GetCoverImage_VolumesChaptersAndSpecials_Comics()
+ public void GetCoverImage_JustSpecials_WithDecimal()
{
- var series = new Series()
- {
- Format = MangaFormat.Archive,
- Volumes = new List()
- {
- new Volume()
- {
- Number = 0,
- Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
- Chapters = new List()
- {
- new Chapter()
- {
- IsSpecial = false,
- Number = "2.5",
- CoverImage = "Special 1",
- },
- new Chapter()
- {
- IsSpecial = false,
- Number = "2",
- CoverImage = "Special 2",
- },
- new Chapter()
- {
- IsSpecial = true,
- Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
- CoverImage = "Special 3",
- }
- },
- },
- new Volume()
- {
- Number = 1,
- Name = "1",
- Chapters = new List()
- {
- new Chapter()
- {
- IsSpecial = false,
- Number = "0",
- CoverImage = "Volume 1",
- },
-
- },
- }
- }
- };
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("2.5")
+ .WithIsSpecial(false)
+ .WithCoverImage("Special 1")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithIsSpecial(false)
+ .WithCoverImage("Special 2")
+ .Build())
+ .Build())
+ .Build();
foreach (var vol in series.Volumes)
{
- vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default)?.CoverImage;
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
+ }
+
+ Assert.Equal("Special 2", series.GetCoverImage());
+ }
+
+ [Fact]
+ public void GetCoverImage_JustChaptersAndSpecials()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("2.5")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 2.5")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 2")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithCoverImage("Special 1")
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build())
+ .Build();
+
+ foreach (var vol in series.Volumes)
+ {
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
+ }
+
+ Assert.Equal("Chapter 2", series.GetCoverImage());
+ }
+
+ [Fact]
+ public void GetCoverImage_VolumesChapters()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("2.5")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 2.5")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 2")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithCoverImage("Special 3")
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 1")
+ .Build())
+ .Build())
+ .Build();
+
+ foreach (var vol in series.Volumes)
+ {
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1", series.GetCoverImage());
}
+ [Fact]
+ public void GetCoverImage_VolumesChaptersAndSpecials()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("2.5")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 2.5")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 2")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithCoverImage("Special 1")
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 1")
+ .Build())
+ .Build())
+ .Build();
+
+ foreach (var vol in series.Volumes)
+ {
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
+ }
+
+ Assert.Equal("Volume 1", series.GetCoverImage());
+ }
+
+ [Fact]
+ public void GetCoverImage_VolumesChaptersAndSpecials_Ippo()
+ {
+ var series = new SeriesBuilder("Ippo")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("1426")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 1426")
+ .Build())
+ .WithChapter(new ChapterBuilder("1425")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 1425")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithCoverImage("Special 3")
+ .WithSortOrder(Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 1")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("137")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 137")
+ .Build())
+ .Build())
+ .Build();
+
+ foreach (var vol in series.Volumes)
+ {
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
+ }
+
+ Assert.Equal("Volume 1", series.GetCoverImage());
+ }
+
+ [Fact]
+ public void GetCoverImage_VolumesChapters_WhereVolumeIsNot1()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("2.5")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 2.5")
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 2")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder("4")
+ .WithMinNumber(4)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 4")
+ .Build())
+ .Build())
+ .Build();
+
+ foreach (var vol in series.Volumes)
+ {
+ vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
+ }
+
+ Assert.Equal("Chapter 2", series.GetCoverImage());
+ }
+
+ /// <summary>
+ /// Ensures that the Series cover is Chapter 1 when chapters numbered below 1 exist alongside specials
+ /// </summary>
+ [Fact]
+ public void GetCoverImage_LessThanIssue1()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("0")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 0")
+ .Build())
+ .WithChapter(new ChapterBuilder("1")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 1")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithMinNumber(4)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 4")
+ .Build())
+ .Build())
+ .Build();
+
+ Assert.Equal("Chapter 1", series.GetCoverImage());
+ }
+
+ /// <summary>
+ /// Ensures that the Series cover is Chapter 1 when negative and sub-1 chapters exist alongside specials
+ /// </summary>
+ [Fact]
+ public void GetCoverImage_LessThanIssue1_WithNegative()
+ {
+ var series = new SeriesBuilder("Test 1")
+ .WithFormat(MangaFormat.Archive)
+ .WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
+ .WithName(Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("-1")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter -1")
+ .Build())
+ .WithChapter(new ChapterBuilder("0")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 0")
+ .Build())
+ .WithChapter(new ChapterBuilder("1")
+ .WithIsSpecial(false)
+ .WithCoverImage("Chapter 1")
+ .Build())
+ .Build())
+ .WithVolume(new VolumeBuilder(Parser.SpecialVolume)
+ .WithMinNumber(4)
+ .WithChapter(new ChapterBuilder(Parser.DefaultChapter)
+ .WithIsSpecial(false)
+ .WithCoverImage("Volume 4")
+ .Build())
+ .Build())
+ .Build();
+
+ Assert.Equal("Chapter 1", series.GetCoverImage());
+ }
+
}
diff --git a/API.Tests/Extensions/SeriesFilterTests.cs b/API.Tests/Extensions/SeriesFilterTests.cs
new file mode 100644
index 000000000..ba42be8a1
--- /dev/null
+++ b/API.Tests/Extensions/SeriesFilterTests.cs
@@ -0,0 +1,1338 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using API.DTOs;
+using API.DTOs.Filtering.v2;
+using API.DTOs.Progress;
+using API.Entities;
+using API.Entities.Enums;
+using API.Extensions.QueryExtensions.Filtering;
+using API.Helpers.Builders;
+using API.Services;
+using API.Services.Plus;
+using API.SignalR;
+using Kavita.Common;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Extensions;
+
+public class SeriesFilterTests : AbstractDbTest
+{
+ protected override async Task ResetDb()
+ {
+ Context.Series.RemoveRange(Context.Series);
+ Context.AppUser.RemoveRange(Context.AppUser);
+ await Context.SaveChangesAsync();
+ }
+
+ #region HasProgress
+
+ private async Task SetupHasProgress()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("None").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Partial").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Full").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+
+ // Create read progress on Partial and Full
+ var readerService = new ReaderService(UnitOfWork, Substitute.For>(),
+ Substitute.For(), Substitute.For(),
+ Substitute.For(), Substitute.For());
+
+ // Select Partial and set pages read to 5 on first chapter
+ var partialSeries = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(2);
+ var partialChapter = partialSeries.Volumes.First().Chapters.First();
+
+ Assert.True(await readerService.SaveReadingProgress(new ProgressDto()
+ {
+ ChapterId = partialChapter.Id,
+ LibraryId = 1,
+ SeriesId = partialSeries.Id,
+ PageNum = 5,
+ VolumeId = partialChapter.VolumeId
+ }, user.Id));
+
+ // Select Full and set pages read to 10 on first chapter
+ var fullSeries = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(3);
+ var fullChapter = fullSeries.Volumes.First().Chapters.First();
+
+ Assert.True(await readerService.SaveReadingProgress(new ProgressDto()
+ {
+ ChapterId = fullChapter.Id,
+ LibraryId = 1,
+ SeriesId = fullSeries.Id,
+ PageNum = 10,
+ VolumeId = fullChapter.VolumeId
+ }, user.Id));
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThan50_ShouldReturnSingle()
+ {
+ var user = await SetupHasProgress();
+
+ var queryResult = await Context.Series.HasReadingProgress(true, FilterComparison.LessThan, 50, user.Id)
+ .ToListAsync();
+
+ Assert.Single(queryResult);
+ Assert.Equal("None", queryResult.First().Name);
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThanOrEqual50_ShouldReturnTwo()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress <= 50%
+ var queryResult = await Context.Series.HasReadingProgress(true, FilterComparison.LessThanEqual, 50, user.Id)
+ .ToListAsync();
+
+ Assert.Equal(2, queryResult.Count);
+ Assert.Contains(queryResult, s => s.Name == "None");
+ Assert.Contains(queryResult, s => s.Name == "Partial");
+ }
+
+ [Fact]
+ public async Task HasProgress_GreaterThan50_ShouldReturnFull()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress > 50%
+ var queryResult = await Context.Series.HasReadingProgress(true, FilterComparison.GreaterThan, 50, user.Id)
+ .ToListAsync();
+
+ Assert.Single(queryResult);
+ Assert.Equal("Full", queryResult.First().Name);
+ }
+
+ [Fact]
+ public async Task HasProgress_Equal100_ShouldReturnFull()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress == 100%
+ var queryResult = await Context.Series.HasReadingProgress(true, FilterComparison.Equal, 100, user.Id)
+ .ToListAsync();
+
+ Assert.Single(queryResult);
+ Assert.Equal("Full", queryResult.First().Name);
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThan100_ShouldReturnTwo()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress < 100%
+ var queryResult = await Context.Series.HasReadingProgress(true, FilterComparison.LessThan, 100, user.Id)
+ .ToListAsync();
+
+ Assert.Equal(2, queryResult.Count);
+ Assert.Contains(queryResult, s => s.Name == "None");
+ Assert.Contains(queryResult, s => s.Name == "Partial");
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThanOrEqual100_ShouldReturnAll()
+ {
+ var user = await SetupHasProgress();
+
+ // Query series with progress <= 100%
+ var queryResult = await Context.Series.HasReadingProgress(true, FilterComparison.LessThanEqual, 100, user.Id)
+ .ToListAsync();
+
+ Assert.Equal(3, queryResult.Count);
+ Assert.Contains(queryResult, s => s.Name == "None");
+ Assert.Contains(queryResult, s => s.Name == "Partial");
+ Assert.Contains(queryResult, s => s.Name == "Full");
+ }
+
+ [Fact]
+ public async Task HasProgress_LessThan100_WithProgress99_99_ShouldReturnSeries()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("AlmostFull").WithPages(100)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(100).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ var readerService = new ReaderService(UnitOfWork, Substitute.For>(),
+ Substitute.For(), Substitute.For(),
+ Substitute.For(), Substitute.For());
+
+ // Set progress to 99.99% (99/100 pages read)
+ var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
+ var chapter = series.Volumes.First().Chapters.First();
+
+ Assert.True(await readerService.SaveReadingProgress(new ProgressDto()
+ {
+ ChapterId = chapter.Id,
+ LibraryId = 1,
+ SeriesId = series.Id,
+ PageNum = 99,
+ VolumeId = chapter.VolumeId
+ }, user.Id));
+
+ // Query series with progress < 100%
+ var queryResult = await Context.Series.HasReadingProgress(true, FilterComparison.LessThan, 100, user.Id)
+ .ToListAsync();
+
+ Assert.Single(queryResult);
+ Assert.Equal("AlmostFull", queryResult.First().Name);
+ }
+ #endregion
+
+ #region HasLanguage
+
+ private async Task SetupHasLanguage()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("English").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithLanguage("en").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("French").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithLanguage("fr").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Spanish").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithLanguage("es").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasLanguage_Equal_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await Context.Series.HasLanguage(true, FilterComparison.Equal, ["en"]).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("en", foundSeries[0].Metadata.Language);
+ }
+
+ [Fact]
+ public async Task HasLanguage_NotEqual_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await Context.Series.HasLanguage(true, FilterComparison.NotEqual, ["en"]).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.DoesNotContain(foundSeries, s => s.Metadata.Language == "en");
+ }
+
+ [Fact]
+ public async Task HasLanguage_Contains_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await Context.Series.HasLanguage(true, FilterComparison.Contains, ["en", "fr"]).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Metadata.Language == "en");
+ Assert.Contains(foundSeries, s => s.Metadata.Language == "fr");
+ }
+
+ [Fact]
+ public async Task HasLanguage_NotContains_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await Context.Series.HasLanguage(true, FilterComparison.NotContains, ["en", "fr"]).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("es", foundSeries[0].Metadata.Language);
+ }
+
+ [Fact]
+ public async Task HasLanguage_MustContains_Works()
+ {
+ await SetupHasLanguage();
+
+ // Since "MustContains" matches all the provided languages, no series should match in this case.
+ var foundSeries = await Context.Series.HasLanguage(true, FilterComparison.MustContains, ["en", "fr"]).ToListAsync();
+ Assert.Empty(foundSeries);
+
+ // Single language should work.
+ foundSeries = await Context.Series.HasLanguage(true, FilterComparison.MustContains, ["en"]).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("en", foundSeries[0].Metadata.Language);
+ }
+
+ [Fact]
+ public async Task HasLanguage_Matches_Works()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await Context.Series.HasLanguage(true, FilterComparison.Matches, ["e"]).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains("en", foundSeries.Select(s => s.Metadata.Language));
+ Assert.Contains("es", foundSeries.Select(s => s.Metadata.Language));
+ }
+
+ [Fact]
+ public async Task HasLanguage_DisabledCondition_ReturnsAll()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await Context.Series.HasLanguage(false, FilterComparison.Equal, ["en"]).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasLanguage_EmptyLanguageList_ReturnsAll()
+ {
+ await SetupHasLanguage();
+
+ var foundSeries = await Context.Series.HasLanguage(true, FilterComparison.Equal, new List<string>()).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasLanguage_UnsupportedComparison_ThrowsException()
+ {
+ await SetupHasLanguage();
+
+ await Assert.ThrowsAsync<KavitaException>(async () =>
+ {
+ await Context.Series.HasLanguage(true, FilterComparison.GreaterThan, ["en"]).ToListAsync();
+ });
+ }
+
+ #endregion
+
+ #region HasAverageRating
+
+ private async Task<AppUser> SetupHasAverageRating()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("None").WithPages(10)
+ .WithExternalMetadata(new ExternalSeriesMetadataBuilder().WithAverageExternalRating(-1).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Partial").WithPages(10)
+ .WithExternalMetadata(new ExternalSeriesMetadataBuilder().WithAverageExternalRating(50).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Full").WithPages(10)
+ .WithExternalMetadata(new ExternalSeriesMetadataBuilder().WithAverageExternalRating(100).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasAverageRating_Equal_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await Context.Series.HasAverageRating(true, FilterComparison.Equal, 100).ToListAsync();
+ Assert.Single(series);
+ Assert.Equal("Full", series[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_GreaterThan_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await Context.Series.HasAverageRating(true, FilterComparison.GreaterThan, 50).ToListAsync();
+ Assert.Single(series);
+ Assert.Equal("Full", series[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_GreaterThanEqual_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await Context.Series.HasAverageRating(true, FilterComparison.GreaterThanEqual, 50).ToListAsync();
+ Assert.Equal(2, series.Count);
+ Assert.Contains(series, s => s.Name == "Partial");
+ Assert.Contains(series, s => s.Name == "Full");
+ }
+
+ [Fact]
+ public async Task HasAverageRating_LessThan_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await Context.Series.HasAverageRating(true, FilterComparison.LessThan, 50).ToListAsync();
+ Assert.Single(series);
+ Assert.Equal("None", series[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_LessThanEqual_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await Context.Series.HasAverageRating(true, FilterComparison.LessThanEqual, 50).ToListAsync();
+ Assert.Equal(2, series.Count);
+ Assert.Contains(series, s => s.Name == "None");
+ Assert.Contains(series, s => s.Name == "Partial");
+ }
+
+ [Fact]
+ public async Task HasAverageRating_NotEqual_Works()
+ {
+ await SetupHasAverageRating();
+
+ var series = await Context.Series.HasAverageRating(true, FilterComparison.NotEqual, 100).ToListAsync();
+ Assert.Equal(2, series.Count);
+ Assert.DoesNotContain(series, s => s.Name == "Full");
+ }
+
+ [Fact]
+ public async Task HasAverageRating_ConditionFalse_ReturnsAll()
+ {
+ await SetupHasAverageRating();
+
+ var series = await Context.Series.HasAverageRating(false, FilterComparison.Equal, 100).ToListAsync();
+ Assert.Equal(3, series.Count);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_NotSet_IsHandled()
+ {
+ await SetupHasAverageRating();
+
+ var series = await Context.Series.HasAverageRating(true, FilterComparison.Equal, -1).ToListAsync();
+ Assert.Single(series);
+ Assert.Equal("None", series[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAverageRating_ThrowsForInvalidComparison()
+ {
+ await SetupHasAverageRating();
+
+ await Assert.ThrowsAsync<KavitaException>(async () =>
+ {
+ await Context.Series.HasAverageRating(true, FilterComparison.Contains, 50).ToListAsync();
+ });
+ }
+
+ [Fact]
+ public async Task HasAverageRating_ThrowsForOutOfRangeComparison()
+ {
+ await SetupHasAverageRating();
+
+ await Assert.ThrowsAsync<ArgumentOutOfRangeException>(async () =>
+ {
+ await Context.Series.HasAverageRating(true, (FilterComparison)999, 50).ToListAsync();
+ });
+ }
+
+ #endregion
+
+ #region HasPublicationStatus
+
+ private async Task<AppUser> SetupHasPublicationStatus()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("Cancelled").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.Cancelled).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("OnGoing").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.OnGoing).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Completed").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.Completed).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_Equal_Works()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await Context.Series.HasPublicationStatus(true, FilterComparison.Equal, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("Cancelled", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_Contains_Works()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await Context.Series.HasPublicationStatus(true, FilterComparison.Contains, new List<PublicationStatus> { PublicationStatus.Cancelled, PublicationStatus.Completed }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Cancelled");
+ Assert.Contains(foundSeries, s => s.Name == "Completed");
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_NotContains_Works()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await Context.Series.HasPublicationStatus(true, FilterComparison.NotContains, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "OnGoing");
+ Assert.Contains(foundSeries, s => s.Name == "Completed");
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_NotEqual_Works()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await Context.Series.HasPublicationStatus(true, FilterComparison.NotEqual, new List<PublicationStatus> { PublicationStatus.OnGoing }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Cancelled");
+ Assert.Contains(foundSeries, s => s.Name == "Completed");
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_ConditionFalse_ReturnsAll()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await Context.Series.HasPublicationStatus(false, FilterComparison.Equal, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_EmptyPubStatuses_ReturnsAll()
+ {
+ await SetupHasPublicationStatus();
+
+ var foundSeries = await Context.Series.HasPublicationStatus(true, FilterComparison.Equal, new List<PublicationStatus>()).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_ThrowsForInvalidComparison()
+ {
+ await SetupHasPublicationStatus();
+
+ await Assert.ThrowsAsync<KavitaException>(async () =>
+ {
+ await Context.Series.HasPublicationStatus(true, FilterComparison.BeginsWith, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ });
+ }
+
+ [Fact]
+ public async Task HasPublicationStatus_ThrowsForOutOfRangeComparison()
+ {
+ await SetupHasPublicationStatus();
+
+ await Assert.ThrowsAsync<ArgumentOutOfRangeException>(async () =>
+ {
+ await Context.Series.HasPublicationStatus(true, (FilterComparison)999, new List<PublicationStatus> { PublicationStatus.Cancelled }).ToListAsync();
+ });
+ }
+ #endregion
+
+ #region HasAgeRating
+ private async Task<AppUser> SetupHasAgeRating()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("Unknown").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("G").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.G).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Mature").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Mature).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasAgeRating_Equal_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.Equal, [AgeRating.G]).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("G", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasAgeRating_Contains_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.Contains, new List<AgeRating> { AgeRating.G, AgeRating.Mature }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_NotContains_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.NotContains, new List<AgeRating> { AgeRating.Unknown }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_NotEqual_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.NotEqual, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Unknown");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_GreaterThan_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.GreaterThan, new List<AgeRating> { AgeRating.Unknown }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_GreaterThanEqual_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.GreaterThanEqual, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ Assert.Contains(foundSeries, s => s.Name == "Mature");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_LessThan_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.LessThan, new List<AgeRating> { AgeRating.Mature }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Unknown");
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_LessThanEqual_Works()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.LessThanEqual, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "Unknown");
+ Assert.Contains(foundSeries, s => s.Name == "G");
+ }
+
+ [Fact]
+ public async Task HasAgeRating_ConditionFalse_ReturnsAll()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(false, FilterComparison.Equal, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasAgeRating_EmptyRatings_ReturnsAll()
+ {
+ await SetupHasAgeRating();
+
+ var foundSeries = await Context.Series.HasAgeRating(true, FilterComparison.Equal, new List<AgeRating>()).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasAgeRating_ThrowsForInvalidComparison()
+ {
+ await SetupHasAgeRating();
+
+ await Assert.ThrowsAsync<KavitaException>(async () =>
+ {
+ await Context.Series.HasAgeRating(true, FilterComparison.BeginsWith, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ });
+ }
+
+ [Fact]
+ public async Task HasAgeRating_ThrowsForOutOfRangeComparison()
+ {
+ await SetupHasAgeRating();
+
+ await Assert.ThrowsAsync<ArgumentOutOfRangeException>(async () =>
+ {
+ await Context.Series.HasAgeRating(true, (FilterComparison)999, new List<AgeRating> { AgeRating.G }).ToListAsync();
+ });
+ }
+
+ #endregion
+
+ #region HasReleaseYear
+
+ private async Task<AppUser> SetupHasReleaseYear()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("2000").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithReleaseYear(2000).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("2020").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithReleaseYear(2020).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("2025").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithReleaseYear(2025).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_Equal_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await Context.Series.HasReleaseYear(true, FilterComparison.Equal, 2020).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("2020", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_GreaterThan_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await Context.Series.HasReleaseYear(true, FilterComparison.GreaterThan, 2000).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "2020");
+ Assert.Contains(foundSeries, s => s.Name == "2025");
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_LessThan_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await Context.Series.HasReleaseYear(true, FilterComparison.LessThan, 2025).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Contains(foundSeries, s => s.Name == "2000");
+ Assert.Contains(foundSeries, s => s.Name == "2020");
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_IsInLast_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await Context.Series.HasReleaseYear(true, FilterComparison.IsInLast, 5).ToListAsync();
+ Assert.Equal(2, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_IsNotInLast_Works()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await Context.Series.HasReleaseYear(true, FilterComparison.IsNotInLast, 5).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Contains(foundSeries, s => s.Name == "2000");
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_ConditionFalse_ReturnsAll()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await Context.Series.HasReleaseYear(false, FilterComparison.Equal, 2020).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_ReleaseYearNull_ReturnsAll()
+ {
+ await SetupHasReleaseYear();
+
+ var foundSeries = await Context.Series.HasReleaseYear(true, FilterComparison.Equal, null).ToListAsync();
+ Assert.Equal(3, foundSeries.Count);
+ }
+
+ [Fact]
+ public async Task HasReleaseYear_IsEmpty_Works()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("EmptyYear").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithReleaseYear(0).Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ var foundSeries = await Context.Series.HasReleaseYear(true, FilterComparison.IsEmpty, 0).ToListAsync();
+ Assert.Single(foundSeries);
+ Assert.Equal("EmptyYear", foundSeries[0].Name);
+ }
+
+
+ #endregion
+
+ #region HasRating
+
+ private async Task<AppUser> SetupHasRating()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("No Rating").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("0 Rating").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("4.5 Rating").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ var ratingService = new RatingService(UnitOfWork, Substitute.For<IScrobblingService>(), Substitute.For<ILogger<RatingService>>());
+
+ // Select 0 Rating
+ var zeroRating = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(2);
+ Assert.NotNull(zeroRating);
+
+ Assert.True(await ratingService.UpdateSeriesRating(user, new UpdateRatingDto()
+ {
+ SeriesId = zeroRating.Id,
+ UserRating = 0
+ }));
+
+ // Select 4.5 Rating
+ var partialRating = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(3);
+
+ Assert.True(await ratingService.UpdateSeriesRating(user, new UpdateRatingDto()
+ {
+ SeriesId = partialRating.Id,
+ UserRating = 4.5f
+ }));
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasRating_Equal_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await Context.Series
+ .HasRating(true, FilterComparison.Equal, 4.5f, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("4.5 Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_GreaterThan_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await Context.Series
+ .HasRating(true, FilterComparison.GreaterThan, 0, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("4.5 Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_LessThan_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await Context.Series
+ .HasRating(true, FilterComparison.LessThan, 4.5f, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("0 Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_IsEmpty_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await Context.Series
+ .HasRating(true, FilterComparison.IsEmpty, 0, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("No Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_GreaterThanEqual_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await Context.Series
+ .HasRating(true, FilterComparison.GreaterThanEqual, 4.5f, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("4.5 Rating", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasRating_LessThanEqual_Works()
+ {
+ var user = await SetupHasRating();
+
+ var foundSeries = await Context.Series
+ .HasRating(true, FilterComparison.LessThanEqual, 0, user.Id)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("0 Rating", foundSeries[0].Name);
+ }
+
+ #endregion
+
+ #region HasAverageReadTime
+
+
+
+ #endregion
+
+ #region HasReadLast
+
+
+
+ #endregion
+
+ #region HasReadingDate
+
+
+
+ #endregion
+
+ #region HasTags
+
+
+
+ #endregion
+
+ #region HasPeople
+
+
+
+ #endregion
+
+ #region HasGenre
+
+
+
+ #endregion
+
+ #region HasFormat
+
+
+
+ #endregion
+
+ #region HasCollectionTags
+
+
+
+ #endregion
+
+ #region HasName
+
+ private async Task<AppUser> SetupHasName()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("Don't Toy With Me, Miss Nagatoro").WithLocalizedName("Ijiranaide, Nagatoro-san").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("My Dress-Up Darling").WithLocalizedName("Sono Bisque Doll wa Koi wo Suru").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasName_Equal_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await Context.Series
+ .HasName(true, FilterComparison.Equal, "My Dress-Up Darling")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("My Dress-Up Darling", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_Equal_LocalizedName_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await Context.Series
+ .HasName(true, FilterComparison.Equal, "Ijiranaide, Nagatoro-san")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Don't Toy With Me, Miss Nagatoro", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_BeginsWith_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await Context.Series
+ .HasName(true, FilterComparison.BeginsWith, "My Dress")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("My Dress-Up Darling", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_BeginsWith_LocalizedName_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await Context.Series
+ .HasName(true, FilterComparison.BeginsWith, "Sono Bisque")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("My Dress-Up Darling", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_EndsWith_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await Context.Series
+ .HasName(true, FilterComparison.EndsWith, "Nagatoro")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Don't Toy With Me, Miss Nagatoro", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_Matches_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await Context.Series
+ .HasName(true, FilterComparison.Matches, "Toy With Me")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Don't Toy With Me, Miss Nagatoro", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasName_NotEqual_Works()
+ {
+ await SetupHasName();
+
+ var foundSeries = await Context.Series
+ .HasName(true, FilterComparison.NotEqual, "My Dress-Up Darling")
+ .ToListAsync();
+
+ Assert.Equal(2, foundSeries.Count);
+ Assert.Equal("Don't Toy With Me, Miss Nagatoro", foundSeries[0].Name);
+ }
+
+
+ #endregion
+
+ #region HasSummary
+
+ private async Task<AppUser> SetupHasSummary()
+ {
+ var library = new LibraryBuilder("Manga")
+ .WithSeries(new SeriesBuilder("Hippos").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithSummary("I like hippos").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Apples").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithSummary("I like apples").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("Ducks").WithPages(10)
+ .WithMetadata(new SeriesMetadataBuilder().WithSummary("I like ducks").Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("No Summary").WithPages(10)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").WithPages(10).Build())
+ .Build())
+ .Build())
+ .Build();
+ var user = new AppUserBuilder("user", "user@gmail.com")
+ .WithLibrary(library)
+ .Build();
+
+ Context.Users.Add(user);
+ Context.Library.Add(library);
+ await Context.SaveChangesAsync();
+
+ return user;
+ }
+
+ [Fact]
+ public async Task HasSummary_Equal_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await Context.Series
+ .HasSummary(true, FilterComparison.Equal, "I like hippos")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Hippos", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasSummary_BeginsWith_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await Context.Series
+ .HasSummary(true, FilterComparison.BeginsWith, "I like h")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Hippos", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasSummary_EndsWith_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await Context.Series
+ .HasSummary(true, FilterComparison.EndsWith, "apples")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Apples", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasSummary_Matches_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await Context.Series
+ .HasSummary(true, FilterComparison.Matches, "like ducks")
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("Ducks", foundSeries[0].Name);
+ }
+
+ [Fact]
+ public async Task HasSummary_NotEqual_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await Context.Series
+ .HasSummary(true, FilterComparison.NotEqual, "I like ducks")
+ .ToListAsync();
+
+ Assert.Equal(3, foundSeries.Count);
+ Assert.DoesNotContain(foundSeries, s => s.Name == "Ducks");
+ }
+
+ [Fact]
+ public async Task HasSummary_IsEmpty_Works()
+ {
+ await SetupHasSummary();
+
+ var foundSeries = await Context.Series
+ .HasSummary(true, FilterComparison.IsEmpty, string.Empty)
+ .ToListAsync();
+
+ Assert.Single(foundSeries);
+ Assert.Equal("No Summary", foundSeries[0].Name);
+ }
+
+ #endregion
+
+
+ #region HasPath
+
+
+
+ #endregion
+
+
+ #region HasFilePath
+
+
+
+ #endregion
+}
diff --git a/API.Tests/Extensions/VersionExtensionTests.cs b/API.Tests/Extensions/VersionExtensionTests.cs
new file mode 100644
index 000000000..e19fd7312
--- /dev/null
+++ b/API.Tests/Extensions/VersionExtensionTests.cs
@@ -0,0 +1,81 @@
+using System;
+using API.Extensions;
+using Xunit;
+
+namespace API.Tests.Extensions;
+
+public class VersionHelperTests
+{
+ [Fact]
+ public void CompareWithoutRevision_ShouldReturnTrue_WhenMajorMinorBuildMatch()
+ {
+ // Arrange
+ var v1 = new Version(1, 2, 3, 4);
+ var v2 = new Version(1, 2, 3, 5);
+
+ // Act
+ var result = v1.CompareWithoutRevision(v2);
+
+ // Assert
+ Assert.True(result);
+ }
+
+ [Fact]
+ public void CompareWithoutRevision_ShouldHandleBuildlessVersions()
+ {
+ // Arrange
+ var v1 = new Version(1, 2);
+ var v2 = new Version(1, 2);
+
+ // Act
+ var result = v1.CompareWithoutRevision(v2);
+
+ // Assert
+ Assert.True(result);
+ }
+
+ [Theory]
+ [InlineData(1, 2, 3, 1, 2, 4)]
+ [InlineData(1, 2, 3, 1, 2, 0)]
+ public void CompareWithoutRevision_ShouldReturnFalse_WhenBuildDiffers(
+ int major1, int minor1, int build1,
+ int major2, int minor2, int build2)
+ {
+ var v1 = new Version(major1, minor1, build1);
+ var v2 = new Version(major2, minor2, build2);
+
+ var result = v1.CompareWithoutRevision(v2);
+
+ Assert.False(result);
+ }
+
+ [Theory]
+ [InlineData(1, 2, 3, 1, 3, 3)]
+ [InlineData(1, 2, 3, 1, 0, 3)]
+ public void CompareWithoutRevision_ShouldReturnFalse_WhenMinorDiffers(
+ int major1, int minor1, int build1,
+ int major2, int minor2, int build2)
+ {
+ var v1 = new Version(major1, minor1, build1);
+ var v2 = new Version(major2, minor2, build2);
+
+ var result = v1.CompareWithoutRevision(v2);
+
+ Assert.False(result);
+ }
+
+ [Theory]
+ [InlineData(1, 2, 3, 2, 2, 3)]
+ [InlineData(1, 2, 3, 0, 2, 3)]
+ public void CompareWithoutRevision_ShouldReturnFalse_WhenMajorDiffers(
+ int major1, int minor1, int build1,
+ int major2, int minor2, int build2)
+ {
+ var v1 = new Version(major1, minor1, build1);
+ var v2 = new Version(major2, minor2, build2);
+
+ var result = v1.CompareWithoutRevision(v2);
+
+ Assert.False(result);
+ }
+}
diff --git a/API.Tests/Extensions/VolumeListExtensionsTests.cs b/API.Tests/Extensions/VolumeListExtensionsTests.cs
index 264437ecd..bbb8f215c 100644
--- a/API.Tests/Extensions/VolumeListExtensionsTests.cs
+++ b/API.Tests/Extensions/VolumeListExtensionsTests.cs
@@ -2,7 +2,7 @@
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
-using API.Tests.Helpers;
+using API.Helpers.Builders;
using Xunit;
namespace API.Tests.Extensions;
@@ -16,19 +16,48 @@ public class VolumeListExtensionsTests
{
var volumes = new List<Volume>()
{
- EntityFactory.CreateVolume("1", new List()
- {
- EntityFactory.CreateChapter("3", false),
- EntityFactory.CreateChapter("4", false),
- }),
- EntityFactory.CreateVolume("0", new List()
- {
- EntityFactory.CreateChapter("1", false),
- EntityFactory.CreateChapter("0", true),
- }),
+ new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("3").Build())
+ .WithChapter(new ChapterBuilder("4").Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build(),
+
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build(),
};
- Assert.Equal(volumes[0].Number, volumes.GetCoverImage(MangaFormat.Archive).Number);
+ var v = volumes.GetCoverImage(MangaFormat.Archive);
+ Assert.Equal(volumes[0].MinNumber, volumes.GetCoverImage(MangaFormat.Archive).MinNumber);
+ }
+
+ [Fact]
+ public void GetCoverImage_ChoosesVolume1_WhenHalf()
+ {
+ var volumes = new List<Volume>()
+ {
+ new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("0.5").Build())
+ .Build(),
+
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build(),
+ };
+
+ var v = volumes.GetCoverImage(MangaFormat.Archive);
+ Assert.Equal(volumes[0].MinNumber, volumes.GetCoverImage(MangaFormat.Archive).MinNumber);
}
[Fact]
@@ -36,16 +65,19 @@ public class VolumeListExtensionsTests
{
var volumes = new List()
{
- EntityFactory.CreateVolume("1", new List()
- {
- EntityFactory.CreateChapter("3", false),
- EntityFactory.CreateChapter("4", false),
- }),
- EntityFactory.CreateVolume("0", new List()
- {
- EntityFactory.CreateChapter("1", false),
- EntityFactory.CreateChapter("0", true),
- }),
+ new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("3").Build())
+ .WithChapter(new ChapterBuilder("4").Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build(),
};
Assert.Equal(volumes[1].Name, volumes.GetCoverImage(MangaFormat.Epub).Name);
@@ -56,16 +88,19 @@ public class VolumeListExtensionsTests
{
var volumes = new List()
{
- EntityFactory.CreateVolume("1", new List()
- {
- EntityFactory.CreateChapter("3", false),
- EntityFactory.CreateChapter("4", false),
- }),
- EntityFactory.CreateVolume("0", new List()
- {
- EntityFactory.CreateChapter("1", false),
- EntityFactory.CreateChapter("0", true),
- }),
+ new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("3").Build())
+ .WithChapter(new ChapterBuilder("4").Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build(),
};
Assert.Equal(volumes[1].Name, volumes.GetCoverImage(MangaFormat.Pdf).Name);
@@ -76,16 +111,19 @@ public class VolumeListExtensionsTests
{
var volumes = new List()
{
- EntityFactory.CreateVolume("1", new List()
- {
- EntityFactory.CreateChapter("3", false),
- EntityFactory.CreateChapter("4", false),
- }),
- EntityFactory.CreateVolume("0", new List()
- {
- EntityFactory.CreateChapter("1", false),
- EntityFactory.CreateChapter("0", true),
- }),
+ new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("3").Build())
+ .WithChapter(new ChapterBuilder("4").Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build(),
};
Assert.Equal(volumes[0].Name, volumes.GetCoverImage(MangaFormat.Image).Name);
@@ -96,16 +134,19 @@ public class VolumeListExtensionsTests
{
var volumes = new List()
{
- EntityFactory.CreateVolume("2", new List()
- {
- EntityFactory.CreateChapter("3", false),
- EntityFactory.CreateChapter("4", false),
- }),
- EntityFactory.CreateVolume("1", new List()
- {
- EntityFactory.CreateChapter("1", false),
- EntityFactory.CreateChapter("0", true),
- }),
+ new VolumeBuilder("2")
+ .WithChapter(new ChapterBuilder("3").Build())
+ .WithChapter(new ChapterBuilder("4").Build())
+ .Build(),
+ new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1").Build())
+ .Build(),
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithIsSpecial(true)
+ .WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
+ .Build())
+ .Build(),
};
Assert.Equal(volumes[1].Name, volumes.GetCoverImage(MangaFormat.Image).Name);
diff --git a/API.Tests/Helpers/BookSortTitlePrefixHelperTests.cs b/API.Tests/Helpers/BookSortTitlePrefixHelperTests.cs
new file mode 100644
index 000000000..e1f585806
--- /dev/null
+++ b/API.Tests/Helpers/BookSortTitlePrefixHelperTests.cs
@@ -0,0 +1,178 @@
+using API.Helpers;
+using Xunit;
+
+namespace API.Tests.Helpers;
+
+public class BookSortTitlePrefixHelperTests
+{
+ [Theory]
+ [InlineData("The Avengers", "Avengers")]
+ [InlineData("A Game of Thrones", "Game of Thrones")]
+ [InlineData("An American Tragedy", "American Tragedy")]
+ public void TestEnglishPrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("El Quijote", "Quijote")]
+ [InlineData("La Casa de Papel", "Casa de Papel")]
+ [InlineData("Los Miserables", "Miserables")]
+ [InlineData("Las Vegas", "Vegas")]
+ [InlineData("Un Mundo Feliz", "Mundo Feliz")]
+ [InlineData("Una Historia", "Historia")]
+ public void TestSpanishPrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("Le Petit Prince", "Petit Prince")]
+ [InlineData("La Belle et la Bête", "Belle et la Bête")]
+ [InlineData("Les Misérables", "Misérables")]
+ [InlineData("Un Amour de Swann", "Amour de Swann")]
+ [InlineData("Une Vie", "Vie")]
+ [InlineData("Des Souris et des Hommes", "Souris et des Hommes")]
+ public void TestFrenchPrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("Der Herr der Ringe", "Herr der Ringe")]
+ [InlineData("Die Verwandlung", "Verwandlung")]
+ [InlineData("Das Kapital", "Kapital")]
+ [InlineData("Ein Sommernachtstraum", "Sommernachtstraum")]
+ [InlineData("Eine Geschichte", "Geschichte")]
+ public void TestGermanPrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("Il Nome della Rosa", "Nome della Rosa")]
+ [InlineData("La Divina Commedia", "Divina Commedia")]
+ [InlineData("Lo Hobbit", "Hobbit")]
+ [InlineData("Gli Ultimi", "Ultimi")]
+ [InlineData("Le Città Invisibili", "Città Invisibili")]
+ [InlineData("Un Giorno", "Giorno")]
+ [InlineData("Una Notte", "Notte")]
+ public void TestItalianPrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("O Alquimista", "Alquimista")]
+ [InlineData("A Moreninha", "Moreninha")]
+ [InlineData("Os Lusíadas", "Lusíadas")]
+ [InlineData("As Meninas", "Meninas")]
+ [InlineData("Um Defeito de Cor", "Defeito de Cor")]
+ [InlineData("Uma História", "História")]
+ public void TestPortuguesePrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("", "")] // Empty string returns empty
+ [InlineData("Book", "Book")] // Single word, no change
+ [InlineData("Avengers", "Avengers")] // No prefix, no change
+ public void TestNoPrefixCases(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("The", "The")] // Just a prefix word alone
+ [InlineData("A", "A")] // Just single letter prefix alone
+ [InlineData("Le", "Le")] // French prefix alone
+ public void TestPrefixWordAlone(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("THE AVENGERS", "AVENGERS")] // All caps
+ [InlineData("the avengers", "avengers")] // All lowercase
+ [InlineData("The AVENGERS", "AVENGERS")] // Mixed case
+ [InlineData("tHe AvEnGeRs", "AvEnGeRs")] // Random case
+ public void TestCaseInsensitivity(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("Then Came You", "Then Came You")] // "The" + "n" = not a prefix
+ [InlineData("And Then There Were None", "And Then There Were None")] // "An" + "d" = not a prefix
+ [InlineData("Elsewhere", "Elsewhere")] // "El" + "sewhere" = not a prefix (no space)
+ [InlineData("Lesson Plans", "Lesson Plans")] // "Les" + "son" = not a prefix (no space)
+ [InlineData("Theory of Everything", "Theory of Everything")] // "The" + "ory" = not a prefix
+ public void TestFalsePositivePrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("The ", "The ")] // Prefix with only space after - returns original
+ [InlineData("La ", "La ")] // Same for other languages
+ [InlineData("El ", "El ")] // Same for Spanish
+ public void TestPrefixWithOnlySpaceAfter(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("The Multiple Spaces", " Multiple Spaces")] // Doesn't trim extra spaces from remainder
+ [InlineData("Le Petit Prince", " Petit Prince")] // Leading space preserved in remainder
+ public void TestSpaceHandling(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("The The Matrix", "The Matrix")] // Removes first "The", leaves second
+ [InlineData("A A Clockwork Orange", "A Clockwork Orange")] // Removes first "A", leaves second
+ [InlineData("El El Cid", "El Cid")] // Spanish version
+ public void TestRepeatedPrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("L'Étranger", "L'Étranger")] // French contraction - no space, no change
+ [InlineData("D'Artagnan", "D'Artagnan")] // Contraction - no space, no change
+ [InlineData("The-Matrix", "The-Matrix")] // Hyphen instead of space - no change
+ [InlineData("The.Avengers", "The.Avengers")] // Period instead of space - no change
+ public void TestNonSpaceSeparators(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("三国演义", "三国演义")] // Chinese - no processing due to CJK detection
+ [InlineData("한국어", "한국어")] // Korean - not in CJK range, would be processed normally
+ public void TestCjkLanguages(string inputString, string expected)
+ {
+ // NOTE: These don't do anything; waiting for user input on whether these are needed
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("नमस्ते दुनिया", "नमस्ते दुनिया")] // Hindi - not CJK, processed normally
+ [InlineData("مرحبا بالعالم", "مرحبا بالعالم")] // Arabic - not CJK, processed normally
+ [InlineData("שלום עולם", "שלום עולם")] // Hebrew - not CJK, processed normally
+ public void TestNonLatinNonCjkScripts(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+
+ [Theory]
+ [InlineData("в мире", "мире")] // Russian "в" (in) - should be removed
+ [InlineData("на столе", "столе")] // Russian "на" (on) - should be removed
+ [InlineData("с друзьями", "друзьями")] // Russian "с" (with) - should be removed
+ public void TestRussianPrefixes(string inputString, string expected)
+ {
+ Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
+ }
+}
diff --git a/API.Tests/Helpers/CacheHelperTests.cs b/API.Tests/Helpers/CacheHelperTests.cs
index d78ed1601..3962ba2df 100644
--- a/API.Tests/Helpers/CacheHelperTests.cs
+++ b/API.Tests/Helpers/CacheHelperTests.cs
@@ -2,16 +2,17 @@
using System.Collections.Generic;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
-using API.Entities;
+using API.Entities.Enums;
using API.Helpers;
+using API.Helpers.Builders;
using API.Services;
using Xunit;
namespace API.Tests.Helpers;
-public class CacheHelperTests
+public class CacheHelperTests: AbstractFsTest
{
- private const string TestCoverImageDirectory = @"c:\";
+ private static readonly string TestCoverImageDirectory = Root;
private const string TestCoverImageFile = "thumbnail.jpg";
private readonly string _testCoverPath = Path.Join(TestCoverImageDirectory, TestCoverImageFile);
private const string TestCoverArchive = @"file in folder.zip";
@@ -35,27 +36,31 @@ public class CacheHelperTests
[Theory]
[InlineData("", false)]
- [InlineData("C:/", false)]
[InlineData(null, false)]
public void CoverImageExists_DoesFileExist(string coverImage, bool exists)
{
Assert.Equal(exists, _cacheHelper.CoverImageExists(coverImage));
}
+ [Fact]
+ public void CoverImageExists_DoesFileExistRoot()
+ {
+ Assert.False(_cacheHelper.CoverImageExists(Root));
+ }
+
[Fact]
public void CoverImageExists_FileExists()
{
- Assert.True(_cacheHelper.CoverImageExists(TestCoverArchive));
+ Assert.True(_cacheHelper.CoverImageExists(Path.Join(TestCoverImageDirectory, TestCoverArchive)));
}
[Fact]
public void ShouldUpdateCoverImage_OnFirstRun()
{
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = DateTime.Now
- };
+
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(DateTime.Now)
+ .Build();
Assert.True(_cacheHelper.ShouldUpdateCoverImage(null, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
false, false));
}
@@ -64,11 +69,9 @@ public class CacheHelperTests
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked()
{
// Represents first run
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = DateTime.Now
- };
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(DateTime.Now)
+ .Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
false, false));
}
@@ -77,11 +80,9 @@ public class CacheHelperTests
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked_2()
{
// Represents first run
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = DateTime.Now
- };
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(DateTime.Now)
+ .Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now,
false, false));
}
@@ -90,11 +91,9 @@ public class CacheHelperTests
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked()
{
// Represents first run
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = DateTime.Now
- };
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(DateTime.Now)
+ .Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
false, true));
}
@@ -103,11 +102,9 @@ public class CacheHelperTests
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked_Modified()
{
// Represents first run
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = DateTime.Now
- };
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(DateTime.Now)
+ .Build();
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
false, true));
}
@@ -129,11 +126,10 @@ public class CacheHelperTests
var cacheHelper = new CacheHelper(fileService);
var created = DateTime.Now.Subtract(TimeSpan.FromHours(1));
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = DateTime.Now.Subtract(TimeSpan.FromMinutes(1))
- };
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(DateTime.Now.Subtract(TimeSpan.FromMinutes(1)))
+ .Build();
+
Assert.True(cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, created,
false, false));
}
@@ -141,9 +137,10 @@ public class CacheHelperTests
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceCreated()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now
+ LastWriteTime = now,
};
var fileSystem = new MockFileSystem(new Dictionary
{
@@ -154,26 +151,24 @@ public class CacheHelperTests
var fileService = new FileService(fileSystem);
var cacheHelper = new CacheHelper(fileService);
- var chapter = new Chapter()
- {
- Created = filesystemFile.LastWriteTime.DateTime,
- LastModified = filesystemFile.LastWriteTime.DateTime
- };
+ var chapter = new ChapterBuilder("1")
+ .WithLastModified(now.DateTime)
+ .WithCreated(now.DateTime)
+ .Build();
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = filesystemFile.LastWriteTime.DateTime
- };
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
+ .Build();
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
}
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now
+ LastWriteTime = now,
};
var fileSystem = new MockFileSystem(new Dictionary
{
@@ -184,26 +179,25 @@ public class CacheHelperTests
var fileService = new FileService(fileSystem);
var cacheHelper = new CacheHelper(fileService);
- var chapter = new Chapter()
- {
- Created = filesystemFile.LastWriteTime.DateTime,
- LastModified = filesystemFile.LastWriteTime.DateTime
- };
+ var chapter = new ChapterBuilder("1")
+ .WithLastModified(now.DateTime)
+ .WithCreated(now.DateTime)
+ .Build();
+
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
+ .Build();
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = filesystemFile.LastWriteTime.DateTime
- };
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
}
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified_ForceUpdate()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now
+ LastWriteTime = now.DateTime,
};
var fileSystem = new MockFileSystem(new Dictionary
{
@@ -214,27 +208,25 @@ public class CacheHelperTests
var fileService = new FileService(fileSystem);
var cacheHelper = new CacheHelper(fileService);
- var chapter = new Chapter()
- {
- Created = filesystemFile.LastWriteTime.DateTime,
- LastModified = filesystemFile.LastWriteTime.DateTime
- };
+ var chapter = new ChapterBuilder("1")
+ .WithLastModified(now.DateTime)
+ .WithCreated(now.DateTime)
+ .Build();
- var file = new MangaFile()
- {
- FilePath = TestCoverArchive,
- LastModified = filesystemFile.LastWriteTime.DateTime
- };
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
+ .Build();
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, true, file));
}
[Fact]
public void IsFileUnmodifiedSinceCreationOrLastScan_ModifiedSinceLastScan()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now,
- CreationTime = DateTimeOffset.Now
+ LastWriteTime = now.DateTime,
+ CreationTime = now.DateTime
};
var fileSystem = new MockFileSystem(new Dictionary
{
@@ -245,26 +237,24 @@ public class CacheHelperTests
var fileService = new FileService(fileSystem);
var cacheHelper = new CacheHelper(fileService);
- var chapter = new Chapter()
- {
- Created = DateTime.Now.Subtract(TimeSpan.FromMinutes(10)),
- LastModified = DateTime.Now.Subtract(TimeSpan.FromMinutes(10))
- };
+ var chapter = new ChapterBuilder("1")
+ .WithLastModified(DateTime.Now.Subtract(TimeSpan.FromMinutes(10)))
+ .WithCreated(DateTime.Now.Subtract(TimeSpan.FromMinutes(10)))
+ .Build();
- var file = new MangaFile()
- {
- FilePath = Path.Join(TestCoverImageDirectory, TestCoverArchive),
- LastModified = filesystemFile.LastWriteTime.DateTime
- };
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
+ .Build();
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
}
[Fact]
public void HasFileNotChangedSinceCreationOrLastScan_ModifiedSinceLastScan_ButLastModifiedSame()
{
+ var now = DateTimeOffset.Now;
var filesystemFile = new MockFileData("")
{
- LastWriteTime = DateTimeOffset.Now
+ LastWriteTime = now.DateTime
};
var fileSystem = new MockFileSystem(new Dictionary
{
@@ -275,17 +265,15 @@ public class CacheHelperTests
var fileService = new FileService(fileSystem);
var cacheHelper = new CacheHelper(fileService);
- var chapter = new Chapter()
- {
- Created = DateTime.Now.Subtract(TimeSpan.FromMinutes(10)),
- LastModified = DateTime.Now
- };
+ var chapter = new ChapterBuilder("1")
+ .WithLastModified(DateTime.Now)
+ .WithCreated(DateTime.Now.Subtract(TimeSpan.FromMinutes(10)))
+ .Build();
+
+ var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
+ .WithLastModified(now.DateTime)
+ .Build();
- var file = new MangaFile()
- {
- FilePath = Path.Join(TestCoverImageDirectory, TestCoverArchive),
- LastModified = filesystemFile.LastWriteTime.DateTime
- };
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
}
diff --git a/API.Tests/Helpers/EntityFactory.cs b/API.Tests/Helpers/EntityFactory.cs
deleted file mode 100644
index 2f46cc1f4..000000000
--- a/API.Tests/Helpers/EntityFactory.cs
+++ /dev/null
@@ -1,82 +0,0 @@
-using System.Collections.Generic;
-using System.Linq;
-using API.Entities;
-using API.Entities.Enums;
-using API.Entities.Metadata;
-
-namespace API.Tests.Helpers;
-
-///
-/// Used to help quickly create DB entities for Unit Testing
-///
-public static class EntityFactory
-{
- public static Series CreateSeries(string name)
- {
- return new Series()
- {
- Name = name,
- SortName = name,
- LocalizedName = name,
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(name),
- Volumes = new List(),
- Metadata = new SeriesMetadata()
- };
- }
-
- public static Volume CreateVolume(string volumeNumber, List chapters = null)
- {
- var chaps = chapters ?? new List();
- var pages = chaps.Count > 0 ? chaps.Max(c => c.Pages) : 0;
- return new Volume()
- {
- Name = volumeNumber,
- Number = (int) API.Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(volumeNumber),
- Pages = pages,
- Chapters = chaps
- };
- }
-
- public static Chapter CreateChapter(string range, bool isSpecial, List files = null, int pageCount = 0)
- {
- return new Chapter()
- {
- IsSpecial = isSpecial,
- Range = range,
- Number = API.Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(range) + string.Empty,
- Files = files ?? new List(),
- Pages = pageCount,
-
- };
- }
-
- public static MangaFile CreateMangaFile(string filename, MangaFormat format, int pages)
- {
- return new MangaFile()
- {
- FilePath = filename,
- Format = format,
- Pages = pages
- };
- }
-
- public static SeriesMetadata CreateSeriesMetadata(ICollection collectionTags)
- {
- return new SeriesMetadata()
- {
- CollectionTags = collectionTags
- };
- }
-
- public static CollectionTag CreateCollectionTag(int id, string title, string summary, bool promoted)
- {
- return new CollectionTag()
- {
- Id = id,
- NormalizedTitle = API.Services.Tasks.Scanner.Parser.Parser.Normalize(title).ToUpper(),
- Title = title,
- Summary = summary,
- Promoted = promoted
- };
- }
-}
diff --git a/API.Tests/Helpers/GenreHelperTests.cs b/API.Tests/Helpers/GenreHelperTests.cs
deleted file mode 100644
index 94602ff01..000000000
--- a/API.Tests/Helpers/GenreHelperTests.cs
+++ /dev/null
@@ -1,131 +0,0 @@
-using System.Collections.Generic;
-using API.Data;
-using API.Entities;
-using API.Helpers;
-using Xunit;
-
-namespace API.Tests.Helpers;
-
-public class GenreHelperTests
-{
- [Fact]
- public void UpdateGenre_ShouldAddNewGenre()
- {
- var allGenres = new List
- {
- DbFactory.Genre("Action", false),
- DbFactory.Genre("action", false),
- DbFactory.Genre("Sci-fi", false),
- };
- var genreAdded = new List();
-
- GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Adventure"}, false, genre =>
- {
- genreAdded.Add(genre);
- });
-
- Assert.Equal(2, genreAdded.Count);
- Assert.Equal(4, allGenres.Count);
- }
-
- [Fact]
- public void UpdateGenre_ShouldNotAddDuplicateGenre()
- {
- var allGenres = new List
- {
- DbFactory.Genre("Action", false),
- DbFactory.Genre("action", false),
- DbFactory.Genre("Sci-fi", false),
-
- };
- var genreAdded = new List();
-
- GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Scifi"}, false, genre =>
- {
- genreAdded.Add(genre);
- });
-
- Assert.Equal(3, allGenres.Count);
- }
-
- [Fact]
- public void AddGenre_ShouldAddOnlyNonExistingGenre()
- {
- var existingGenres = new List
- {
- DbFactory.Genre("Action", false),
- DbFactory.Genre("action", false),
- DbFactory.Genre("Sci-fi", false),
- };
-
-
- GenreHelper.AddGenreIfNotExists(existingGenres, DbFactory.Genre("Action", false));
- Assert.Equal(3, existingGenres.Count);
-
- GenreHelper.AddGenreIfNotExists(existingGenres, DbFactory.Genre("action", false));
- Assert.Equal(3, existingGenres.Count);
-
- GenreHelper.AddGenreIfNotExists(existingGenres, DbFactory.Genre("Shonen", false));
- Assert.Equal(4, existingGenres.Count);
- }
-
- [Fact]
- public void AddGenre_ShouldNotAddSameNameAndExternal()
- {
- var existingGenres = new List
- {
- DbFactory.Genre("Action", false),
- DbFactory.Genre("action", false),
- DbFactory.Genre("Sci-fi", false),
- };
-
-
- GenreHelper.AddGenreIfNotExists(existingGenres, DbFactory.Genre("Action", true));
- Assert.Equal(3, existingGenres.Count);
- }
-
- [Fact]
- public void KeepOnlySamePeopleBetweenLists()
- {
- var existingGenres = new List
- {
- DbFactory.Genre("Action", false),
- DbFactory.Genre("Sci-fi", false),
- };
-
- var peopleFromChapters = new List
- {
- DbFactory.Genre("Action", false),
- };
-
- var genreRemoved = new List();
- GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
- peopleFromChapters, genre =>
- {
- genreRemoved.Add(genre);
- });
-
- Assert.Equal(1, genreRemoved.Count);
- }
-
- [Fact]
- public void RemoveEveryoneIfNothingInRemoveAllExcept()
- {
- var existingGenres = new List
- {
- DbFactory.Genre("Action", false),
- DbFactory.Genre("Sci-fi", false),
- };
-
- var peopleFromChapters = new List();
-
- var genreRemoved = new List();
- GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
- peopleFromChapters, genre =>
- {
- genreRemoved.Add(genre);
- });
-
- Assert.Equal(2, genreRemoved.Count);
- }
-}
diff --git a/API.Tests/Helpers/KoreaderHelperTests.cs b/API.Tests/Helpers/KoreaderHelperTests.cs
new file mode 100644
index 000000000..66d287a5d
--- /dev/null
+++ b/API.Tests/Helpers/KoreaderHelperTests.cs
@@ -0,0 +1,60 @@
+using API.DTOs.Koreader;
+using API.DTOs.Progress;
+using API.Helpers;
+using System.Runtime.CompilerServices;
+using Xunit;
+
+namespace API.Tests.Helpers;
+
+
+public class KoreaderHelperTests
+{
+
+ [Theory]
+ [InlineData("/body/DocFragment[11]/body/div/a", 10, null)]
+ [InlineData("/body/DocFragment[1]/body/div/p[40]", 0, 40)]
+ [InlineData("/body/DocFragment[8]/body/div/p[28]/text().264", 7, 28)]
+ public void GetEpubPositionDto(string koreaderPosition, int page, int? pNumber)
+ {
+ var expected = EmptyProgressDto();
+ expected.BookScrollId = pNumber.HasValue ? $"//html[1]/BODY/APP-ROOT[1]/DIV[1]/DIV[1]/DIV[1]/APP-BOOK-READER[1]/DIV[1]/DIV[2]/DIV[1]/DIV[1]/DIV[1]/P[{pNumber}]" : null;
+ expected.PageNum = page;
+ var actual = EmptyProgressDto();
+
+ KoreaderHelper.UpdateProgressDto(actual, koreaderPosition);
+ Assert.Equal(expected.BookScrollId, actual.BookScrollId);
+ Assert.Equal(expected.PageNum, actual.PageNum);
+ }
+
+
+ [Theory]
+ [InlineData("//html[1]/BODY/APP-ROOT[1]/DIV[1]/DIV[1]/DIV[1]/APP-BOOK-READER[1]/DIV[1]/DIV[2]/DIV[1]/DIV[1]/DIV[1]/P[20]", 5, "/body/DocFragment[6]/body/div/p[20]")]
+ [InlineData(null, 10, "/body/DocFragment[11]/body/div/a")]
+ public void GetKoreaderPosition(string scrollId, int page, string koreaderPosition)
+ {
+ var given = EmptyProgressDto();
+ given.BookScrollId = scrollId;
+ given.PageNum = page;
+
+ Assert.Equal(koreaderPosition, KoreaderHelper.GetKoreaderPosition(given));
+ }
+
+ [Theory]
+ [InlineData("./Data/AesopsFables.epub", "8795ACA4BF264B57C1EEDF06A0CEE688")]
+ public void GetKoreaderHash(string filePath, string hash)
+ {
+ Assert.Equal(hash, KoreaderHelper.HashContents(filePath));
+ }
+
+ private ProgressDto EmptyProgressDto()
+ {
+ return new ProgressDto
+ {
+ ChapterId = 0,
+ PageNum = 0,
+ VolumeId = 0,
+ SeriesId = 0,
+ LibraryId = 0
+ };
+ }
+}
diff --git a/API.Tests/Helpers/OrderableHelperTests.cs b/API.Tests/Helpers/OrderableHelperTests.cs
new file mode 100644
index 000000000..15f9e6268
--- /dev/null
+++ b/API.Tests/Helpers/OrderableHelperTests.cs
@@ -0,0 +1,202 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using API.Entities;
+using API.Helpers;
+using Xunit;
+
+namespace API.Tests.Helpers;
+
+public class OrderableHelperTests
+{
+ [Fact]
+ public void ReorderItems_ItemExists_SuccessfullyReorders()
+ {
+ // Arrange
+ var items = new List<AppUserSideNavStream>
+ {
+ new AppUserSideNavStream { Id = 1, Order = 0, Name = "A" },
+ new AppUserSideNavStream { Id = 2, Order = 1, Name = "A" },
+ new AppUserSideNavStream { Id = 3, Order = 2, Name = "A" },
+ };
+
+ // Act
+ OrderableHelper.ReorderItems(items, 2, 0);
+
+ // Assert
+ Assert.Equal(2, items[0].Id); // Item 2 should be at position 0
+ Assert.Equal(1, items[1].Id); // Item 1 should be at position 1
+ Assert.Equal(3, items[2].Id); // Item 3 should remain at position 2
+ }
+
+ [Fact]
+ public void ReorderItems_ItemNotFound_NoChange()
+ {
+ // Arrange
+ var items = new List<AppUserSideNavStream>
+ {
+ new AppUserSideNavStream { Id = 1, Order = 0, Name = "A" },
+ new AppUserSideNavStream { Id = 2, Order = 1, Name = "A" },
+ };
+
+ // Act
+ OrderableHelper.ReorderItems(items, 3, 0); // Item with Id 3 doesn't exist
+
+ // Assert
+ Assert.Equal(1, items[0].Id); // Item 1 should remain at position 0
+ Assert.Equal(2, items[1].Id); // Item 2 should remain at position 1
+ }
+
+ [Fact]
+ public void ReorderItems_InvalidPosition_NoChange()
+ {
+ var items = new List<AppUserSideNavStream>
+ {
+ new AppUserSideNavStream { Id = 1, Order = 0, Name = "A" },
+ new AppUserSideNavStream { Id = 2, Order = 1, Name = "A" },
+ };
+
+ OrderableHelper.ReorderItems(items, 2, 3); // Position 3 is out of range
+
+ Assert.Equal(1, items[0].Id); // Item 1 should remain at position 0
+ Assert.Equal(2, items[1].Id); // Item 2 should remain at position 1
+ }
+
+ [Fact]
+ public void ReorderItems_EmptyList_NoChange()
+ {
+ // Arrange
+ var items = new List<AppUserSideNavStream>();
+
+ // Act
+ OrderableHelper.ReorderItems(items, 2, 1); // List is empty
+
+ // Assert
+ Assert.Empty(items); // The list should remain empty
+ }
+
+ [Fact]
+ public void ReorderItems_DoubleMove()
+ {
+ var items = new List<AppUserSideNavStream>
+ {
+ new AppUserSideNavStream { Id = 1, Order = 0, Name = "0" },
+ new AppUserSideNavStream { Id = 2, Order = 1, Name = "1" },
+ new AppUserSideNavStream { Id = 3, Order = 2, Name = "2" },
+ new AppUserSideNavStream { Id = 4, Order = 3, Name = "3" },
+ new AppUserSideNavStream { Id = 5, Order = 4, Name = "4" },
+ new AppUserSideNavStream { Id = 6, Order = 5, Name = "5" },
+ };
+
+ // Move 4 -> 1
+ OrderableHelper.ReorderItems(items, 5, 1);
+
+ Assert.Equal(1, items[0].Id);
+ Assert.Equal(0, items[0].Order);
+ Assert.Equal(5, items[1].Id);
+ Assert.Equal(1, items[1].Order);
+ Assert.Equal(2, items[2].Id);
+ Assert.Equal(2, items[2].Order);
+
+ // Ensure the items are in the correct order
+ Assert.Equal("041235", string.Join("", items.Select(s => s.Name)));
+
+ OrderableHelper.ReorderItems(items, items[4].Id, 1); // 3 -> 1
+
+ Assert.Equal("034125", string.Join("", items.Select(s => s.Name)));
+ }
+
+ private static List<ReadingListItem> CreateTestReadingListItems(int count = 4)
+ {
+ var items = new List<ReadingListItem>();
+
+ for (var i = 0; i < count; i++)
+ {
+ items.Add(new ReadingListItem() { Id = i + 1, Order = count, ReadingListId = i + 1});
+ }
+
+ return items;
+ }
+
+ [Fact]
+ public void ReorderItems_MoveItemToBeginning_CorrectOrder()
+ {
+ var items = CreateTestReadingListItems();
+
+ OrderableHelper.ReorderItems(items, 3, 0);
+
+ Assert.Equal(3, items[0].Id);
+ Assert.Equal(1, items[1].Id);
+ Assert.Equal(2, items[2].Id);
+ Assert.Equal(4, items[3].Id);
+
+ for (var i = 0; i < items.Count; i++)
+ {
+ Assert.Equal(i, items[i].Order);
+ }
+ }
+
+ [Fact]
+ public void ReorderItems_MoveItemToEnd_CorrectOrder()
+ {
+ var items = CreateTestReadingListItems();
+
+ OrderableHelper.ReorderItems(items, 1, 3);
+
+ Assert.Equal(2, items[0].Id);
+ Assert.Equal(3, items[1].Id);
+ Assert.Equal(4, items[2].Id);
+ Assert.Equal(1, items[3].Id);
+
+ for (var i = 0; i < items.Count; i++)
+ {
+ Assert.Equal(i, items[i].Order);
+ }
+ }
+
+ [Fact]
+ public void ReorderItems_MoveItemToMiddle_CorrectOrder()
+ {
+ var items = CreateTestReadingListItems();
+
+ OrderableHelper.ReorderItems(items, 4, 2);
+
+ Assert.Equal(1, items[0].Id);
+ Assert.Equal(2, items[1].Id);
+ Assert.Equal(4, items[2].Id);
+ Assert.Equal(3, items[3].Id);
+
+ for (var i = 0; i < items.Count; i++)
+ {
+ Assert.Equal(i, items[i].Order);
+ }
+ }
+
+ [Fact]
+ public void ReorderItems_MoveItemToOutOfBoundsPosition_MovesToEnd()
+ {
+ var items = CreateTestReadingListItems();
+
+ OrderableHelper.ReorderItems(items, 2, 10);
+
+ Assert.Equal(1, items[0].Id);
+ Assert.Equal(3, items[1].Id);
+ Assert.Equal(4, items[2].Id);
+ Assert.Equal(2, items[3].Id);
+
+ for (var i = 0; i < items.Count; i++)
+ {
+ Assert.Equal(i, items[i].Order);
+ }
+ }
+
+ [Fact]
+ public void ReorderItems_NegativePosition_ThrowsArgumentException()
+ {
+ var items = CreateTestReadingListItems();
+
+ Assert.Throws<ArgumentException>(() =>
+ OrderableHelper.ReorderItems(items, 2, -1)
+ );
+ }
+}
diff --git a/API.Tests/Helpers/ParserInfoFactory.cs b/API.Tests/Helpers/ParserInfoFactory.cs
index 793b764b0..40d0ea4f4 100644
--- a/API.Tests/Helpers/ParserInfoFactory.cs
+++ b/API.Tests/Helpers/ParserInfoFactory.cs
@@ -3,8 +3,9 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using API.Entities.Enums;
-using API.Parser;
+using API.Extensions;
using API.Services.Tasks.Scanner;
+using API.Services.Tasks.Scanner.Parser;
namespace API.Tests.Helpers;
@@ -29,12 +30,12 @@ public static class ParserInfoFactory
 public static void AddToParsedInfo(IDictionary<ParsedSeries, List<ParserInfo>> collectedSeries, ParserInfo info)
{
var existingKey = collectedSeries.Keys.FirstOrDefault(ps =>
- ps.Format == info.Format && ps.NormalizedName == API.Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series));
+ ps.Format == info.Format && ps.NormalizedName == info.Series.ToNormalized());
existingKey ??= new ParsedSeries()
{
Format = info.Format,
Name = info.Series,
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series)
+ NormalizedName = info.Series.ToNormalized()
};
if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>))
{
diff --git a/API.Tests/Helpers/ParserInfoHelperTests.cs b/API.Tests/Helpers/ParserInfoHelperTests.cs
index e51362b81..0bb7efb9b 100644
--- a/API.Tests/Helpers/ParserInfoHelperTests.cs
+++ b/API.Tests/Helpers/ParserInfoHelperTests.cs
@@ -1,10 +1,9 @@
using System.Collections.Generic;
-using API.Entities;
using API.Entities.Enums;
-using API.Entities.Metadata;
using API.Helpers;
-using API.Parser;
+using API.Helpers.Builders;
using API.Services.Tasks.Scanner;
+using API.Services.Tasks.Scanner.Parser;
using Xunit;
namespace API.Tests.Helpers;
@@ -21,23 +20,13 @@ public class ParserInfoHelperTests
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive});
//AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub});
- var series = new Series()
- {
- Name = "Darker Than Black",
- LocalizedName = "Darker Than Black",
- OriginalName = "Darker Than Black",
- Volumes = new List()
- {
- new Volume()
- {
- Number = 1,
- Name = "1"
- }
- },
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"),
- Metadata = new SeriesMetadata(),
- Format = MangaFormat.Epub
- };
+ var series = new SeriesBuilder("Darker Than Black")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder("1")
+ .WithName("1")
+ .Build())
+ .WithLocalizedName("Darker Than Black")
+ .Build();
Assert.False(ParserInfoHelpers.SeriesHasMatchingParserInfoFormat(series, infos));
}
@@ -50,23 +39,14 @@ public class ParserInfoHelperTests
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive});
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub});
- var series = new Series()
- {
- Name = "Darker Than Black",
- LocalizedName = "Darker Than Black",
- OriginalName = "Darker Than Black",
- Volumes = new List()
- {
- new Volume()
- {
- Number = 1,
- Name = "1"
- }
- },
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"),
- Metadata = new SeriesMetadata(),
- Format = MangaFormat.Epub
- };
+
+ var series = new SeriesBuilder("Darker Than Black")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder("1")
+ .WithName("1")
+ .Build())
+ .WithLocalizedName("Darker Than Black")
+ .Build();
Assert.True(ParserInfoHelpers.SeriesHasMatchingParserInfoFormat(series, infos));
}
diff --git a/API.Tests/Helpers/PersonHelperTests.cs b/API.Tests/Helpers/PersonHelperTests.cs
index d5dafd963..47dab48da 100644
--- a/API.Tests/Helpers/PersonHelperTests.cs
+++ b/API.Tests/Helpers/PersonHelperTests.cs
@@ -1,161 +1,226 @@
-using System;
-using System.Collections.Generic;
-using API.Data;
-using API.Entities;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
using API.Entities.Enums;
using API.Helpers;
+using API.Helpers.Builders;
using Xunit;
namespace API.Tests.Helpers;
-public class PersonHelperTests
+public class PersonHelperTests : AbstractDbTest
{
- [Fact]
- public void UpdatePeople_ShouldAddNewPeople()
+ protected override async Task ResetDb()
{
- var allPeople = new List
- {
- DbFactory.Person("Joe Shmo", PersonRole.CoverArtist),
- DbFactory.Person("Joe Shmo", PersonRole.Writer)
- };
- var peopleAdded = new List();
+ Context.Series.RemoveRange(Context.Series.ToList());
+ Context.Person.RemoveRange(Context.Person.ToList());
+ Context.Library.RemoveRange(Context.Library.ToList());
+ Context.Series.RemoveRange(Context.Series.ToList());
+ await Context.SaveChangesAsync();
+ }
- PersonHelper.UpdatePeople(allPeople, new[] {"Joseph Shmo", "Sally Ann"}, PersonRole.Writer, person =>
- {
- peopleAdded.Add(person);
- });
+ // 1. Test adding new people and keeping existing ones
+ [Fact]
+ public async Task UpdateChapterPeopleAsync_AddNewPeople_ExistingPersonRetained()
+ {
+ await ResetDb();
- Assert.Equal(2, peopleAdded.Count);
- Assert.Equal(4, allPeople.Count);
+ var library = new LibraryBuilder("My Library")
+ .Build();
+
+ UnitOfWork.LibraryRepository.Add(library);
+ await UnitOfWork.CommitAsync();
+
+ var existingPerson = new PersonBuilder("Joe Shmo").Build();
+ var chapter = new ChapterBuilder("1").Build();
+
+ // Create an existing person and assign them to the series with a role
+ var series = new SeriesBuilder("Test 1")
+ .WithLibraryId(library.Id)
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(existingPerson, PersonRole.Editor)
+ .Build())
+ .WithVolume(new VolumeBuilder("1").WithChapter(chapter).Build())
+ .Build();
+
+ UnitOfWork.SeriesRepository.Add(series);
+ await UnitOfWork.CommitAsync();
+
+ // Call UpdateChapterPeopleAsync with one existing and one new person
+ await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo", "New Person" }, PersonRole.Editor, UnitOfWork);
+
+ // Assert existing person retained and new person added
+ var people = await UnitOfWork.PersonRepository.GetAllPeople();
+ Assert.Contains(people, p => p.Name == "Joe Shmo");
+ Assert.Contains(people, p => p.Name == "New Person");
+
+ var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
+ Assert.Contains("Joe Shmo", chapterPeople);
+ Assert.Contains("New Person", chapterPeople);
+ }
+
+ // 2. Test removing a person no longer in the list
+ [Fact]
+ public async Task UpdateChapterPeopleAsync_RemovePeople()
+ {
+ await ResetDb();
+
+ var library = new LibraryBuilder("My Library")
+ .Build();
+
+ UnitOfWork.LibraryRepository.Add(library);
+ await UnitOfWork.CommitAsync();
+
+ var existingPerson1 = new PersonBuilder("Joe Shmo").Build();
+ var existingPerson2 = new PersonBuilder("Jane Doe").Build();
+ var chapter = new ChapterBuilder("1")
+ .WithPerson(existingPerson1, PersonRole.Editor)
+ .WithPerson(existingPerson2, PersonRole.Editor)
+ .Build();
+
+ var series = new SeriesBuilder("Test 1")
+ .WithLibraryId(library.Id)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(chapter)
+ .Build())
+ .Build();
+
+ UnitOfWork.SeriesRepository.Add(series);
+ await UnitOfWork.CommitAsync();
+
+ // Call UpdateChapterPeopleAsync with only one person
+ await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
+
+ // PersonHelper does not remove the Person from the global DbSet itself
+ await UnitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated();
+
+ var people = await UnitOfWork.PersonRepository.GetAllPeople();
+ Assert.DoesNotContain(people, p => p.Name == "Jane Doe");
+
+ var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
+ Assert.Contains("Joe Shmo", chapterPeople);
+ Assert.DoesNotContain("Jane Doe", chapterPeople);
+ }
+
+ // 3. Test no changes when the list of people is the same
+ [Fact]
+ public async Task UpdateChapterPeopleAsync_NoChanges()
+ {
+ await ResetDb();
+
+ var library = new LibraryBuilder("My Library")
+ .Build();
+
+ UnitOfWork.LibraryRepository.Add(library);
+ await UnitOfWork.CommitAsync();
+
+ var existingPerson = new PersonBuilder("Joe Shmo").Build();
+ var chapter = new ChapterBuilder("1").WithPerson(existingPerson, PersonRole.Editor).Build();
+
+ var series = new SeriesBuilder("Test 1")
+ .WithLibraryId(library.Id)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(chapter)
+ .Build())
+ .Build();
+
+ UnitOfWork.SeriesRepository.Add(series);
+ await UnitOfWork.CommitAsync();
+
+ // Call UpdateChapterPeopleAsync with the same list
+ await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
+
+ var people = await UnitOfWork.PersonRepository.GetAllPeople();
+ Assert.Contains(people, p => p.Name == "Joe Shmo");
+
+ var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
+ Assert.Contains("Joe Shmo", chapterPeople);
+ Assert.Single(chapter.People); // No duplicate entries
+ }
+
+ // 4. Test multiple roles for a person
+ [Fact]
+ public async Task UpdateChapterPeopleAsync_MultipleRoles()
+ {
+ await ResetDb();
+
+ var library = new LibraryBuilder("My Library")
+ .Build();
+
+ UnitOfWork.LibraryRepository.Add(library);
+ await UnitOfWork.CommitAsync();
+
+ var person = new PersonBuilder("Joe Shmo").Build();
+ var chapter = new ChapterBuilder("1").WithPerson(person, PersonRole.Writer).Build();
+
+ var series = new SeriesBuilder("Test 1")
+ .WithLibraryId(library.Id)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(chapter)
+ .Build())
+ .Build();
+
+ UnitOfWork.SeriesRepository.Add(series);
+ await UnitOfWork.CommitAsync();
+
+ // Add same person as Editor
+ await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
+
+ // Ensure that the same person is assigned with two roles
+ var chapterPeople = chapter
+ .People
+ .Where(cp =>
+ cp.Person.Name == "Joe Shmo")
+ .ToList();
+ Assert.Equal(2, chapterPeople.Count); // One for each role
+ Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Writer);
+ Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Editor);
}
[Fact]
- public void UpdatePeople_ShouldNotAddDuplicatePeople()
+ public async Task UpdateChapterPeopleAsync_MatchOnAlias_NoChanges()
{
- var allPeople = new List
- {
- DbFactory.Person("Joe Shmo", PersonRole.CoverArtist),
- DbFactory.Person("Joe Shmo", PersonRole.Writer),
- DbFactory.Person("Sally Ann", PersonRole.CoverArtist),
+ await ResetDb();
- };
- var peopleAdded = new List();
+ var library = new LibraryBuilder("My Library")
+ .Build();
- PersonHelper.UpdatePeople(allPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.CoverArtist, person =>
- {
- peopleAdded.Add(person);
- });
+ UnitOfWork.LibraryRepository.Add(library);
+ await UnitOfWork.CommitAsync();
- Assert.Equal(3, allPeople.Count);
- }
-
- [Fact]
- public void RemovePeople_ShouldRemovePeopleOfSameRole()
- {
- var existingPeople = new List
- {
- DbFactory.Person("Joe Shmo", PersonRole.CoverArtist),
- DbFactory.Person("Joe Shmo", PersonRole.Writer)
- };
- var peopleRemoved = new List();
- PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.NotEqual(existingPeople, peopleRemoved);
- Assert.Equal(1, peopleRemoved.Count);
- }
-
- [Fact]
- public void RemovePeople_ShouldRemovePeopleFromBothRoles()
- {
- var existingPeople = new List
- {
- DbFactory.Person("Joe Shmo", PersonRole.CoverArtist),
- DbFactory.Person("Joe Shmo", PersonRole.Writer)
- };
- var peopleRemoved = new List();
- PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.NotEqual(existingPeople, peopleRemoved);
- Assert.Equal(1, peopleRemoved.Count);
-
- PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo"}, PersonRole.CoverArtist, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.Equal(0, existingPeople.Count);
- Assert.Equal(2, peopleRemoved.Count);
- }
-
- [Fact]
- public void RemovePeople_ShouldRemovePeopleOfSameRole_WhenNothingPassed()
- {
- var existingPeople = new List
- {
- DbFactory.Person("Joe Shmo", PersonRole.Writer),
- DbFactory.Person("Joe Shmo", PersonRole.Writer),
- DbFactory.Person("Joe Shmo", PersonRole.CoverArtist)
- };
- var peopleRemoved = new List();
- PersonHelper.RemovePeople(existingPeople, Array.Empty(), PersonRole.Writer, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.NotEqual(existingPeople, peopleRemoved);
- Assert.Equal(2, peopleRemoved.Count);
- }
-
- [Fact]
- public void KeepOnlySamePeopleBetweenLists()
- {
- var existingPeople = new List
- {
- DbFactory.Person("Joe Shmo", PersonRole.CoverArtist),
- DbFactory.Person("Joe Shmo", PersonRole.Writer),
- DbFactory.Person("Sally", PersonRole.Writer),
- };
-
- var peopleFromChapters = new List
- {
- DbFactory.Person("Joe Shmo", PersonRole.CoverArtist),
- };
-
- var peopleRemoved = new List();
- PersonHelper.KeepOnlySamePeopleBetweenLists(existingPeople,
- peopleFromChapters, person =>
- {
- peopleRemoved.Add(person);
- });
-
- Assert.Equal(2, peopleRemoved.Count);
- }
-
- [Fact]
- public void AddPeople_ShouldAddOnlyNonExistingPeople()
- {
- var existingPeople = new List
- {
- DbFactory.Person("Joe Shmo", PersonRole.CoverArtist),
- DbFactory.Person("Joe Shmo", PersonRole.Writer),
- DbFactory.Person("Sally", PersonRole.Writer),
- };
-
-
- PersonHelper.AddPersonIfNotExists(existingPeople, DbFactory.Person("Joe Shmo", PersonRole.CoverArtist));
- Assert.Equal(3, existingPeople.Count);
-
- PersonHelper.AddPersonIfNotExists(existingPeople, DbFactory.Person("Joe Shmo", PersonRole.Writer));
- Assert.Equal(3, existingPeople.Count);
-
- PersonHelper.AddPersonIfNotExists(existingPeople, DbFactory.Person("Joe Shmo Two", PersonRole.CoverArtist));
- Assert.Equal(4, existingPeople.Count);
+ var person = new PersonBuilder("Joe Doe")
+ .WithAlias("Jonny Doe")
+ .Build();
+
+ var chapter = new ChapterBuilder("1")
+ .WithPerson(person, PersonRole.Editor)
+ .Build();
+
+ var series = new SeriesBuilder("Test 1")
+ .WithLibraryId(library.Id)
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(chapter)
+ .Build())
+ .Build();
+
+ UnitOfWork.SeriesRepository.Add(series);
+ await UnitOfWork.CommitAsync();
+
+ // Add on Name
+ await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Doe" }, PersonRole.Editor, UnitOfWork);
+ await UnitOfWork.CommitAsync();
+
+ var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
+ Assert.Single(allPeople);
+
+ // Add on alias
+ await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Jonny Doe" }, PersonRole.Editor, UnitOfWork);
+ await UnitOfWork.CommitAsync();
+
+ allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
+ Assert.Single(allPeople);
}
+ // TODO: Unit tests for series
}
diff --git a/API.Tests/Helpers/RandfHelper.cs b/API.Tests/Helpers/RandfHelper.cs
new file mode 100644
index 000000000..d8c007df7
--- /dev/null
+++ b/API.Tests/Helpers/RandfHelper.cs
@@ -0,0 +1,124 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+
+namespace API.Tests.Helpers;
+
+public class RandfHelper
+{
+ private static readonly Random Random = new ();
+
+ /// <summary>
+ /// Returns true if all simple fields are equal
+ /// </summary>
+ /// <param name="obj1"></param>
+ /// <param name="obj2"></param>
+ /// <param name="ignoreFields">fields to ignore, note that the names are very weird sometimes</param>
+ /// <returns></returns>
+ /// <exception cref="ArgumentNullException"></exception>
+ /// <exception cref="ArgumentException"></exception>
+ public static bool AreSimpleFieldsEqual(object obj1, object obj2, IList<string> ignoreFields)
+ {
+ if (obj1 == null || obj2 == null)
+ throw new ArgumentNullException("Neither object can be null.");
+
+ Type type1 = obj1.GetType();
+ Type type2 = obj2.GetType();
+
+ if (type1 != type2)
+ throw new ArgumentException("Objects must be of the same type.");
+
+ FieldInfo[] fields = type1.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.NonPublic);
+
+ foreach (var field in fields)
+ {
+ if (field.IsInitOnly) continue;
+ if (ignoreFields.Contains(field.Name)) continue;
+
+ Type fieldType = field.FieldType;
+
+ if (IsRelevantType(fieldType))
+ {
+ object value1 = field.GetValue(obj1);
+ object value2 = field.GetValue(obj2);
+
+ if (!Equals(value1, value2))
+ {
+ throw new ArgumentException("Fields must be of the same type: " + field.Name + " was " + value1 + " and " + value2);
+ }
+ }
+ }
+
+ return true;
+ }
+
+ private static bool IsRelevantType(Type type)
+ {
+ return type.IsPrimitive
+ || type == typeof(string)
+ || type.IsEnum;
+ }
+
+ /// <summary>
+ /// Sets all simple fields of the given object to a random value
+ /// </summary>
+ /// <remarks>
+ /// Simple is, primitive, string, or enum
+ /// </remarks>
+ public static void SetRandomValues(object obj)
+ {
+ if (obj == null) throw new ArgumentNullException(nameof(obj));
+
+ Type type = obj.GetType();
+ FieldInfo[] fields = type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic);
+
+ foreach (var field in fields)
+ {
+ if (field.IsInitOnly) continue; // Skip readonly fields
+
+ object value = GenerateRandomValue(field.FieldType);
+ if (value != null)
+ {
+ field.SetValue(obj, value);
+ }
+ }
+ }
+
+ private static object GenerateRandomValue(Type type)
+ {
+ if (type == typeof(int))
+ return Random.Next();
+ if (type == typeof(float))
+ return (float)Random.NextDouble() * 100;
+ if (type == typeof(double))
+ return Random.NextDouble() * 100;
+ if (type == typeof(bool))
+ return Random.Next(2) == 1;
+ if (type == typeof(char))
+ return (char)Random.Next('A', 'Z' + 1);
+ if (type == typeof(byte))
+ return (byte)Random.Next(0, 256);
+ if (type == typeof(short))
+ return (short)Random.Next(short.MinValue, short.MaxValue);
+ if (type == typeof(long))
+ return (long)(Random.NextDouble() * long.MaxValue);
+ if (type == typeof(string))
+ return GenerateRandomString(10);
+ if (type.IsEnum)
+ {
+ var values = Enum.GetValues(type);
+ return values.GetValue(Random.Next(values.Length));
+ }
+
+ // Unsupported type
+ return null;
+ }
+
+ private static string GenerateRandomString(int length)
+ {
+ const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+ return new string(Enumerable.Repeat(chars, length)
+ .Select(s => s[Random.Next(s.Length)]).ToArray());
+ }
+}
diff --git a/API.Tests/Helpers/RateLimiterTests.cs b/API.Tests/Helpers/RateLimiterTests.cs
new file mode 100644
index 000000000..e9b0030b9
--- /dev/null
+++ b/API.Tests/Helpers/RateLimiterTests.cs
@@ -0,0 +1,80 @@
+using System;
+using System.Threading.Tasks;
+using API.Helpers;
+using Xunit;
+
+namespace API.Tests.Helpers;
+
+public class RateLimiterTests
+{
+ [Fact]
+ public void AcquireTokens_Successful()
+ {
+ // Arrange
+ var limiter = new RateLimiter(3, TimeSpan.FromSeconds(1));
+
+ // Act & Assert
+ Assert.True(limiter.TryAcquire("test_key"));
+ Assert.True(limiter.TryAcquire("test_key"));
+ Assert.True(limiter.TryAcquire("test_key"));
+ }
+
+ [Fact]
+ public void AcquireTokens_ExceedLimit()
+ {
+ // Arrange
+ var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);
+
+ // Act
+ limiter.TryAcquire("test_key");
+ limiter.TryAcquire("test_key");
+
+ // Assert
+ Assert.False(limiter.TryAcquire("test_key"));
+ }
+
+ [Fact]
+ public async Task AcquireTokens_Refill()
+ {
+ // Arrange
+ var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));
+
+ // Act
+ limiter.TryAcquire("test_key");
+ limiter.TryAcquire("test_key");
+
+ // Wait for refill
+ await Task.Delay(1100);
+
+ // Assert
+ Assert.True(limiter.TryAcquire("test_key"));
+ }
+
+ [Fact]
+ public async Task AcquireTokens_Refill_WithOff()
+ {
+ // Arrange
+ var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);
+
+ // Act
+ limiter.TryAcquire("test_key");
+ limiter.TryAcquire("test_key");
+
+ // Wait for refill
+ await Task.Delay(2100);
+
+ // Assert
+ Assert.False(limiter.TryAcquire("test_key"));
+ }
+
+ [Fact]
+ public void AcquireTokens_MultipleKeys()
+ {
+ // Arrange
+ var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));
+
+ // Act & Assert
+ Assert.True(limiter.TryAcquire("key1"));
+ Assert.True(limiter.TryAcquire("key2"));
+ }
+}
diff --git a/API.Tests/Helpers/ReviewHelperTests.cs b/API.Tests/Helpers/ReviewHelperTests.cs
new file mode 100644
index 000000000..b221c3c70
--- /dev/null
+++ b/API.Tests/Helpers/ReviewHelperTests.cs
@@ -0,0 +1,258 @@
+using API.Helpers;
+using System.Collections.Generic;
+using System.Linq;
+using Xunit;
+using API.DTOs.SeriesDetail;
+
+namespace API.Tests.Helpers;
+
+public class ReviewHelperTests
+{
+ #region SelectSpectrumOfReviews Tests
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WhenLessThan10Reviews_ReturnsAllReviews()
+ {
+ // Arrange
+ var reviews = CreateReviewList(8);
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(8, result.Count);
+ Assert.Equal(reviews, result.OrderByDescending(r => r.Score));
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WhenMoreThan10Reviews_Returns10Reviews()
+ {
+ // Arrange
+ var reviews = CreateReviewList(20);
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(10, result.Count);
+ Assert.Equal(reviews[0], result.First());
+ Assert.Equal(reviews[19], result.Last());
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WithExactly10Reviews_ReturnsAllReviews()
+ {
+ // Arrange
+ var reviews = CreateReviewList(10);
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(10, result.Count);
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WithLargeNumberOfReviews_ReturnsCorrectSpectrum()
+ {
+ // Arrange
+ var reviews = CreateReviewList(100);
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(10, result.Count);
+ Assert.Contains(reviews[0], result);
+ Assert.Contains(reviews[1], result);
+ Assert.Contains(reviews[98], result);
+ Assert.Contains(reviews[99], result);
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_WithEmptyList_ReturnsEmptyList()
+ {
+ // Arrange
+ var reviews = new List<UserReviewDto>();
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void SelectSpectrumOfReviews_ResultsOrderedByScoreDescending()
+ {
+ // Arrange
+ var reviews = new List<UserReviewDto>
+ {
+ new UserReviewDto { Tagline = "1", Score = 3 },
+ new UserReviewDto { Tagline = "2", Score = 5 },
+ new UserReviewDto { Tagline = "3", Score = 1 },
+ new UserReviewDto { Tagline = "4", Score = 4 },
+ new UserReviewDto { Tagline = "5", Score = 2 }
+ };
+
+ // Act
+ var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
+
+ // Assert
+ Assert.Equal(5, result.Count);
+ Assert.Equal(5, result[0].Score);
+ Assert.Equal(4, result[1].Score);
+ Assert.Equal(3, result[2].Score);
+ Assert.Equal(2, result[3].Score);
+ Assert.Equal(1, result[4].Score);
+ }
+
+ #endregion
+
+ #region GetCharacters Tests
+
+ [Fact]
+ public void GetCharacters_WithNullBody_ReturnsNull()
+ {
+ // Arrange
+ string body = null;
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Null(result);
+ }
+
+ [Fact]
+ public void GetCharacters_WithEmptyBody_ReturnsEmptyString()
+ {
+ // Arrange
+ var body = string.Empty;
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal(string.Empty, result);
+ }
+
+ [Fact]
+ public void GetCharacters_WithNoTextNodes_ReturnsEmptyString()
+ {
+ // Arrange
+ const string body = "<div><img src='test.jpg'/></div>";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal(string.Empty, result);
+ }
+
+ [Fact]
+ public void GetCharacters_WithLessCharactersThanLimit_ReturnsFullText()
+ {
+ // Arrange
+ var body = "<p>This is a short review.</p>";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal("This is a short review.…", result);
+ }
+
+ [Fact]
+ public void GetCharacters_WithMoreCharactersThanLimit_TruncatesText()
+ {
+ // Arrange
+ var body = "<p>" + new string('a', 200) + "</p>";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal(new string('a', 175) + "…", result);
+ Assert.Equal(176, result.Length); // 175 characters + ellipsis
+ }
+
+ [Fact]
+ public void GetCharacters_IgnoresScriptTags()
+ {
+ // Arrange
+ const string body = "<script>var x = 'hidden';</script><p>Visible text</p>";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal("Visible text…", result);
+ Assert.DoesNotContain("hidden", result);
+ }
+
+ [Fact]
+ public void GetCharacters_RemovesMarkdownSymbols()
+ {
+ // Arrange
+ const string body = "<p>This is **bold** and _italic_ text with [link](url).</p>";
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.Equal("This is bold and italic text with link.…", result);
+ }
+
+ [Fact]
+ public void GetCharacters_HandlesComplexMarkdownAndHtml()
+ {
+ // Arrange
+ const string body = """
+
+
+
# Header
+
This is ~~strikethrough~~ and __underlined__ text
+
~~~code block~~~
+
+++highlighted+++
+
img123(image.jpg)
+
+ """;
+
+ // Act
+ var result = ReviewHelper.GetCharacters(body);
+
+ // Assert
+ Assert.DoesNotContain("~~", result);
+ Assert.DoesNotContain("__", result);
+ Assert.DoesNotContain("~~~", result);
+ Assert.DoesNotContain("+++", result);
+ Assert.DoesNotContain("img123(", result);
+ Assert.Contains("Header", result);
+ Assert.Contains("strikethrough", result);
+ Assert.Contains("underlined", result);
+ Assert.Contains("code block", result);
+ Assert.Contains("highlighted", result);
+ }
+
+ #endregion
+
+ #region Helper Methods
+
+ private static List<UserReviewDto> CreateReviewList(int count)
+ {
+ var reviews = new List<UserReviewDto>();
+ for (var i = 0; i < count; i++)
+ {
+ reviews.Add(new UserReviewDto
+ {
+ Tagline = $"{i + 1}",
+ Score = count - i // This makes them ordered by score descending initially
+ });
+ }
+ return reviews;
+ }
+
+ #endregion
+}
+
diff --git a/API.Tests/Helpers/ScannerHelper.cs b/API.Tests/Helpers/ScannerHelper.cs
new file mode 100644
index 000000000..653efebb1
--- /dev/null
+++ b/API.Tests/Helpers/ScannerHelper.cs
@@ -0,0 +1,208 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.IO.Abstractions;
+using System.IO.Compression;
+using System.Linq;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using System.Xml;
+using System.Xml.Serialization;
+using API.Data;
+using API.Data.Metadata;
+using API.Entities;
+using API.Entities.Enums;
+using API.Helpers;
+using API.Helpers.Builders;
+using API.Services;
+using API.Services.Plus;
+using API.Services.Tasks;
+using API.Services.Tasks.Metadata;
+using API.Services.Tasks.Scanner;
+using API.SignalR;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit.Abstractions;
+
+namespace API.Tests.Helpers;
+#nullable enable
+
+public class ScannerHelper
+{
+ private readonly IUnitOfWork _unitOfWork;
+ private readonly ITestOutputHelper _testOutputHelper;
+ private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests");
+ private readonly string _testcasesDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/TestCases");
+ private readonly string _imagePath = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/1x1.png");
+ private static readonly string[] ComicInfoExtensions = new[] { ".cbz", ".cbr", ".zip", ".rar" };
+
+ public ScannerHelper(IUnitOfWork unitOfWork, ITestOutputHelper testOutputHelper)
+ {
+ _unitOfWork = unitOfWork;
+ _testOutputHelper = testOutputHelper;
+ }
+
+ public async Task<Library> GenerateScannerData(string testcase, Dictionary<string, ComicInfo> comicInfos = null)
+ {
+ var testDirectoryPath = await GenerateTestDirectory(Path.Join(_testcasesDirectory, testcase), comicInfos);
+
+ var (publisher, type) = SplitPublisherAndLibraryType(Path.GetFileNameWithoutExtension(testcase));
+
+ var library = new LibraryBuilder(publisher, type)
+ .WithFolders([new FolderPath() {Path = testDirectoryPath}])
+ .Build();
+
+ var admin = new AppUserBuilder("admin", "admin@kavita.com", Seed.DefaultThemes[0])
+ .WithLibrary(library)
+ .Build();
+
+ _unitOfWork.UserRepository.Add(admin); // Admin is needed for generating collections/reading lists
+ _unitOfWork.LibraryRepository.Add(library);
+ await _unitOfWork.CommitAsync();
+
+ return library;
+ }
+
+ public ScannerService CreateServices(DirectoryService ds = null, IFileSystem fs = null)
+ {
+ fs ??= new FileSystem();
+ ds ??= new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+ var archiveService = new ArchiveService(Substitute.For>(), ds,
+ Substitute.For(), Substitute.For());
+ var readingItemService = new ReadingItemService(archiveService, Substitute.For(),
+ Substitute.For(), ds, Substitute.For>());
+
+
+ var processSeries = new ProcessSeries(_unitOfWork, Substitute.For>(),
+ Substitute.For(),
+ ds, Substitute.For(), readingItemService, new FileService(fs),
+ Substitute.For(),
+ Substitute.For(),
+ Substitute.For(),
+ Substitute.For());
+
+ var scanner = new ScannerService(_unitOfWork, Substitute.For>(),
+ Substitute.For(),
+ Substitute.For(), Substitute.For(), ds,
+ readingItemService, processSeries, Substitute.For());
+ return scanner;
+ }
+
+ private static (string Publisher, LibraryType Type) SplitPublisherAndLibraryType(string input)
+ {
+ // Split the input string based on " - "
+ var parts = input.Split(" - ", StringSplitOptions.RemoveEmptyEntries);
+
+ if (parts.Length != 2)
+ {
+ throw new ArgumentException("Input must be in the format 'Publisher - LibraryType'");
+ }
+
+ var publisher = parts[0].Trim();
+ var libraryTypeString = parts[1].Trim();
+
+ // Try to parse the right-hand side as a LibraryType enum
+ if (!Enum.TryParse<LibraryType>(libraryTypeString, out var libraryType))
+ {
+ throw new ArgumentException($"'{libraryTypeString}' is not a valid LibraryType");
+ }
+
+ return (publisher, libraryType);
+ }
+
+
+
+ private async Task<string> GenerateTestDirectory(string mapPath, Dictionary<string, ComicInfo> comicInfos = null)
+ {
+ // Read the map file
+ var mapContent = await File.ReadAllTextAsync(mapPath);
+
+ // Deserialize the JSON content into a list of strings using System.Text.Json
+ var filePaths = JsonSerializer.Deserialize<List<string>>(mapContent);
+
+ // Create a test directory
+ var testDirectory = Path.Combine(_testDirectory, Path.GetFileNameWithoutExtension(mapPath));
+ if (Directory.Exists(testDirectory))
+ {
+ Directory.Delete(testDirectory, true);
+ }
+ Directory.CreateDirectory(testDirectory);
+
+ // Generate the files and folders
+ await Scaffold(testDirectory, filePaths, comicInfos);
+
+ _testOutputHelper.WriteLine($"Test Directory Path: {testDirectory}");
+
+ return Path.GetFullPath(testDirectory);
+ }
+
+
+ public async Task Scaffold(string testDirectory, List<string> filePaths, Dictionary<string, ComicInfo> comicInfos = null)
+ {
+ foreach (var relativePath in filePaths)
+ {
+ var fullPath = Path.Combine(testDirectory, relativePath);
+ var fileDir = Path.GetDirectoryName(fullPath);
+
+ // Create the directory if it doesn't exist
+ if (!Directory.Exists(fileDir))
+ {
+ Directory.CreateDirectory(fileDir);
+ Console.WriteLine($"Created directory: {fileDir}");
+ }
+
+ var ext = Path.GetExtension(fullPath).ToLower();
+ if (ComicInfoExtensions.Contains(ext) && comicInfos != null && comicInfos.TryGetValue(Path.GetFileName(relativePath), out var info))
+ {
+ CreateMinimalCbz(fullPath, info);
+ }
+ else
+ {
+ // Create an empty file
+ await File.Create(fullPath).DisposeAsync();
+ Console.WriteLine($"Created empty file: {fullPath}");
+ }
+ }
+ }
+
+ private void CreateMinimalCbz(string filePath, ComicInfo? comicInfo = null)
+ {
+ using (var archive = ZipFile.Open(filePath, ZipArchiveMode.Create))
+ {
+ // Add the 1x1 image to the archive
+ archive.CreateEntryFromFile(_imagePath, "1x1.png");
+
+ if (comicInfo != null)
+ {
+ // Serialize ComicInfo object to XML
+ var comicInfoXml = SerializeComicInfoToXml(comicInfo);
+
+ // Create an entry for ComicInfo.xml in the archive
+ var entry = archive.CreateEntry("ComicInfo.xml");
+ using var entryStream = entry.Open();
+ using var writer = new StreamWriter(entryStream, Encoding.UTF8);
+
+ // Write the XML to the archive
+ writer.Write(comicInfoXml);
+ }
+
+ }
+ Console.WriteLine($"Created minimal CBZ archive: {filePath} with{(comicInfo != null ? "" : "out")} metadata.");
+ }
+
+
+ private static string SerializeComicInfoToXml(ComicInfo comicInfo)
+ {
+ var xmlSerializer = new XmlSerializer(typeof(ComicInfo));
+ using var stringWriter = new StringWriter();
+ using (var xmlWriter = XmlWriter.Create(stringWriter, new XmlWriterSettings { Indent = true, Encoding = new UTF8Encoding(false), OmitXmlDeclaration = false}))
+ {
+ xmlSerializer.Serialize(xmlWriter, comicInfo);
+ }
+
+ // For the love of god, I spent 2 hours trying to get utf-8 with no BOM
+ return stringWriter.ToString().Replace("""<?xml version="1.0" encoding="utf-16"?>""",
+ @"<?xml version=""1.0"" encoding=""utf-8""?>");
+ }
+}
diff --git a/API.Tests/Helpers/SeriesHelperTests.cs b/API.Tests/Helpers/SeriesHelperTests.cs
index 139803e0a..22b4a3cd1 100644
--- a/API.Tests/Helpers/SeriesHelperTests.cs
+++ b/API.Tests/Helpers/SeriesHelperTests.cs
@@ -1,9 +1,10 @@
using System.Collections.Generic;
using System.Linq;
-using API.Data;
using API.Entities;
using API.Entities.Enums;
+using API.Extensions;
using API.Helpers;
+using API.Helpers.Builders;
using API.Services.Tasks.Scanner;
using Xunit;
@@ -15,147 +16,161 @@ public class SeriesHelperTests
[Fact]
public void FindSeries_ShouldFind_SameFormat()
{
- var series = DbFactory.Series("Darker than Black");
+ var series = new SeriesBuilder("Darker than Black").Build();
series.OriginalName = "Something Random";
series.Format = MangaFormat.Archive;
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Archive,
Name = "Darker than Black",
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker than Black")
+ NormalizedName = "Darker than Black".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Archive,
Name = "Darker than Black".ToLower(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker than Black")
+ NormalizedName = "Darker than Black".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Archive,
Name = "Darker than Black".ToUpper(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker than Black")
+ NormalizedName = "Darker than Black".ToNormalized()
+ }));
+ }
+
+ [Fact]
+ public void FindSeries_ShouldFind_NullName()
+ {
+ var series = new SeriesBuilder("Darker than Black").Build();
+ series.OriginalName = null;
+ series.Format = MangaFormat.Archive;
+ Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
+ {
+ Format = MangaFormat.Archive,
+ Name = "Darker than Black",
+ NormalizedName = "Darker than Black".ToNormalized()
}));
}
[Fact]
public void FindSeries_ShouldNotFind_WrongFormat()
{
- var series = DbFactory.Series("Darker than Black");
+ var series = new SeriesBuilder("Darker than Black").Build();
series.OriginalName = "Something Random";
series.Format = MangaFormat.Archive;
Assert.False(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Darker than Black",
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker than Black")
+ NormalizedName = "Darker than Black".ToNormalized()
}));
Assert.False(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Darker than Black".ToLower(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker than Black")
+ NormalizedName = "Darker than Black".ToNormalized()
}));
Assert.False(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Darker than Black".ToUpper(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker than Black")
+ NormalizedName = "Darker than Black".ToNormalized()
}));
}
[Fact]
public void FindSeries_ShouldFind_UsingOriginalName()
{
- var series = DbFactory.Series("Darker than Black");
+ var series = new SeriesBuilder("Darker than Black").Build();
series.OriginalName = "Something Random";
series.Format = MangaFormat.Image;
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Something Random",
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Something Random")
+ NormalizedName = "Something Random".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Something Random".ToLower(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Something Random")
+ NormalizedName = "Something Random".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Something Random".ToUpper(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Something Random")
+ NormalizedName = "Something Random".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "SomethingRandom".ToUpper(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("SomethingRandom")
+ NormalizedName = "SomethingRandom".ToNormalized()
}));
}
[Fact]
public void FindSeries_ShouldFind_UsingLocalizedName()
{
- var series = DbFactory.Series("Darker than Black");
+ var series = new SeriesBuilder("Darker than Black").Build();
series.LocalizedName = "Something Random";
series.Format = MangaFormat.Image;
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Something Random",
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Something Random")
+ NormalizedName = "Something Random".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Something Random".ToLower(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Something Random")
+ NormalizedName = "Something Random".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "Something Random".ToUpper(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Something Random")
+ NormalizedName = "Something Random".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Image,
Name = "SomethingRandom".ToUpper(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("SomethingRandom")
+ NormalizedName = "SomethingRandom".ToNormalized()
}));
}
[Fact]
public void FindSeries_ShouldFind_UsingLocalizedName_2()
{
- var series = DbFactory.Series("My Dress-Up Darling");
+ var series = new SeriesBuilder("My Dress-Up Darling").Build();
series.LocalizedName = "Sono Bisque Doll wa Koi wo Suru";
series.Format = MangaFormat.Archive;
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Archive,
Name = "My Dress-Up Darling",
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("My Dress-Up Darling")
+ NormalizedName = "My Dress-Up Darling".ToNormalized()
}));
Assert.True(SeriesHelper.FindSeries(series, new ParsedSeries()
{
Format = MangaFormat.Archive,
Name = "Sono Bisque Doll wa Koi wo Suru".ToLower(),
- NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Sono Bisque Doll wa Koi wo Suru")
+ NormalizedName = "Sono Bisque Doll wa Koi wo Suru".ToNormalized()
}));
}
#endregion
@@ -165,13 +180,13 @@ public class SeriesHelperTests
{
var existingSeries = new List()
{
- EntityFactory.CreateSeries("Darker than Black Vol 1"),
- EntityFactory.CreateSeries("Darker than Black"),
- EntityFactory.CreateSeries("Beastars"),
+ new SeriesBuilder("Darker than Black Vol 1").Build(),
+ new SeriesBuilder("Darker than Black").Build(),
+ new SeriesBuilder("Beastars").Build(),
};
var missingSeries = new List()
{
- EntityFactory.CreateSeries("Darker than Black Vol 1"),
+ new SeriesBuilder("Darker than Black Vol 1").Build(),
};
existingSeries = SeriesHelper.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList();
diff --git a/API.Tests/Helpers/SmartFilterHelperTests.cs b/API.Tests/Helpers/SmartFilterHelperTests.cs
new file mode 100644
index 000000000..974cb0ba6
--- /dev/null
+++ b/API.Tests/Helpers/SmartFilterHelperTests.cs
@@ -0,0 +1,161 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using API.Data.ManualMigrations;
+using API.DTOs.Filtering;
+using API.DTOs.Filtering.v2;
+using API.Entities.Enums;
+using API.Helpers;
+using Xunit;
+
+namespace API.Tests.Helpers;
+
+public class SmartFilterHelperTests
+{
+
+ [Theory]
+ [InlineData("", false)]
+ [InlineData("name=DC%20-%20On%20Deck&stmts=comparison%3D1%26field%3D20%26value%3D0,comparison%3D9%26field%3D20%26value%3D100,comparison%3D0%26field%3D19%26value%3D274&sortOptions=sortField%3D1&isAscending=True&limitTo=0&combination=1", true)]
+ [InlineData("name=English%20In%20Progress&stmts=comparison%253D8%252Cfield%253D7%252Cvalue%253D4%25252C3,comparison%253D3%252Cfield%253D20%252Cvalue%253D100,comparison%253D8%252Cfield%253D3%252Cvalue%253Dja,comparison%253D1%252Cfield%253D20%252Cvalue%253D0&sortOptions=sortField%3D7,isAscending%3DFalse&limitTo=0&combination=1", true)]
+ [InlineData("name=Unread%20Isekai%20Light%20Novels&stmts=comparison%253D0%25C2%25A6field%253D20%25C2%25A6value%253D0%EF%BF%BDcomparison%253D5%25C2%25A6field%253D6%25C2%25A6value%253D230%EF%BF%BDcomparison%253D8%25C2%25A6field%253D7%25C2%25A6value%253D4%EF%BF%BDcomparison%253D0%25C2%25A6field%253D19%25C2%25A6value%253D14&sortOptions=sortField%3D5%C2%A6isAscending%3DFalse&limitTo=0&combination=1", false)]
+ [InlineData("name=Zero&stmts=comparison%3d7%26field%3d1%26value%3d0&sortOptions=sortField=2&isAscending=False&limitTo=0&combination=1", true)]
+ public void Test_ShouldMigrateFilter(string filter, bool expected)
+ {
+ Assert.Equal(expected, MigrateSmartFilterEncoding.ShouldMigrateFilter(filter));
+ }
+
+ [Fact]
+ public void Test_Decode()
+ {
+ const string encoded = """
+ name=Test&stmts=comparison%253D0%25C2%25A6field%253D18%25C2%25A6value%253D95�comparison%253D0%25C2%25A6field%253D4%25C2%25A6value%253D0�comparison%253D7%25C2%25A6field%253D1%25C2%25A6value%253Da&sortOptions=sortField%3D2¦isAscending%3DFalse&limitTo=10&combination=1
+ """;
+
+ var filter = SmartFilterHelper.Decode(encoded);
+
+ Assert.Equal(10, filter.LimitTo);
+ Assert.Equal(SortField.CreatedDate, filter.SortOptions.SortField);
+ Assert.False(filter.SortOptions.IsAscending);
+ Assert.Equal("Test" , filter.Name);
+
+ var list = filter.Statements.ToList();
+ AssertStatementSame(list[2], FilterField.SeriesName, FilterComparison.Matches, "a");
+ AssertStatementSame(list[1], FilterField.AgeRating, FilterComparison.Equal, (int) AgeRating.Unknown + string.Empty);
+ AssertStatementSame(list[0], FilterField.Genres, FilterComparison.Equal, "95");
+ }
+
+ [Fact]
+ public void Test_Decode2()
+ {
+ const string encoded = """
+ name=Test%202&stmts=comparison%253D10%25C2%25A6field%253D1%25C2%25A6value%253DA%EF%BF%BDcomparison%253D0%25C2%25A6field%253D19%25C2%25A6value%253D11&sortOptions=sortField%3D1%C2%A6isAscending%3DTrue&limitTo=0&combination=1
+ """;
+
+ var filter = SmartFilterHelper.Decode(encoded);
+ Assert.True(filter.SortOptions.IsAscending);
+ }
+
+ [Fact]
+ public void Test_EncodeDecode()
+ {
+ var filter = new FilterV2Dto()
+ {
+ Name = "Test",
+ SortOptions = new SortOptions() {
+ IsAscending = false,
+ SortField = SortField.CreatedDate
+ },
+ LimitTo = 10,
+ Combination = FilterCombination.And,
+ Statements = new List<FilterStatementDto>()
+ {
+ new FilterStatementDto()
+ {
+ Comparison = FilterComparison.Equal,
+ Field = FilterField.AgeRating,
+ Value = (int) AgeRating.Unknown + string.Empty
+ }
+ }
+ };
+
+ var encodedFilter = SmartFilterHelper.Encode(filter);
+
+ var decoded = SmartFilterHelper.Decode(encodedFilter);
+ Assert.Single(decoded.Statements);
+ AssertStatementSame(decoded.Statements.First(), filter.Statements.First());
+ Assert.Equal("Test", decoded.Name);
+ Assert.Equal(10, decoded.LimitTo);
+ Assert.Equal(SortField.CreatedDate, decoded.SortOptions.SortField);
+ Assert.False(decoded.SortOptions.IsAscending);
+ }
+
+ [Fact]
+ public void Test_EncodeDecode_MultipleValues_Contains()
+ {
+ var filter = new FilterV2Dto()
+ {
+ Name = "Test",
+ SortOptions = new SortOptions() {
+ IsAscending = false,
+ SortField = SortField.CreatedDate
+ },
+ LimitTo = 10,
+ Combination = FilterCombination.And,
+ Statements = new List<FilterStatementDto>()
+ {
+ new FilterStatementDto()
+ {
+ Comparison = FilterComparison.Equal,
+ Field = FilterField.AgeRating,
+ Value = $"{(int) AgeRating.Unknown + string.Empty},{(int) AgeRating.G + string.Empty}"
+ }
+ }
+ };
+
+ var encodedFilter = SmartFilterHelper.Encode(filter);
+ var decoded = SmartFilterHelper.Decode(encodedFilter);
+
+ Assert.Single(decoded.Statements);
+ AssertStatementSame(decoded.Statements.First(), filter.Statements.First());
+
+ Assert.Equal(2, decoded.Statements.First().Value.Split(",").Length);
+
+ Assert.Equal("Test", decoded.Name);
+ Assert.Equal(10, decoded.LimitTo);
+ Assert.Equal(SortField.CreatedDate, decoded.SortOptions.SortField);
+ Assert.False(decoded.SortOptions.IsAscending);
+ }
+
+ [Theory]
+ [InlineData("name=DC%20-%20On%20Deck&stmts=comparison%3D1%26field%3D20%26value%3D0,comparison%3D9%26field%3D20%26value%3D100,comparison%3D0%26field%3D19%26value%3D274&sortOptions=sortField%3D1&isAscending=True&limitTo=0&combination=1")]
+ [InlineData("name=Manga%20-%20On%20Deck&stmts=comparison%253D1%252Cfield%253D20%252Cvalue%253D0,comparison%253D3%252Cfield%253D20%252Cvalue%253D100,comparison%253D0%252Cfield%253D19%252Cvalue%253D2&sortOptions=sortField%3D1,isAscending%3DTrue&limitTo=0&combination=1")]
+ [InlineData("name=English%20In%20Progress&stmts=comparison%253D8%252Cfield%253D7%252Cvalue%253D4%25252C3,comparison%253D3%252Cfield%253D20%252Cvalue%253D100,comparison%253D8%252Cfield%253D3%252Cvalue%253Dja,comparison%253D1%252Cfield%253D20%252Cvalue%253D0&sortOptions=sortField%3D7,isAscending%3DFalse&limitTo=0&combination=1")]
+ public void MigrationWorks(string filter)
+ {
+ try
+ {
+ var updatedFilter = MigrateSmartFilterEncoding.EncodeFix(filter);
+ Assert.NotNull(updatedFilter);
+ }
+ catch (Exception ex)
+ {
+ Assert.Fail("Exception thrown: " + ex.Message);
+ }
+
+ }
+
+ private static void AssertStatementSame(FilterStatementDto statement, FilterStatementDto statement2)
+ {
+ Assert.Equal(statement.Field, statement2.Field);
+ Assert.Equal(statement.Comparison, statement2.Comparison);
+ Assert.Equal(statement.Value, statement2.Value);
+ }
+
+ private static void AssertStatementSame(FilterStatementDto statement, FilterField field, FilterComparison combination, string value)
+ {
+ Assert.Equal(statement.Field, field);
+ Assert.Equal(statement.Comparison, combination);
+ Assert.Equal(statement.Value, value);
+ }
+
+}
diff --git a/API.Tests/Helpers/StringHelperTests.cs b/API.Tests/Helpers/StringHelperTests.cs
new file mode 100644
index 000000000..8f845c9b0
--- /dev/null
+++ b/API.Tests/Helpers/StringHelperTests.cs
@@ -0,0 +1,46 @@
+using API.Helpers;
+using Xunit;
+
+namespace API.Tests.Helpers;
+
+public class StringHelperTests
+{
+ [Theory]
+ [InlineData(
+ "A Perfect Marriage Becomes a Perfect Affair!
Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?
",
+ "A Perfect Marriage Becomes a Perfect Affair!
Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?
"
+ )]
+ [InlineData(
+ "Blog | Twitter | Pixiv | Pawoo
",
+ "Blog | Twitter | Pixiv | Pawoo
"
+ )]
+ public void TestSquashBreaklines(string input, string expected)
+ {
+ Assert.Equal(expected, StringHelper.SquashBreaklines(input));
+ }
+
+ [Theory]
+ [InlineData(
+ "A Perfect Marriage Becomes a Perfect Affair!
(Source: Anime News Network)
",
+ "A Perfect Marriage Becomes a Perfect Affair!
"
+ )]
+ [InlineData(
+ "A Perfect Marriage Becomes a Perfect Affair!
(Source: Anime News Network)",
+ "A Perfect Marriage Becomes a Perfect Affair!
"
+ )]
+ public void TestRemoveSourceInDescription(string input, string expected)
+ {
+ Assert.Equal(expected, StringHelper.RemoveSourceInDescription(input));
+ }
+
+
+ [Theory]
+ [InlineData(
+"""Pawoo
""",
+"""Pawoo"""
+ )]
+ public void TestCorrectUrls(string input, string expected)
+ {
+ Assert.Equal(expected, StringHelper.CorrectUrls(input));
+ }
+}
diff --git a/API.Tests/Helpers/TagHelperTests.cs b/API.Tests/Helpers/TagHelperTests.cs
deleted file mode 100644
index 80cebc03b..000000000
--- a/API.Tests/Helpers/TagHelperTests.cs
+++ /dev/null
@@ -1,140 +0,0 @@
-using System.Collections.Generic;
-using API.Data;
-using API.Entities;
-using API.Helpers;
-using Xunit;
-
-namespace API.Tests.Helpers;
-
-public class TagHelperTests
-{
- [Fact]
- public void UpdateTag_ShouldAddNewTag()
- {
- var allTags = new List
- {
- DbFactory.Tag("Action", false),
- DbFactory.Tag("action", false),
- DbFactory.Tag("Sci-fi", false),
- };
- var tagAdded = new List();
-
- TagHelper.UpdateTag(allTags, new[] {"Action", "Adventure"}, false, (tag, added) =>
- {
- if (added)
- {
- tagAdded.Add(tag);
- }
-
- });
-
- Assert.Equal(1, tagAdded.Count);
- Assert.Equal(4, allTags.Count);
- }
-
- [Fact]
- public void UpdateTag_ShouldNotAddDuplicateTag()
- {
- var allTags = new List
- {
- DbFactory.Tag("Action", false),
- DbFactory.Tag("action", false),
- DbFactory.Tag("Sci-fi", false),
-
- };
- var tagAdded = new List();
-
- TagHelper.UpdateTag(allTags, new[] {"Action", "Scifi"}, false, (tag, added) =>
- {
- if (added)
- {
- tagAdded.Add(tag);
- }
- TagHelper.AddTagIfNotExists(allTags, tag);
- });
-
- Assert.Equal(3, allTags.Count);
- Assert.Empty(tagAdded);
- }
-
- [Fact]
- public void AddTag_ShouldAddOnlyNonExistingTag()
- {
- var existingTags = new List
- {
- DbFactory.Tag("Action", false),
- DbFactory.Tag("action", false),
- DbFactory.Tag("Sci-fi", false),
- };
-
-
- TagHelper.AddTagIfNotExists(existingTags, DbFactory.Tag("Action", false));
- Assert.Equal(3, existingTags.Count);
-
- TagHelper.AddTagIfNotExists(existingTags, DbFactory.Tag("action", false));
- Assert.Equal(3, existingTags.Count);
-
- TagHelper.AddTagIfNotExists(existingTags, DbFactory.Tag("Shonen", false));
- Assert.Equal(4, existingTags.Count);
- }
-
- [Fact]
- public void AddTag_ShouldNotAddSameNameAndExternal()
- {
- var existingTags = new List
- {
- DbFactory.Tag("Action", false),
- DbFactory.Tag("action", false),
- DbFactory.Tag("Sci-fi", false),
- };
-
-
- TagHelper.AddTagIfNotExists(existingTags, DbFactory.Tag("Action", true));
- Assert.Equal(3, existingTags.Count);
- }
-
- [Fact]
- public void KeepOnlySamePeopleBetweenLists()
- {
- var existingTags = new List
- {
- DbFactory.Tag("Action", false),
- DbFactory.Tag("Sci-fi", false),
- };
-
- var peopleFromChapters = new List
- {
- DbFactory.Tag("Action", false),
- };
-
- var tagRemoved = new List();
- TagHelper.KeepOnlySameTagBetweenLists(existingTags,
- peopleFromChapters, tag =>
- {
- tagRemoved.Add(tag);
- });
-
- Assert.Equal(1, tagRemoved.Count);
- }
-
- [Fact]
- public void RemoveEveryoneIfNothingInRemoveAllExcept()
- {
- var existingTags = new List
- {
- DbFactory.Tag("Action", false),
- DbFactory.Tag("Sci-fi", false),
- };
-
- var peopleFromChapters = new List();
-
- var tagRemoved = new List();
- TagHelper.KeepOnlySameTagBetweenLists(existingTags,
- peopleFromChapters, tag =>
- {
- tagRemoved.Add(tag);
- });
-
- Assert.Equal(2, tagRemoved.Count);
- }
-}
diff --git a/API.Tests/Parser/BookParserTests.cs b/API.Tests/Parser/BookParserTests.cs
deleted file mode 100644
index 003dbfecc..000000000
--- a/API.Tests/Parser/BookParserTests.cs
+++ /dev/null
@@ -1,42 +0,0 @@
-using Xunit;
-
-namespace API.Tests.Parser;
-
-public class BookParserTests
-{
- [Theory]
- [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")]
- [InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")]
- [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")]
- public void ParseSeriesTest(string filename, string expected)
- {
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename));
- }
-
- [Theory]
- [InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
- [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")]
- public void ParseVolumeTest(string filename, string expected)
- {
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename));
- }
-
- // [Theory]
- // [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA", "@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA")]
- // [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url('fonts/font.css')", "@font-face{font-family:'syyskuu_repaleinen';src:url('TEST/fonts/font.css')")]
- // public void ReplaceFontSrcUrl(string input, string expected)
- // {
- // var apiBase = "TEST/";
- // var actual = API.Parser.Parser.FontSrcUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
- // Assert.Equal(expected, actual);
- // }
- //
- // [Theory]
- // [InlineData("@import url('font.css');", "@import url('TEST/font.css');")]
- // public void ReplaceImportSrcUrl(string input, string expected)
- // {
- // var apiBase = "TEST/";
- // var actual = API.Parser.Parser.CssImportUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
- // Assert.Equal(expected, actual);
- // }
-}
diff --git a/API.Tests/Parsers/BasicParserTests.cs b/API.Tests/Parsers/BasicParserTests.cs
new file mode 100644
index 000000000..32673e0e6
--- /dev/null
+++ b/API.Tests/Parsers/BasicParserTests.cs
@@ -0,0 +1,249 @@
+using System.IO;
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class BasicParserTests : AbstractFsTest
+{
+    private readonly BasicParser _parser;
+    private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+    private readonly string _rootDirectory;
+
+    public BasicParserTests()
+    {
+        var fileSystem = CreateFileSystem();
+        _rootDirectory = Path.Join(DataDirectory, "Books/");
+        fileSystem.AddDirectory(_rootDirectory);
+        fileSystem.AddFile($"{_rootDirectory}Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
+
+        fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1.cbz", new MockFileData(""));
+        fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1 Chapter 2.cbz", new MockFileData(""));
+        fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Chapter 3.cbz", new MockFileData(""));
+        fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World Gaiden SP01.cbz", new MockFileData(""));
+
+
+        fileSystem.AddFile($"{_rootDirectory}Accel World/cover.png", new MockFileData(""));
+
+        fileSystem.AddFile($"{_rootDirectory}Batman/Batman #1.cbz", new MockFileData(""));
+
+        var ds = new DirectoryService(_dsLogger, fileSystem);
+        _parser = new BasicParser(ds, new ImageParser(ds));
+    }
+
+    #region Parse_Manga
+
+    /// <summary>
+    /// Tests that when there is a loose-leaf cover in the manga library, that it is ignored
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_JustCover_ShouldReturnNull()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Accel World/cover.png", $"{_rootDirectory}Accel World/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.Null(actual);
+    }
+
+    /// <summary>
+    /// Tests that a loose image that is not a cover is still parsed (only cover images are ignored)
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_OtherImage_ShouldReturnNull()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Accel World/page 01.png", $"{_rootDirectory}Accel World/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+    }
+
+    /// <summary>
+    /// Tests that when there is a volume and chapter in filename, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_VolumeAndChapterInFilename()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz", $"{_rootDirectory}Mujaki no Rakuen/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("Mujaki no Rakuen", actual.Series);
+        Assert.Equal("12", actual.Volumes);
+        Assert.Equal("76", actual.Chapters);
+        Assert.False(actual.IsSpecial);
+    }
+
+    /// <summary>
+    /// Tests that when there is a volume in filename, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_JustVolumeInFilename()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz",
+            $"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", actual.Series);
+        Assert.Equal("1", actual.Volumes);
+        Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+        Assert.False(actual.IsSpecial);
+    }
+
+    /// <summary>
+    /// Tests that when there is a chapter only in filename, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_JustChapterInFilename()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Beelzebub/Beelzebub_01_[Noodles].zip",
+            $"{_rootDirectory}Beelzebub/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("Beelzebub", actual.Series);
+        Assert.Equal(Parser.LooseLeafVolume, actual.Volumes);
+        Assert.Equal("1", actual.Chapters);
+        Assert.False(actual.IsSpecial);
+    }
+
+    /// <summary>
+    /// Tests that when there is a SP Marker in filename, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_SpecialMarkerInFilename()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr",
+            $"{_rootDirectory}Summer Time Rendering/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("Summer Time Rendering", actual.Series);
+        Assert.Equal(Parser.SpecialVolume, actual.Volumes);
+        Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+        Assert.True(actual.IsSpecial);
+    }
+
+
+    /// <summary>
+    /// Tests that when the filename parses as a special, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_SpecialInFilename()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Volume SP01.cbr",
+            $"{_rootDirectory}Summer Time Rendering/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("Summer Time Rendering", actual.Series);
+        Assert.Equal("Volume", actual.Title);
+        Assert.Equal(Parser.SpecialVolume, actual.Volumes);
+        Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+        Assert.True(actual.IsSpecial);
+    }
+
+    /// <summary>
+    /// Tests that when the filename parses as a special, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_SpecialInFilename2()
+    {
+        var actual = _parser.Parse("M:/Kimi wa Midara na Boku no Joou/Specials/[Renzokusei] Special 1 SP02.zip",
+            "M:/Kimi wa Midara na Boku no Joou/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("Kimi wa Midara na Boku no Joou", actual.Series);
+        Assert.Equal("[Renzokusei] Special 1", actual.Title);
+        Assert.Equal(Parser.SpecialVolume, actual.Volumes);
+        Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+        Assert.True(actual.IsSpecial);
+    }
+
+    /// <summary>
+    /// Tests that when the filename parses as a special, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_SpecialInFilename_StrangeNaming()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}My Dress-Up Darling/SP01 1. Special Name.cbz",
+            _rootDirectory,
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("My Dress-Up Darling", actual.Series);
+        Assert.Equal("1. Special Name", actual.Title);
+        Assert.Equal(Parser.SpecialVolume, actual.Volumes);
+        Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+        Assert.True(actual.IsSpecial);
+    }
+
+    /// <summary>
+    /// Tests that when there is an edition in filename, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaLibrary_EditionInFilename()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz",
+            $"{_rootDirectory}Air Gear/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("Air Gear", actual.Series);
+        Assert.Equal("1", actual.Volumes);
+        Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+        Assert.False(actual.IsSpecial);
+        Assert.Equal("Omnibus", actual.Edition);
+    }
+
+    #endregion
+
+    #region Parse_Books
+    /// <summary>
+    /// Tests that when there is a volume in filename, it appropriately parses
+    /// </summary>
+    [Fact]
+    public void Parse_MangaBooks_JustVolumeInFilename()
+    {
+        var actual = _parser.Parse($"{_rootDirectory}Epubs/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub",
+            $"{_rootDirectory}Epubs/",
+            _rootDirectory, LibraryType.Manga);
+        Assert.NotNull(actual);
+
+        Assert.Equal("Harrison, Kim - The Good, The Bad, and the Undead - Hollows", actual.Series);
+        Assert.Equal("2.5", actual.Volumes);
+        Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+    }
+
+    #endregion
+
+    #region IsApplicable
+    /// <summary>
+    /// Tests that this Parser is not applicable to the Image or ComicVine library types
+    /// </summary>
+    [Fact]
+    public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+    {
+        Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
+        Assert.False(_parser.IsApplicable("something.cbz", LibraryType.ComicVine));
+    }
+
+    /// <summary>
+    /// Tests that this Parser is applicable to the standard library types (Manga, Comic, Book, LightNovel)
+    /// </summary>
+    [Fact]
+    public void IsApplicable_Success_WhenMatchingLibraryType()
+    {
+        Assert.True(_parser.IsApplicable("something.png", LibraryType.Manga));
+        Assert.True(_parser.IsApplicable("something.png", LibraryType.Comic));
+        Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Book));
+        Assert.True(_parser.IsApplicable("something.epub", LibraryType.LightNovel));
+    }
+
+
+    #endregion
+}
diff --git a/API.Tests/Parsers/BookParserTests.cs b/API.Tests/Parsers/BookParserTests.cs
new file mode 100644
index 000000000..90147ac6b
--- /dev/null
+++ b/API.Tests/Parsers/BookParserTests.cs
@@ -0,0 +1,73 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class BookParserTests
+{
+    private readonly BookParser _parser;
+    private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+    private const string RootDirectory = "C:/Books/";
+
+    public BookParserTests()
+    {
+        var fileSystem = new MockFileSystem();
+        fileSystem.AddDirectory("C:/Books/");
+        fileSystem.AddFile("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
+        fileSystem.AddFile("C:/Books/Adam Freeman - Pro ASP.NET Core 6.epub", new MockFileData(""));
+        fileSystem.AddFile("C:/Books/My Fav Book SP01.epub", new MockFileData(""));
+        var ds = new DirectoryService(_dsLogger, fileSystem);
+        _parser = new BookParser(ds, Substitute.For<IBookService>(), new BasicParser(ds, new ImageParser(ds)));
+    }
+
+    #region Parse
+
+    // TODO: I'm not sure how to actually test this as it relies on an epub parser to actually do anything
+
+    /// <summary>
+    /// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
+    /// </summary>
+    // [Fact]
+    // public void Parse_SeriesWithDirectoryName()
+    // {
+    //     var actual = _parser.Parse("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", "C:/Books/Birds of Prey/",
+    //         RootDirectory, LibraryType.Book, new ComicInfo()
+    //     {
+    //         Series = "Harry Potter",
+    //         Volume = "1"
+    //     });
+    //
+    //     Assert.NotNull(actual);
+    //     Assert.Equal("Harry Potter", actual.Series);
+    //     Assert.Equal("1", actual.Volumes);
+    // }
+
+    #endregion
+
+    #region IsApplicable
+    /// <summary>
+    /// Tests that this Parser is not applicable to non-epub files, regardless of library type
+    /// </summary>
+    [Fact]
+    public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+    {
+        Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
+        Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Book));
+
+    }
+
+    /// <summary>
+    /// Tests that this Parser is applicable to epub files for any library type
+    /// </summary>
+    [Fact]
+    public void IsApplicable_Success_WhenMatchingLibraryType()
+    {
+        Assert.True(_parser.IsApplicable("something.epub", LibraryType.Image));
+    }
+    #endregion
+}
diff --git a/API.Tests/Parsers/ComicVineParserTests.cs b/API.Tests/Parsers/ComicVineParserTests.cs
new file mode 100644
index 000000000..2f4fd568e
--- /dev/null
+++ b/API.Tests/Parsers/ComicVineParserTests.cs
@@ -0,0 +1,115 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Data.Metadata;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class ComicVineParserTests
+{
+    private readonly ComicVineParser _parser;
+    private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+    private const string RootDirectory = "C:/Comics/";
+
+    public ComicVineParserTests()
+    {
+        var fileSystem = new MockFileSystem();
+        fileSystem.AddDirectory("C:/Comics/");
+        fileSystem.AddDirectory("C:/Comics/Birds of Prey (2002)");
+        fileSystem.AddFile("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", new MockFileData(""));
+        fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", new MockFileData(""));
+        fileSystem.AddFile("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", new MockFileData(""));
+        var ds = new DirectoryService(_dsLogger, fileSystem);
+        _parser = new ComicVineParser(ds);
+    }
+
+    #region Parse
+
+    /// <summary>
+    /// Tests that when Series and Volume are filled out, Kavita uses that for the Series Name
+    /// </summary>
+    [Fact]
+    public void Parse_SeriesWithComicInfo()
+    {
+        var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
+            RootDirectory, LibraryType.ComicVine, true, new ComicInfo()
+        {
+            Series = "Birds of Prey",
+            Volume = "2002"
+        });
+
+        Assert.NotNull(actual);
+        Assert.Equal("Birds of Prey (2002)", actual.Series);
+        Assert.Equal("2002", actual.Volumes);
+    }
+
+    /// <summary>
+    /// Tests that no ComicInfo, take the Directory Name if it matches "Series (2002)" or "Series (2)"
+    /// </summary>
+    [Fact]
+    public void Parse_SeriesWithDirectoryNameAsSeriesYear()
+    {
+        var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
+            RootDirectory, LibraryType.ComicVine, true, null);
+
+        Assert.NotNull(actual);
+        Assert.Equal("Birds of Prey (2002)", actual.Series);
+        Assert.Equal("2002", actual.Volumes);
+        Assert.Equal("1", actual.Chapters);
+    }
+
+    /// <summary>
+    /// Tests that no ComicInfo, take a directory name up to root if it matches "Series (2002)" or "Series (2)"
+    /// </summary>
+    [Fact]
+    public void Parse_SeriesWithADirectoryNameAsSeriesYear()
+    {
+        var actual = _parser.Parse("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", "C:/Comics/DC Comics/",
+            RootDirectory, LibraryType.ComicVine, true, null);
+
+        Assert.NotNull(actual);
+        Assert.Equal("Birds of Prey (1999)", actual.Series);
+        Assert.Equal("1999", actual.Volumes);
+        Assert.Equal("1", actual.Chapters);
+    }
+
+    /// <summary>
+    /// Tests that no ComicInfo and nothing matches Series (Volume), then just take the directory name as the Series
+    /// </summary>
+    [Fact]
+    public void Parse_FallbackToDirectoryNameOnly()
+    {
+        var actual = _parser.Parse("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", "C:/Comics/DC Comics/",
+            RootDirectory, LibraryType.ComicVine, true, null);
+
+        Assert.NotNull(actual);
+        Assert.Equal("Blood Syndicate", actual.Series);
+        Assert.Equal(Parser.LooseLeafVolume, actual.Volumes);
+        Assert.Equal("1", actual.Chapters);
+    }
+    #endregion
+
+    #region IsApplicable
+    /// <summary>
+    /// Tests that this Parser can only be used on ComicVine type
+    /// </summary>
+    [Fact]
+    public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+    {
+        Assert.False(_parser.IsApplicable("", LibraryType.Comic));
+    }
+
+    /// <summary>
+    /// Tests that this Parser can only be used on ComicVine type
+    /// </summary>
+    [Fact]
+    public void IsApplicable_Success_WhenMatchingLibraryType()
+    {
+        Assert.True(_parser.IsApplicable("", LibraryType.ComicVine));
+    }
+    #endregion
+}
diff --git a/API.Tests/Parser/DefaultParserTests.cs b/API.Tests/Parsers/DefaultParserTests.cs
similarity index 62%
rename from API.Tests/Parser/DefaultParserTests.cs
rename to API.Tests/Parsers/DefaultParserTests.cs
index 7f843b552..244c08b97 100644
--- a/API.Tests/Parser/DefaultParserTests.cs
+++ b/API.Tests/Parsers/DefaultParserTests.cs
@@ -1,7 +1,6 @@
using System.Collections.Generic;
using System.IO.Abstractions.TestingHelpers;
using API.Entities.Enums;
-using API.Parser;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
using Microsoft.Extensions.Logging;
@@ -9,7 +8,7 @@ using NSubstitute;
using Xunit;
using Xunit.Abstractions;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsers;
public class DefaultParserTests
{
@@ -20,10 +19,12 @@ public class DefaultParserTests
{
_testOutputHelper = testOutputHelper;
var directoryService = new DirectoryService(Substitute.For>(), new MockFileSystem());
- _defaultParser = new DefaultParser(directoryService);
+ _defaultParser = new BasicParser(directoryService, new ImageParser(directoryService));
}
+
+
#region ParseFromFallbackFolders
[Theory]
[InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")]
@@ -32,7 +33,7 @@ public class DefaultParserTests
[InlineData("C:/", "C:/Something Random/Mujaki no Rakuen SP01.cbz", "Something Random")]
public void ParseFromFallbackFolders_FallbackShouldParseSeries(string rootDir, string inputPath, string expectedSeries)
{
- var actual = _defaultParser.Parse(inputPath, rootDir);
+ var actual = _defaultParser.Parse(inputPath, rootDir, rootDir, LibraryType.Manga, true, null);
if (actual == null)
{
Assert.NotNull(actual);
@@ -43,25 +44,24 @@ public class DefaultParserTests
}
[Theory]
- [InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")]
- [InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")]
- [InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster~0~1")]
- [InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", "Hajime no Ippo~0~0")]
- public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string expectedParseInfo)
+ [InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", new [] {"Btooom!", "1", "1"})]
+ [InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", new [] {"Btooom!", "1", "2"})]
+ [InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", new [] {"Monster", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, "1"})]
+ [InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", new [] {"Hajime no Ippo", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter})]
+ public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string[] expectedParseInfo)
{
const string rootDirectory = "/manga/";
- var tokens = expectedParseInfo.Split("~");
- var actual = new ParserInfo {Chapters = "0", Volumes = "0"};
+ var actual = new ParserInfo {Series = "", Chapters = Parser.DefaultChapter, Volumes = Parser.LooseLeafVolume};
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
- Assert.Equal(tokens[0], actual.Series);
- Assert.Equal(tokens[1], actual.Volumes);
- Assert.Equal(tokens[2], actual.Chapters);
+ Assert.Equal(expectedParseInfo[0], actual.Series);
+ Assert.Equal(expectedParseInfo[1], actual.Volumes);
+ Assert.Equal(expectedParseInfo[2], actual.Chapters);
}
[Theory]
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!")]
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!")]
- [InlineData("/manga/Monster #8 (Digital)/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster")]
+ [InlineData("/manga/Monster #8 (Digital)/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "manga")]
[InlineData("/manga/Monster (Digital)/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster")]
[InlineData("/manga/Foo 50/Specials/Foo 50 SP01.cbz", "Foo 50")]
[InlineData("/manga/Foo 50 (kiraa)/Specials/Foo 50 SP01.cbz", "Foo 50")]
@@ -73,8 +73,8 @@ public class DefaultParserTests
fs.AddDirectory(rootDirectory);
fs.AddFile(inputFile, new MockFileData(""));
var ds = new DirectoryService(Substitute.For>(), fs);
- var parser = new DefaultParser(ds);
- var actual = parser.Parse(inputFile, rootDirectory);
+ var parser = new BasicParser(ds, new ImageParser(ds));
+ var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, true, null);
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
Assert.Equal(expectedParseInfo, actual.Series);
}
@@ -89,8 +89,8 @@ public class DefaultParserTests
fs.AddDirectory(rootDirectory);
fs.AddFile(inputFile, new MockFileData(""));
var ds = new DirectoryService(Substitute.For>(), fs);
- var parser = new DefaultParser(ds);
- var actual = parser.Parse(inputFile, rootDirectory);
+ var parser = new BasicParser(ds, new ImageParser(ds));
+ var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, true, null);
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
Assert.Equal(expectedParseInfo, actual.Series);
}
@@ -100,6 +100,7 @@ public class DefaultParserTests
#region Parse
+
[Fact]
public void Parse_ParseInfo_Manga()
{
@@ -118,19 +119,20 @@ public class DefaultParserTests
expected.Add(filepath, new ParserInfo
{
Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1",
- Chapters = "0", Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip";
+ filepath = @"E:/Manga/Beelzebub/Beelzebub_01_[Noodles].zip";
expected.Add(filepath, new ParserInfo
{
- Series = "Beelzebub", Volumes = "0",
+ Series = "Beelzebub", Volumes = Parser.LooseLeafVolume,
Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
+ // Note: Lots of duplicates here. I think I can move them to the ParserTests itself
+ filepath = @"E:/Manga/Ichinensei ni Nacchattara/Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
expected.Add(filepath, new ParserInfo
{
Series = "Ichinensei ni Nacchattara", Volumes = "1",
@@ -138,71 +140,71 @@ public class DefaultParserTests
FullFilePath = filepath
});
- filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
+ filepath = @"E:/Manga/Tenjo Tenge (Color)/Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Tenjo Tenge {Full Contact Edition}", Volumes = "1", Edition = "",
- Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
+ filepath = @"E:/Manga/Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)/Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
- Chapters = "0", Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
+ filepath = @"E:/Manga/Dorohedoro/Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Dorohedoro", Volumes = "1", Edition = "",
- Chapters = "0", Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
+ filepath = @"E:/Manga/APOSIMZ/APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "APOSIMZ", Volumes = "0", Edition = "",
+ Series = "APOSIMZ", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
+ filepath = @"E:/Manga/Corpse Party Musume/Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "",
+ Series = "Kedouin Makoto - Corpse Party Musume", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
+ filepath = @"E:/Manga/Goblin Slayer/Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "",
+ Series = "Goblin Slayer - Brand New Day", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
- filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
+ filepath = @"E:/Manga/Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr";
expected.Add(filepath, new ParserInfo
{
- Series = "Summer Time Rendering", Volumes = "0", Edition = "",
- Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
+ Series = "Summer Time Rendering", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, Edition = "",
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = true
});
- filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
+ filepath = @"E:/Manga/Seraph of the End/Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "",
+ Series = "Seraph of the End - Vampire Reign", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz";
+ filepath = @"E:/Manga/Kono Subarashii Sekai ni Bakuen wo!/Vol. 00 Ch. 000.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
@@ -210,7 +212,7 @@ public class DefaultParserTests
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz";
+ filepath = @"E:/Manga/Toukyou Akazukin/Vol. 01 Ch. 001.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
@@ -219,62 +221,37 @@ public class DefaultParserTests
});
// If an image is cover exclusively, ignore it
- filepath = @"E:\Manga\Seraph of the End\cover.png";
+ filepath = @"E:/Manga/Seraph of the End/cover.png";
expected.Add(filepath, null);
- filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz";
+ filepath = @"E:/Manga/The Beginning After the End/Chapter 001.cbz";
expected.Add(filepath, new ParserInfo
{
- Series = "The Beginning After the End", Volumes = "0", Edition = "",
+ Series = "The Beginning After the End", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
- // Note: Fallback to folder will parse Monster #8 and get Monster
- filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
- expected.Add(filepath, new ParserInfo
- {
- Series = "Monster", Volumes = "0", Edition = "",
- Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Image,
- FullFilePath = filepath, IsSpecial = false
- });
-
- filepath = @"E:\Manga\Air Gear\Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
+ filepath = @"E:/Manga/Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Air Gear", Volumes = "1", Edition = "Omnibus",
- Chapters = "0", Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch186\Vol. 19 p106.gif";
- expected.Add(filepath, new ParserInfo
- {
- Series = "Just Images the second", Volumes = "19", Edition = "",
- Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
- FullFilePath = filepath, IsSpecial = false
- });
-
- filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch186\Vol. 19 p106.gif";
- expected.Add(filepath, new ParserInfo
- {
- Series = "Just Images the second", Volumes = "19", Edition = "",
- Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
- FullFilePath = filepath, IsSpecial = false
- });
-
- filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
+ filepath = @"E:/Manga/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
expected.Add(filepath, new ParserInfo
{
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
- Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
FullFilePath = filepath, IsSpecial = false
});
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
- var actual = _defaultParser.Parse(file, rootPath);
+ var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Manga, true, null);
if (expectedInfo == null)
{
Assert.Null(actual);
@@ -299,6 +276,90 @@ public class DefaultParserTests
}
}
+ //[Fact]
+ public void Parse_ParseInfo_Manga_ImageOnly()
+ {
+ // Images don't have root path as E:/Manga, but rather as the path of the folder
+
+ // Note: Fallback to folder will parse Monster #8 and get Monster
+ var filepath = @"E:/Manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg";
+ var expectedInfo2 = new ParserInfo
+ {
+ Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
+ Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+ var actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Monster #8", "E:/Manga", LibraryType.Manga, true, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+
+ filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Vol19/ch. 186/Vol. 19 p106.gif";
+ expectedInfo2 = new ParserInfo
+ {
+ Series = "Just Images the second", Volumes = "19", Edition = "",
+ Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+
+ actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga",LibraryType.Manga, true, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+
+ filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Blank Folder/Vol19/ch. 186/Vol. 19 p106.gif";
+ expectedInfo2 = new ParserInfo
+ {
+ Series = "Just Images the second", Volumes = "19", Edition = "",
+ Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+
+ actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, true, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+ }
+
[Fact]
public void Parse_ParseInfo_Manga_WithSpecialsFolder()
{
@@ -311,7 +372,7 @@ public class DefaultParserTests
filesystem.AddFile(@"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
- var parser = new DefaultParser(ds);
+ var parser = new BasicParser(ds, new ImageParser(ds));
var filepath = @"E:/Manga/Foo 50/Foo 50 v1.cbz";
// There is a bad parse for series like "Foo 50", so we have parsed chapter as 50
@@ -322,7 +383,7 @@ public class DefaultParserTests
FullFilePath = filepath
};
- var actual = parser.Parse(filepath, rootPath);
+ var actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, true, null);
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {filepath}");
@@ -346,12 +407,12 @@ public class DefaultParserTests
filepath = @"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz";
expected = new ParserInfo
{
- Series = "Foo 50", Volumes = "0", IsSpecial = true,
- Chapters = "50", Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
+ Series = "Foo 50", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, IsSpecial = true,
+ Chapters = Parser.DefaultChapter, Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
};
- actual = parser.Parse(filepath, rootPath);
+ actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, true, null);
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {filepath}");
Assert.Equal(expected.Format, actual.Format);
@@ -376,26 +437,26 @@ public class DefaultParserTests
[Fact]
public void Parse_ParseInfo_Comic()
{
- const string rootPath = @"E:/Comics/";
+ const string rootPath = "E:/Comics/";
var expected = new Dictionary<string, ParserInfo>();
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
expected.Add(filepath, new ParserInfo
{
- Series = "Teen Titans", Volumes = "0",
- Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
+ Series = "Teen Titans", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume,
+ Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath
});
// Fallback test with bad naming
- filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
+ filepath = @"E:/Comics/Comics/Babe/Babe Vol.1 #1-4/Babe 01.cbr";
expected.Add(filepath, new ParserInfo
{
- Series = "Babe", Volumes = "0", Edition = "",
+ Series = "Babe", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr";
+ filepath = @"E:/Comics/Comics/Publisher/Batman the Detective (2021)/Batman the Detective - v6 - 11 - (2021).cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Batman the Detective", Volumes = "6", Edition = "",
@@ -403,10 +464,10 @@ public class DefaultParserTests
FullFilePath = filepath, IsSpecial = false
});
- filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr";
+ filepath = @"E:/Comics/Comics/Batman - The Man Who Laughs #1 (2005)/Batman - The Man Who Laughs #1 (2005).cbr";
expected.Add(filepath, new ParserInfo
{
- Series = "Batman - The Man Who Laughs", Volumes = "0", Edition = "",
+ Series = "Batman - The Man Who Laughs", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
@@ -414,7 +475,7 @@ public class DefaultParserTests
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
- var actual = _defaultParser.Parse(file, rootPath, LibraryType.Comic);
+ var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Comic, true, null);
if (expectedInfo == null)
{
Assert.Null(actual);
diff --git a/API.Tests/Parsers/ImageParserTests.cs b/API.Tests/Parsers/ImageParserTests.cs
new file mode 100644
index 000000000..63df1926e
--- /dev/null
+++ b/API.Tests/Parsers/ImageParserTests.cs
@@ -0,0 +1,97 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class ImageParserTests
+{
+ private readonly ImageParser _parser;
+ private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+ private const string RootDirectory = "C:/Comics/";
+
+ public ImageParserTests()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.AddDirectory("C:/Comics/");
+ fileSystem.AddDirectory("C:/Comics/Birds of Prey (2002)");
+ fileSystem.AddFile("C:/Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
+ fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
+ var ds = new DirectoryService(_dsLogger, fileSystem);
+ _parser = new ImageParser(ds);
+ }
+
+ #region Parse
+
+ /// <summary>
+ /// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithDirectoryName()
+ {
+ var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01/01.jpg", "C:/Comics/Birds of Prey/",
+ RootDirectory, LibraryType.Image, true, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey", actual.Series);
+ Assert.Equal("1", actual.Chapters);
+ }
+
+ /// <summary>
+ /// Tests that if there is a Series Folder only, the code appropriately identifies the Series name from folder
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithNoNestedChapter()
+ {
+ var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01 page 01.jpg", "C:/Comics/",
+ RootDirectory, LibraryType.Image, true, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey", actual.Series);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ }
+
+ /// <summary>
+ /// Tests that if there is a Series Folder only, the code appropriately identifies the Series name from folder and everything else as a
+ /// </summary>
+ [Fact]
+ public void Parse_SeriesWithLooseImages()
+ {
+ var actual = _parser.Parse("C:/Comics/Birds of Prey/page 01.jpg", "C:/Comics/",
+ RootDirectory, LibraryType.Image, true, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("Birds of Prey", actual.Series);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.True(actual.IsSpecial);
+ }
+
+
+ #endregion
+
+ #region IsApplicable
+ /// <summary>
+ /// Tests that this Parser can only be used on images and Image library type
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+ {
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
+ Assert.False(_parser.IsApplicable("something.epub", LibraryType.Image));
+ }
+
+ /// <summary>
+ /// Tests that this Parser can only be used on images and Image library type
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Success_WhenMatchingLibraryType()
+ {
+ Assert.True(_parser.IsApplicable("something.png", LibraryType.Image));
+ }
+ #endregion
+}
diff --git a/API.Tests/Parsers/PdfParserTests.cs b/API.Tests/Parsers/PdfParserTests.cs
new file mode 100644
index 000000000..08bf9f25d
--- /dev/null
+++ b/API.Tests/Parsers/PdfParserTests.cs
@@ -0,0 +1,71 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Parsers;
+
+public class PdfParserTests
+{
+ private readonly PdfParser _parser;
+ private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
+ private const string RootDirectory = "C:/Books/";
+
+ public PdfParserTests()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.AddDirectory("C:/Books/");
+ fileSystem.AddDirectory("C:/Books/Birds of Prey (2002)");
+ fileSystem.AddFile("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf", new MockFileData(""));
+ fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
+ var ds = new DirectoryService(_dsLogger, fileSystem);
+ _parser = new PdfParser(ds);
+ }
+
+ #region Parse
+
+ /// <summary>
+ /// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
+ /// </summary>
+ [Fact]
+ public void Parse_Book_SeriesWithDirectoryName()
+ {
+ var actual = _parser.Parse("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf",
+ "C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/",
+ RootDirectory, LibraryType.Book, true, null);
+
+ Assert.NotNull(actual);
+ Assert.Equal("A Dictionary of Japanese Food - Ingredients and Culture", actual.Series);
+ Assert.Equal(Parser.DefaultChapter, actual.Chapters);
+ Assert.True(actual.IsSpecial);
+ }
+
+ #endregion
+
+ #region IsApplicable
+ /// <summary>
+ /// Tests that this Parser can only be used on pdfs
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Fails_WhenNonMatchingLibraryType()
+ {
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
+ Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
+ Assert.False(_parser.IsApplicable("something.epub", LibraryType.Image));
+ Assert.False(_parser.IsApplicable("something.png", LibraryType.Book));
+ }
+
+ /// <summary>
+ /// Tests that this Parser can only be used on pdfs
+ /// </summary>
+ [Fact]
+ public void IsApplicable_Success_WhenMatchingLibraryType()
+ {
+ Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Book));
+ Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Manga));
+ }
+ #endregion
+}
diff --git a/API.Tests/Parsing/BookParsingTests.cs b/API.Tests/Parsing/BookParsingTests.cs
new file mode 100644
index 000000000..9b02eff63
--- /dev/null
+++ b/API.Tests/Parsing/BookParsingTests.cs
@@ -0,0 +1,24 @@
+using API.Entities.Enums;
+using Xunit;
+
+namespace API.Tests.Parsing;
+
+public class BookParsingTests
+{
+ [Theory]
+ [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")]
+ [InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")]
+ [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")]
+ public void ParseSeriesTest(string filename, string expected)
+ {
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Book));
+ }
+
+ [Theory]
+ [InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
+ [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")]
+ public void ParseVolumeTest(string filename, string expected)
+ {
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Book));
+ }
+}
diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parsing/ComicParsingTests.cs
similarity index 66%
rename from API.Tests/Parser/ComicParserTests.cs
rename to API.Tests/Parsing/ComicParsingTests.cs
index 689327d98..a0375a566 100644
--- a/API.Tests/Parser/ComicParserTests.cs
+++ b/API.Tests/Parsing/ComicParsingTests.cs
@@ -1,27 +1,11 @@
-using System.IO.Abstractions.TestingHelpers;
-using API.Parser;
-using API.Services;
+using API.Entities.Enums;
using API.Services.Tasks.Scanner.Parser;
-using Microsoft.Extensions.Logging;
-using NSubstitute;
using Xunit;
-using Xunit.Abstractions;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsing;
-public class ComicParserTests
+public class ComicParsingTests
{
- private readonly ITestOutputHelper _testOutputHelper;
- private readonly DefaultParser _defaultParser;
-
- public ComicParserTests(ITestOutputHelper testOutputHelper)
- {
- _testOutputHelper = testOutputHelper;
- _defaultParser =
- new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
- new MockFileSystem()));
- }
-
[Theory]
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")]
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")]
@@ -67,57 +51,60 @@ public class ComicParserTests
[InlineData("Demon 012 (Sep 1973) c2c", "Demon")]
[InlineData("Dragon Age - Until We Sleep 01 (of 03)", "Dragon Age - Until We Sleep")]
[InlineData("Green Lantern v2 017 - The Spy-Eye that doomed Green Lantern v2", "Green Lantern")]
- [InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire - Adam Strange")]
- [InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis - Rags Morales Sketches")]
+ [InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire Special - Adam Strange")]
+ [InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis Extra - Rags Morales Sketches")]
[InlineData("Daredevil - t6 - 10 - (2019)", "Daredevil")]
[InlineData("Batgirl T2000 #57", "Batgirl")]
[InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")]
[InlineData("Conquistador_-Tome_2", "Conquistador")]
[InlineData("Max_l_explorateur-_Tome_0", "Max l explorateur")]
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")]
- [InlineData("Bd Fr-Aldebaran-Antares-t6", "Aldebaran-Antares")]
+ [InlineData("Bd Fr-Aldebaran-Antares-t6", "Bd Fr-Aldebaran-Antares")]
[InlineData("Tintin - T22 Vol 714 pour Sydney", "Tintin")]
[InlineData("Fables 2010 Vol. 1 Legends in Exile", "Fables 2010")]
[InlineData("Kebab Том 1 Глава 1", "Kebab")]
[InlineData("Манга Глава 1", "Манга")]
+ [InlineData("ReZero รีเซทชีวิต ฝ่าวิกฤตต่างโลก เล่ม 1", "ReZero รีเซทชีวิต ฝ่าวิกฤตต่างโลก")]
+ [InlineData("SKY WORLD สกายเวิลด์ เล่มที่ 1", "SKY WORLD สกายเวิลด์")]
public void ParseComicSeriesTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(filename));
+ Assert.Equal(expected, Parser.ParseComicSeries(filename));
}
[Theory]
- [InlineData("01 Spider-Man & Wolverine 01.cbr", "0")]
- [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")]
- [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")]
- [InlineData("Batman & Catwoman - Trail of the Gun 01", "0")]
- [InlineData("Batman & Daredevil - King of New York", "0")]
- [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "0")]
- [InlineData("Batman & Robin the Teen Wonder #0", "0")]
- [InlineData("Batman & Wildcat (1 of 3)", "0")]
- [InlineData("Batman And Superman World's Finest #01", "0")]
- [InlineData("Babe 01", "0")]
- [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "0")]
+ [InlineData("01 Spider-Man & Wolverine 01.cbr", Parser.LooseLeafVolume)]
+ [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
+ [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Catwoman - Trail of the Gun 01", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Daredevil - King of New York", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Robin the Teen Wonder #0", Parser.LooseLeafVolume)]
+ [InlineData("Batman & Wildcat (1 of 3)", Parser.LooseLeafVolume)]
+ [InlineData("Batman And Superman World's Finest #01", Parser.LooseLeafVolume)]
+ [InlineData("Babe 01", Parser.LooseLeafVolume)]
+ [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", Parser.LooseLeafVolume)]
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
- [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "0")]
+ [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", Parser.LooseLeafVolume)]
[InlineData("Superman v1 024 (09-10 1943)", "1")]
- [InlineData("Amazing Man Comics chapter 25", "0")]
- [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")]
- [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")]
- [InlineData("spawn-123", "0")]
- [InlineData("spawn-chapter-123", "0")]
- [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "0")]
- [InlineData("Batman Beyond 04 (of 6) (1999)", "0")]
- [InlineData("Batman Beyond 001 (2012)", "0")]
- [InlineData("Batman Beyond 2.0 001 (2013)", "0")]
- [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "0")]
+ [InlineData("Superman v1.5 024 (09-10 1943)", "1.5")]
+ [InlineData("Amazing Man Comics chapter 25", Parser.LooseLeafVolume)]
+ [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", Parser.LooseLeafVolume)]
+ [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", Parser.LooseLeafVolume)]
+ [InlineData("spawn-123", Parser.LooseLeafVolume)]
+ [InlineData("spawn-chapter-123", Parser.LooseLeafVolume)]
+ [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", Parser.LooseLeafVolume)]
+ [InlineData("Batman Beyond 04 (of 6) (1999)", Parser.LooseLeafVolume)]
+ [InlineData("Batman Beyond 001 (2012)", Parser.LooseLeafVolume)]
+ [InlineData("Batman Beyond 2.0 001 (2013)", Parser.LooseLeafVolume)]
+ [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", Parser.LooseLeafVolume)]
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")]
- [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
+ [InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.LooseLeafVolume)]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")]
[InlineData("Batgirl V2000 #57", "2000")]
- [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "0")]
- [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")]
- [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "0")]
+ [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", Parser.LooseLeafVolume)]
+ [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", Parser.LooseLeafVolume)]
[InlineData("Daredevil - v6 - 10 - (2019)", "6")]
+ [InlineData("Daredevil - v6.5", "6.5")]
// Tome Tests
[InlineData("Daredevil - t6 - 10 - (2019)", "6")]
[InlineData("Batgirl T2000 #57", "2000")]
@@ -125,22 +112,25 @@ public class ComicParserTests
[InlineData("Conquistador_Tome_2", "2")]
[InlineData("Max_l_explorateur-_Tome_0", "0")]
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")]
- [InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "0")]
+ [InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", Parser.LooseLeafVolume)]
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")]
// Russian Tests
[InlineData("Kebab Том 1 Глава 3", "1")]
- [InlineData("Манга Глава 2", "0")]
+ [InlineData("Манга Глава 2", Parser.LooseLeafVolume)]
+ [InlineData("ย้อนเวลากลับมาร้าย เล่ม 1", "1")]
+ [InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "1")]
+ [InlineData("วิวาห์รัก เดิมพันชีวิต ตอนที่ 2", Parser.LooseLeafVolume)]
public void ParseComicVolumeTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(filename));
+ Assert.Equal(expected, Parser.ParseComicVolume(filename));
}
[Theory]
[InlineData("01 Spider-Man & Wolverine 01.cbr", "1")]
- [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")]
- [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")]
+ [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.DefaultChapter)]
+ [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.DefaultChapter)]
[InlineData("Batman & Catwoman - Trail of the Gun 01", "1")]
- [InlineData("Batman & Daredevil - King of New York", "0")]
+ [InlineData("Batman & Daredevil - King of New York", Parser.DefaultChapter)]
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")]
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
[InlineData("Batman & Wildcat (1 of 3)", "1")]
@@ -164,8 +154,8 @@ public class ComicParserTests
[InlineData("Batman Beyond 001 (2012)", "1")]
[InlineData("Batman Beyond 2.0 001 (2013)", "1")]
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")]
- [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "0")]
- [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
+ [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", Parser.DefaultChapter)]
+ [InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.DefaultChapter)]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")]
[InlineData("Batgirl V2000 #57", "57")]
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")]
@@ -174,43 +164,47 @@ public class ComicParserTests
[InlineData("Daredevil - v6 - 10 - (2019)", "10")]
[InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")]
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")]
- [InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "0")]
+ [InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", Parser.DefaultChapter)]
[InlineData("Kebab Том 1 Глава 3", "3")]
[InlineData("Манга Глава 2", "2")]
[InlineData("Манга 2 Глава", "2")]
[InlineData("Манга Том 1 2 Глава", "2")]
+ [InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
+ [InlineData("Max Level Returner ตอนที่ 5", "5")]
+ [InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
public void ParseComicChapterTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(filename));
+ Assert.Equal(expected, Parser.ParseChapter(filename, LibraryType.Comic));
}
[Theory]
- [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)]
- [InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)]
+ [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", false)]
+ [InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", false)]
[InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)]
- [InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)]
- [InlineData("Boule et Bill - THS -Bill à disparu", true)]
- [InlineData("Asterix - HS - Les 12 travaux d'Astérix", true)]
- [InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", true)]
+ [InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", false)]
+ [InlineData("Boule et Bill - THS -Bill à disparu", false)]
+ [InlineData("Asterix - HS - Les 12 travaux d'Astérix", false)]
+ [InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", false)]
[InlineData("laughs", false)]
- [InlineData("Annual Days of Summer", true)]
- [InlineData("Adventure Time 2013 Annual #001 (2013)", true)]
- [InlineData("Adventure Time 2013_Annual_#001 (2013)", true)]
- [InlineData("Adventure Time 2013_-_Annual #001 (2013)", true)]
+ [InlineData("Annual Days of Summer", false)]
+ [InlineData("Adventure Time 2013 Annual #001 (2013)", false)]
+ [InlineData("Adventure Time 2013_Annual_#001 (2013)", false)]
+ [InlineData("Adventure Time 2013_-_Annual #001 (2013)", false)]
[InlineData("G.I. Joe - A Real American Hero Yearbook 004 Reprint (2021)", false)]
[InlineData("Mazebook 001", false)]
- [InlineData("X-23 One Shot (2010)", true)]
- [InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", true)]
- [InlineData("Batman Beyond Annual", true)]
- [InlineData("Batman Beyond Bonus", true)]
- [InlineData("Batman Beyond OneShot", true)]
- [InlineData("Batman Beyond Specials", true)]
- [InlineData("Batman Beyond Omnibus (1999)", true)]
- [InlineData("Batman Beyond Omnibus", true)]
- [InlineData("01 Annual Batman Beyond", true)]
+ [InlineData("X-23 One Shot (2010)", false)]
+ [InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", false)]
+ [InlineData("Batman Beyond Annual", false)]
+ [InlineData("Batman Beyond Bonus", false)]
+ [InlineData("Batman Beyond OneShot", false)]
+ [InlineData("Batman Beyond Specials", false)]
+ [InlineData("Batman Beyond Omnibus (1999)", false)]
+ [InlineData("Batman Beyond Omnibus", false)]
+ [InlineData("01 Annual Batman Beyond", false)]
+ [InlineData("Blood Syndicate Annual #001", false)]
public void IsComicSpecialTest(string input, bool expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsComicSpecial(input));
+ Assert.Equal(expected, Parser.IsSpecial(input, LibraryType.Comic));
}
}
diff --git a/API.Tests/Parsing/ImageParsingTests.cs b/API.Tests/Parsing/ImageParsingTests.cs
new file mode 100644
index 000000000..362b4b08c
--- /dev/null
+++ b/API.Tests/Parsing/ImageParsingTests.cs
@@ -0,0 +1,107 @@
+using System.IO.Abstractions.TestingHelpers;
+using API.Entities.Enums;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace API.Tests.Parsing;
+
+public class ImageParsingTests
+{
+ private readonly ITestOutputHelper _testOutputHelper;
+ private readonly ImageParser _parser;
+
+ public ImageParsingTests(ITestOutputHelper testOutputHelper)
+ {
+ _testOutputHelper = testOutputHelper;
+ var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
+ _parser = new ImageParser(directoryService);
+ }
+
+ //[Fact]
+ public void Parse_ParseInfo_Manga_ImageOnly()
+ {
+ // Images don't have root path as E:\Manga, but rather as the path of the folder
+
+ // Note: Fallback to folder will parse Monster #8 and get Monster
+ var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
+ var expectedInfo2 = new ParserInfo
+ {
+ Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
+ Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+ var actual2 = _parser.Parse(filepath, @"E:\Manga\Monster #8", "E:/Manga", LibraryType.Image, true, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+
+ filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
+ expectedInfo2 = new ParserInfo
+ {
+ Series = "Just Images the second", Volumes = "19", Edition = "",
+ Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+
+ actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, true, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+
+ filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
+ expectedInfo2 = new ParserInfo
+ {
+ Series = "Just Images the second", Volumes = "19", Edition = "",
+ Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
+ FullFilePath = filepath, IsSpecial = false
+ };
+
+ actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, true, null);
+ Assert.NotNull(actual2);
+ _testOutputHelper.WriteLine($"Validating {filepath}");
+ Assert.Equal(expectedInfo2.Format, actual2.Format);
+ _testOutputHelper.WriteLine("Format ✓");
+ Assert.Equal(expectedInfo2.Series, actual2.Series);
+ _testOutputHelper.WriteLine("Series ✓");
+ Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
+ _testOutputHelper.WriteLine("Chapters ✓");
+ Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
+ _testOutputHelper.WriteLine("Volumes ✓");
+ Assert.Equal(expectedInfo2.Edition, actual2.Edition);
+ _testOutputHelper.WriteLine("Edition ✓");
+ Assert.Equal(expectedInfo2.Filename, actual2.Filename);
+ _testOutputHelper.WriteLine("Filename ✓");
+ Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
+ _testOutputHelper.WriteLine("FullFilePath ✓");
+ }
+}
diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parsing/MangaParsingTests.cs
similarity index 81%
rename from API.Tests/Parser/MangaParserTests.cs
rename to API.Tests/Parsing/MangaParsingTests.cs
index 20c1a27ae..53f2bc4c9 100644
--- a/API.Tests/Parser/MangaParserTests.cs
+++ b/API.Tests/Parsing/MangaParsingTests.cs
@@ -1,19 +1,10 @@
-using System.Runtime.InteropServices;
using API.Entities.Enums;
using Xunit;
-using Xunit.Abstractions;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsing;
-public class MangaParserTests
+public class MangaParsingTests
{
- private readonly ITestOutputHelper _testOutputHelper;
-
- public MangaParserTests(ITestOutputHelper testOutputHelper)
- {
- _testOutputHelper = testOutputHelper;
- }
-
[Theory]
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "1")]
@@ -26,7 +17,7 @@ public class MangaParserTests
[InlineData("v001", "1")]
[InlineData("Vol 1", "1")]
[InlineData("vol_356-1", "356")] // Mangapy syntax
- [InlineData("No Volume", "0")]
+ [InlineData("No Volume", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1.1")]
[InlineData("Tonikaku Cawaii [Volume 11].cbz", "11")]
@@ -41,18 +32,18 @@ public class MangaParserTests
[InlineData("Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", "1")]
[InlineData("Dorohedoro v11 (2013) (Digital) (LostNerevarine-Empire).cbz", "11")]
[InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")]
- [InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "0")]
+ [InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1")]
- [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "0")]
+ [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("VanDread-v01-c001[MD].zip", "1")]
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
[InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
[InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
[InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
- [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", "0")]
+ [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Vagabond_v03", "3")]
[InlineData("Mujaki No Rakune Volume 10.cbz", "10")]
- [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "0")]
+ [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")]
[InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")]
[InlineData("Gantz.V26.cbz", "26")]
@@ -61,7 +52,7 @@ public class MangaParserTests
[InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")]
[InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "1")]
[InlineData("Sword Art Online Vol 10 - Alicization Running [Yen Press] [LuCaZ] {r2}.epub", "10")]
- [InlineData("Noblesse - Episode 406 (52 Pages).7z", "0")]
+ [InlineData("Noblesse - Episode 406 (52 Pages).7z", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("X-Men v1 #201 (September 2007).cbz", "1")]
[InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")]
[InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")]
@@ -73,18 +64,21 @@ public class MangaParserTests
[InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1-3巻", "1-3")]
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "3.5")]
[InlineData("Kebab Том 1 Глава 3", "1")]
- [InlineData("Манга Глава 2", "0")]
+ [InlineData("Манга Глава 2", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Манга Тома 1-4", "1-4")]
[InlineData("Манга Том 1-4", "1-4")]
[InlineData("조선왕조실톡 106화", "106")]
- [InlineData("죽음 13회", "13")]
[InlineData("동의보감 13장", "13")]
[InlineData("몰?루 아카이브 7.5권", "7.5")]
[InlineData("63권#200", "63")]
[InlineData("시즌34삽화2", "34")]
+ [InlineData("Accel World Chapter 001 Volume 002", "2")]
+ [InlineData("Accel World Volume 2", "2")]
+ [InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.31 Omake", "30")]
+ [InlineData("Zom 100 - Bucket List of the Dead v01", "1")]
public void ParseVolumeTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename));
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Manga));
}
[Theory]
@@ -137,7 +131,6 @@ public class MangaParserTests
[InlineData("Vagabond_v03", "Vagabond")]
[InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")]
[InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")]
- [InlineData("[BAA]_Darker_than_Black_Omake-1.zip", "Darker than Black")]
[InlineData("Baketeriya ch01-05.zip", "Baketeriya")]
[InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")]
[InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")]
@@ -196,21 +189,38 @@ public class MangaParserTests
[InlineData("Манга Том 1 3-4 Глава", "Манга")]
[InlineData("Esquire 6권 2021년 10월호", "Esquire")]
[InlineData("Accel World: Vol 1", "Accel World")]
+ [InlineData("Accel World Chapter 001 Volume 002", "Accel World")]
+ [InlineData("Bleach 001-003", "Bleach")]
+ [InlineData("Accel World Volume 2", "Accel World")]
+ [InlineData("죠시라쿠! 2년 후 v01", "죠시라쿠! 2년 후")]
+ [InlineData("죠시라쿠! 2년 후 1권", "죠시라쿠! 2년 후")]
+ [InlineData("test 2 years 1권", "test 2 years")]
+ [InlineData("test 2 years 1화", "test 2 years")]
+ [InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.30 Omake", "Nagasarete Airantou")]
+ [InlineData("Cynthia The Mission - c000 - c006 (v06)", "Cynthia The Mission")]
+ [InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1", "เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท")]
+ [InlineData("Max Level Returner เล่มที่ 5", "Max Level Returner")]
+ [InlineData("หนึ่งความคิด นิจนิรันดร์ เล่ม 2", "หนึ่งความคิด นิจนิรันดร์")]
+ [InlineData("不安の種\uff0b - 01", "不安の種\uff0b")]
+ [InlineData("Giant Ojou-sama - Ch. 33.5 - Volume 04 Bonus Chapter", "Giant Ojou-sama")]
+ [InlineData("[218565]-(C92) [BRIO (Puyocha)] Mika-nee no Tanryoku Shidou - Mika s Guide to Self-Confidence (THE IDOLM@STE", "")]
+ [InlineData("Monster #8 Ch. 001", "Monster #8")]
+ [InlineData("Zom 100 - Bucket List of the Dead v01", "Zom 100 - Bucket List of the Dead")]
public void ParseSeriesTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename));
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Manga));
}
[Theory]
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")]
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "90-98")]
- [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "0")]
- [InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "0")]
+ [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
+ [InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1-8")]
- [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "0")]
+ [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("c001", "1")]
- [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", "0")]
+ [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")]
[InlineData("Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz", "18")]
[InlineData("Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip", "0-6")]
@@ -233,7 +243,7 @@ public class MangaParserTests
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1-6")]
[InlineData("APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", "40")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "12")]
- [InlineData("Vol 1", "0")]
+ [InlineData("Vol 1", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("VanDread-v01-c001[MD].zip", "1")]
[InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01", "1")]
@@ -245,10 +255,10 @@ public class MangaParserTests
[InlineData("Fullmetal Alchemist chapters 101-108.cbz", "101-108")]
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")]
[InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")]
- [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")]
+ [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Beelzebub_153b_RHS.zip", "153.5")]
[InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")]
- [InlineData("Transferred to another world magical swordsman v1.1", "0")]
+ [InlineData("Transferred to another world magical swordsman v1.1", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")]
[InlineData("Kiss x Sis - Ch.12 - 1 , 2 , 3P!.cbz", "12")]
[InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "1")]
@@ -267,21 +277,31 @@ public class MangaParserTests
[InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")]
[InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")]
[InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")]
- [InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", "0")]
+ [InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Kaiju No. 8 036 (2021) (Digital)", "36")]
- [InlineData("Samurai Jack Vol. 01 - The threads of Time", "0")]
+ [InlineData("Samurai Jack Vol. 01 - The threads of Time", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("【TFO汉化&Petit汉化】迷你偶像漫画第25话", "25")]
[InlineData("자유록 13회#2", "13")]
[InlineData("이세계에서 고아원을 열었지만, 어째서인지 아무도 독립하려 하지 않는다 38-1화 ", "38")]
[InlineData("[ハレム]ナナとカオル ~高校生のSMごっこ~ 第10話", "10")]
- [InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "0")]
+ [InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Kebab Том 1 Глава 3", "3")]
[InlineData("Манга Глава 2", "2")]
[InlineData("Манга 2 Глава", "2")]
[InlineData("Манга Том 1 2 Глава", "2")]
+ [InlineData("Accel World Chapter 001 Volume 002", "1")]
+ [InlineData("Bleach 001-003", "1-3")]
+ [InlineData("Accel World Volume 2", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
+ [InlineData("Historys Strongest Disciple Kenichi_v11_c90-98", "90-98")]
+ [InlineData("Historys Strongest Disciple Kenichi c01-c04", "1-4")]
+ [InlineData("Adabana c00-02", "0-2")]
+ [InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
+ [InlineData("Max Level Returner ตอนที่ 5", "5")]
+ [InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
+ [InlineData("Monster #8 Ch. 001", "1")]
public void ParseChaptersTest(string filename, string expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename));
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename, LibraryType.Manga));
}
@@ -301,25 +321,25 @@ public class MangaParserTests
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input));
}
[Theory]
- [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)]
- [InlineData("Beelzebub_Omake_June_2012_RHS", true)]
+ [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", false)]
+ [InlineData("Beelzebub_Omake_June_2012_RHS", false)]
[InlineData("Beelzebub_Side_Story_02_RHS.zip", false)]
- [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)]
- [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)]
- [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)]
- [InlineData("Ani-Hina Art Collection.cbz", true)]
- [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)]
- [InlineData("A Town Where You Live - Bonus Chapter.zip", true)]
+ [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", false)]
+ [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", false)]
+ [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", false)]
+ [InlineData("Ani-Hina Art Collection.cbz", false)]
+ [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", false)]
+ [InlineData("A Town Where You Live - Bonus Chapter.zip", false)]
[InlineData("Yuki Merry - 4-Komga Anthology", false)]
- [InlineData("Beastars - SP01", false)]
- [InlineData("Beastars SP01", false)]
+ [InlineData("Beastars - SP01", true)]
+ [InlineData("Beastars SP01", true)]
[InlineData("The League of Extraordinary Gentlemen", false)]
[InlineData("The League of Extra-ordinary Gentlemen", false)]
[InlineData("Dr. Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire)", false)]
[InlineData("Hajime no Ippo - Artbook", false)]
public void IsMangaSpecialTest(string input, bool expected)
{
- Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsMangaSpecial(input));
+ Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsSpecial(input, LibraryType.Manga));
}
[Theory]
diff --git a/API.Tests/Parser/ParserInfoTests.cs b/API.Tests/Parsing/ParserInfoTests.cs
similarity index 89%
rename from API.Tests/Parser/ParserInfoTests.cs
rename to API.Tests/Parsing/ParserInfoTests.cs
index ee4881eff..cbb8ae99a 100644
--- a/API.Tests/Parser/ParserInfoTests.cs
+++ b/API.Tests/Parsing/ParserInfoTests.cs
@@ -1,8 +1,8 @@
using API.Entities.Enums;
-using API.Parser;
+using API.Services.Tasks.Scanner.Parser;
using Xunit;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsing;
public class ParserInfoTests
{
@@ -11,14 +11,14 @@ public class ParserInfoTests
{
var p1 = new ParserInfo()
{
- Chapters = "0",
+ Chapters = Parser.DefaultChapter,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/darker than black.cbz",
IsSpecial = false,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = Parser.LooseLeafVolume
};
var p2 = new ParserInfo()
@@ -30,7 +30,7 @@ public class ParserInfoTests
IsSpecial = false,
Series = "darker than black",
Title = "Darker Than Black",
- Volumes = "0"
+ Volumes = Parser.LooseLeafVolume
};
var expected = new ParserInfo()
@@ -42,7 +42,7 @@ public class ParserInfoTests
IsSpecial = false,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = Parser.LooseLeafVolume
};
p1.Merge(p2);
@@ -62,12 +62,12 @@ public class ParserInfoTests
IsSpecial = true,
Series = "darker than black",
Title = "darker than black",
- Volumes = "0"
+ Volumes = Parser.LooseLeafVolume
};
var p2 = new ParserInfo()
{
- Chapters = "0",
+ Chapters = Parser.DefaultChapter,
Edition = "",
Format = MangaFormat.Archive,
FullFilePath = "/manga/darker than black.cbz",
diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parsing/ParsingTests.cs
similarity index 84%
rename from API.Tests/Parser/ParserTest.cs
rename to API.Tests/Parsing/ParsingTests.cs
index e2f06465b..7d5da4f9c 100644
--- a/API.Tests/Parser/ParserTest.cs
+++ b/API.Tests/Parsing/ParsingTests.cs
@@ -1,11 +1,34 @@
+using System.Globalization;
using System.Linq;
using Xunit;
using static API.Services.Tasks.Scanner.Parser.Parser;
-namespace API.Tests.Parser;
+namespace API.Tests.Parsing;
-public class ParserTests
+public class ParsingTests
{
+ [Fact]
+ public void ShouldWork()
+ {
+ var s = 6.5f.ToString(CultureInfo.InvariantCulture);
+ var a = float.Parse(s, CultureInfo.InvariantCulture);
+ Assert.Equal(6.5f, a);
+
+ s = 6.5f + "";
+ a = float.Parse(s, CultureInfo.CurrentCulture);
+ Assert.Equal(6.5f, a);
+ }
+
+ // [Theory]
+ // [InlineData("de-DE")]
+ // [InlineData("en-US")]
+ // public void ShouldParse(string culture)
+ // {
+ // var s = 6.5f + "";
+ // var a = float.Parse(s, CultureInfo.CreateSpecificCulture(culture));
+ // Assert.Equal(6.5f, a);
+ // }
+
[Theory]
[InlineData("Joe Shmo, Green Blue", "Joe Shmo, Green Blue")]
[InlineData("Shmo, Joe", "Shmo, Joe")]
@@ -20,6 +43,7 @@ public class ParserTests
[InlineData("DEAD Tube Prologue", "DEAD Tube Prologue")]
[InlineData("DEAD Tube Prologue SP01", "DEAD Tube Prologue")]
[InlineData("DEAD_Tube_Prologue SP01", "DEAD Tube Prologue")]
+ [InlineData("SP01 1. DEAD Tube Prologue", "1. DEAD Tube Prologue")]
public void CleanSpecialTitleTest(string input, string expected)
{
Assert.Equal(expected, CleanSpecialTitle(input));
@@ -36,6 +60,18 @@ public class ParserTests
Assert.Equal(expected, HasSpecialMarker(input));
}
+ [Theory]
+ [InlineData("Beastars - SP01", 1)]
+ [InlineData("Beastars SP01", 1)]
+ [InlineData("Beastars Special 01", 0)]
+ [InlineData("Beastars Extra 01", 0)]
+ [InlineData("Batman Beyond - Return of the Joker (2001) SP01", 1)]
+ [InlineData("Batman Beyond - Return of the Joker (2001)", 0)]
+ public void ParseSpecialIndexTest(string input, int expected)
+ {
+ Assert.Equal(expected, ParseSpecialIndex(input));
+ }
+
[Theory]
[InlineData("0001", "1")]
[InlineData("1", "1")]
@@ -62,7 +98,8 @@ public class ParserTests
[InlineData("-The Title", false, "The Title")]
[InlineData("- The Title", false, "The Title")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")]
- [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)", true, "Batman - Detective Comics - Rebirth Deluxe Edition")]
+ [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)",
+ true, "Batman - Detective Comics - Rebirth Deluxe Edition Book 04")]
[InlineData("Something - Full Color Edition", false, "Something - Full Color Edition")]
[InlineData("Witchblade 089 (2005) (Bittertek-DCP) (Top Cow (Image Comics))", true, "Witchblade 089")]
[InlineData("(C99) Kami-sama Hiroimashita. (SSSS.GRIDMAN)", false, "Kami-sama Hiroimashita.")]
@@ -146,6 +183,7 @@ public class ParserTests
[InlineData("3.5", 3.5)]
[InlineData("3.5-4.0", 3.5)]
[InlineData("asdfasdf", 0.0)]
+ [InlineData("-10", -10.0)]
public void MinimumNumberFromRangeTest(string input, float expected)
{
Assert.Equal(expected, MinNumberFromRange(input));
@@ -162,6 +200,7 @@ public class ParserTests
[InlineData("3.5", 3.5)]
[InlineData("3.5-4.0", 4.0)]
[InlineData("asdfasdf", 0.0)]
+ [InlineData("-10", -10.0)]
public void MaximumNumberFromRangeTest(string input, float expected)
{
Assert.Equal(expected, MaxNumberFromRange(input));
@@ -177,6 +216,7 @@ public class ParserTests
[InlineData("카비타", "카비타")]
[InlineData("06", "06")]
[InlineData("", "")]
+ [InlineData("不安の種+", "不安の種+")]
public void NormalizeTest(string input, string expected)
{
Assert.Equal(expected, Normalize(input));
@@ -211,6 +251,7 @@ public class ParserTests
[InlineData("ch1/backcover.png", false)]
[InlineData("backcover.png", false)]
[InlineData("back_cover.png", false)]
+ [InlineData("LD Blacklands #1 35 (back cover).png", false)]
public void IsCoverImageTest(string inputPath, bool expected)
{
Assert.Equal(expected, IsCoverImage(inputPath));
@@ -225,6 +266,8 @@ public class ParserTests
[InlineData("@Recently-Snapshot/Love Hina/", true)]
[InlineData("@recycle/Love Hina/", true)]
[InlineData("E:/Test/__MACOSX/Love Hina/", true)]
+ [InlineData("E:/Test/.caltrash/Love Hina/", true)]
+ [InlineData("E:/Test/.yacreaderlibrary/Love Hina/", true)]
public void HasBlacklistedFolderInPathTest(string inputPath, bool expected)
{
Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
@@ -249,7 +292,7 @@ public class ParserTests
[InlineData("The ()quick brown fox jumps over the lazy dog")]
[InlineData("The (quick (brown)) fox jumps over the lazy dog")]
[InlineData("The (quick (brown) fox jumps over the lazy dog)")]
- public void BalancedParenTestMatches(string input)
+ public void BalancedParenTest_Matches(string input)
{
Assert.Matches($@"^{BalancedParen}$", input);
}
@@ -261,7 +304,7 @@ public class ParserTests
[InlineData("The quick (brown)) fox jumps over the lazy dog")]
[InlineData("The quick (brown) fox jumps over the lazy dog)")]
[InlineData("(The ))(quick (brown) fox jumps over the lazy dog")]
- public void BalancedParenTestDoesNotMatch(string input)
+ public void BalancedParenTest_DoesNotMatch(string input)
{
Assert.DoesNotMatch($@"^{BalancedParen}$", input);
}
@@ -273,9 +316,9 @@ public class ParserTests
[InlineData("The []quick brown fox jumps over the lazy dog")]
[InlineData("The [quick [brown]] fox jumps over the lazy dog")]
[InlineData("The [quick [brown] fox jumps over the lazy dog]")]
- public void BalancedBrackTestMatches(string input)
+ public void BalancedBracketTest_Matches(string input)
{
- Assert.Matches($@"^{BalancedBrack}$", input);
+ Assert.Matches($@"^{BalancedBracket}$", input);
}
[Theory]
@@ -285,8 +328,8 @@ public class ParserTests
[InlineData("The quick [brown]] fox jumps over the lazy dog")]
[InlineData("The quick [brown] fox jumps over the lazy dog]")]
[InlineData("[The ]][quick [brown] fox jumps over the lazy dog")]
- public void BalancedBrackTestDoesNotMatch(string input)
+ public void BalancedBracketTest_DoesNotMatch(string input)
{
- Assert.DoesNotMatch($@"^{BalancedBrack}$", input);
+ Assert.DoesNotMatch($@"^{BalancedBracket}$", input);
}
}
diff --git a/API.Tests/Repository/CollectionTagRepositoryTests.cs b/API.Tests/Repository/CollectionTagRepositoryTests.cs
new file mode 100644
index 000000000..5318260be
--- /dev/null
+++ b/API.Tests/Repository/CollectionTagRepositoryTests.cs
@@ -0,0 +1,177 @@
+using System.Collections.Generic;
+using System.Data.Common;
+using System.IO.Abstractions.TestingHelpers;
+using System.Linq;
+using System.Threading.Tasks;
+using API.Data;
+using API.Entities;
+using API.Entities.Enums;
+using API.Helpers;
+using API.Helpers.Builders;
+using API.Services;
+using AutoMapper;
+using Microsoft.Data.Sqlite;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+
+namespace API.Tests.Repository;
+
+public class CollectionTagRepositoryTests
+{
+ private readonly IUnitOfWork _unitOfWork;
+
+ private readonly DbConnection _connection;
+ private readonly DataContext _context;
+
+ private const string CacheDirectory = "C:/kavita/config/cache/";
+ private const string CoverImageDirectory = "C:/kavita/config/covers/";
+ private const string BackupDirectory = "C:/kavita/config/backups/";
+ private const string DataDirectory = "C:/data/";
+
+ public CollectionTagRepositoryTests()
+ {
+ var contextOptions = new DbContextOptionsBuilder().UseSqlite(CreateInMemoryDatabase()).Options;
+ _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
+
+ _context = new DataContext(contextOptions);
+ Task.Run(SeedDb).GetAwaiter().GetResult();
+
+ var config = new MapperConfiguration(cfg => cfg.AddProfile());
+ var mapper = config.CreateMapper();
+ _unitOfWork = new UnitOfWork(_context, mapper, null);
+ }
+
+ #region Setup
+
+ private static DbConnection CreateInMemoryDatabase()
+ {
+ var connection = new SqliteConnection("Filename=:memory:");
+
+ connection.Open();
+
+ return connection;
+ }
+
+ private async Task SeedDb()
+ {
+ await _context.Database.MigrateAsync();
+ var filesystem = CreateFileSystem();
+
+ await Seed.SeedSettings(_context,
+ new DirectoryService(Substitute.For>(), filesystem));
+
+ var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
+ setting.Value = CacheDirectory;
+
+ setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
+ setting.Value = BackupDirectory;
+
+ _context.ServerSetting.Update(setting);
+
+
+ var lib = new LibraryBuilder("Manga")
+ .WithFolderPath(new FolderPathBuilder("C:/data/").Build())
+ .Build();
+
+ _context.AppUser.Add(new AppUser()
+ {
+ UserName = "majora2007",
+ Libraries = new List()
+ {
+ lib
+ }
+ });
+
+ return await _context.SaveChangesAsync() > 0;
+ }
+
+ private async Task ResetDb()
+ {
+ _context.Series.RemoveRange(_context.Series.ToList());
+ _context.AppUserRating.RemoveRange(_context.AppUserRating.ToList());
+ _context.Genre.RemoveRange(_context.Genre.ToList());
+ _context.CollectionTag.RemoveRange(_context.CollectionTag.ToList());
+ _context.Person.RemoveRange(_context.Person.ToList());
+
+ await _context.SaveChangesAsync();
+ }
+
+ private static MockFileSystem CreateFileSystem()
+ {
+ var fileSystem = new MockFileSystem();
+ fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
+ fileSystem.AddDirectory("C:/kavita/config/");
+ fileSystem.AddDirectory(CacheDirectory);
+ fileSystem.AddDirectory(CoverImageDirectory);
+ fileSystem.AddDirectory(BackupDirectory);
+ fileSystem.AddDirectory(DataDirectory);
+
+ return fileSystem;
+ }
+
+ #endregion
+
+ // #region RemoveTagsWithoutSeries
+ //
+ // [Fact]
+ // public async Task RemoveTagsWithoutSeries_ShouldRemoveTags()
+ // {
+ // var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
+ // var series = new SeriesBuilder("Test 1").Build();
+ // var commonTag = new AppUserCollectionBuilder("Tag 1").Build();
+ // series.Metadata.CollectionTags.Add(commonTag);
+ // series.Metadata.CollectionTags.Add(new AppUserCollectionBuilder("Tag 2").Build());
+ //
+ // var series2 = new SeriesBuilder("Test 1").Build();
+ // series2.Metadata.CollectionTags.Add(commonTag);
+ // library.Series.Add(series);
+ // library.Series.Add(series2);
+ // _unitOfWork.LibraryRepository.Add(library);
+ // await _unitOfWork.CommitAsync();
+ //
+ // Assert.Equal(2, series.Metadata.CollectionTags.Count);
+ // Assert.Single(series2.Metadata.CollectionTags);
+ //
+ // // Delete both series
+ // _unitOfWork.SeriesRepository.Remove(series);
+ // _unitOfWork.SeriesRepository.Remove(series2);
+ //
+ // await _unitOfWork.CommitAsync();
+ //
+ // // Validate that both tags exist
+ // Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
+ //
+ // await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
+ //
+ // Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
+ // }
+ //
+ // [Fact]
+ // public async Task RemoveTagsWithoutSeries_ShouldNotRemoveTags()
+ // {
+ // var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
+ // var series = new SeriesBuilder("Test 1").Build();
+ // var commonTag = new AppUserCollectionBuilder("Tag 1").Build();
+ // series.Metadata.CollectionTags.Add(commonTag);
+ // series.Metadata.CollectionTags.Add(new AppUserCollectionBuilder("Tag 2").Build());
+ //
+ // var series2 = new SeriesBuilder("Test 1").Build();
+ // series2.Metadata.CollectionTags.Add(commonTag);
+ // library.Series.Add(series);
+ // library.Series.Add(series2);
+ // _unitOfWork.LibraryRepository.Add(library);
+ // await _unitOfWork.CommitAsync();
+ //
+ // Assert.Equal(2, series.Metadata.CollectionTags.Count);
+ // Assert.Single(series2.Metadata.CollectionTags);
+ //
+ // await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
+ //
+ // // Validate that both tags exist
+ // Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
+ // }
+ //
+ // #endregion
+}
diff --git a/API.Tests/Repository/GenreRepositoryTests.cs b/API.Tests/Repository/GenreRepositoryTests.cs
new file mode 100644
index 000000000..d197a91ba
--- /dev/null
+++ b/API.Tests/Repository/GenreRepositoryTests.cs
@@ -0,0 +1,280 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using API.DTOs.Metadata.Browse;
+using API.Entities;
+using API.Entities.Enums;
+using API.Entities.Metadata;
+using API.Helpers;
+using API.Helpers.Builders;
+using Xunit;
+
+namespace API.Tests.Repository;
+
+public class GenreRepositoryTests : AbstractDbTest
+{
+ private AppUser _fullAccess;
+ private AppUser _restrictedAccess;
+ private AppUser _restrictedAgeAccess;
+
+ protected override async Task ResetDb()
+ {
+ Context.Genre.RemoveRange(Context.Genre);
+ Context.Library.RemoveRange(Context.Library);
+ await Context.SaveChangesAsync();
+ }
+
+ private TestGenreSet CreateTestGenres()
+ {
+ return new TestGenreSet
+ {
+ SharedSeriesChaptersGenre = new GenreBuilder("Shared Series Chapter Genre").Build(),
+ SharedSeriesGenre = new GenreBuilder("Shared Series Genre").Build(),
+ SharedChaptersGenre = new GenreBuilder("Shared Chapters Genre").Build(),
+ Lib0SeriesChaptersGenre = new GenreBuilder("Lib0 Series Chapter Genre").Build(),
+ Lib0SeriesGenre = new GenreBuilder("Lib0 Series Genre").Build(),
+ Lib0ChaptersGenre = new GenreBuilder("Lib0 Chapters Genre").Build(),
+ Lib1SeriesChaptersGenre = new GenreBuilder("Lib1 Series Chapter Genre").Build(),
+ Lib1SeriesGenre = new GenreBuilder("Lib1 Series Genre").Build(),
+ Lib1ChaptersGenre = new GenreBuilder("Lib1 Chapters Genre").Build(),
+ Lib1ChapterAgeGenre = new GenreBuilder("Lib1 Chapter Age Genre").Build()
+ };
+ }
+
+ private async Task SeedDbWithGenres(TestGenreSet genres)
+ {
+ await CreateTestUsers();
+ await AddGenresToContext(genres);
+ await CreateLibrariesWithGenres(genres);
+ await AssignLibrariesToUsers();
+ }
+
+ private async Task CreateTestUsers()
+ {
+ _fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
+ _restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
+ _restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
+ _restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
+ _restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
+
+ Context.Users.Add(_fullAccess);
+ Context.Users.Add(_restrictedAccess);
+ Context.Users.Add(_restrictedAgeAccess);
+ await Context.SaveChangesAsync();
+ }
+
+ private async Task AddGenresToContext(TestGenreSet genres)
+ {
+ var allGenres = genres.GetAllGenres();
+ Context.Genre.AddRange(allGenres);
+ await Context.SaveChangesAsync();
+ }
+
+ private async Task CreateLibrariesWithGenres(TestGenreSet genres)
+ {
+ var lib0 = new LibraryBuilder("lib0")
+ .WithSeries(new SeriesBuilder("lib0-s0")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib0SeriesChaptersGenre, genres.Lib0SeriesGenre])
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib0SeriesChaptersGenre, genres.Lib0ChaptersGenre])
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
+ .Build())
+ .Build())
+ .Build())
+ .Build();
+
+ var lib1 = new LibraryBuilder("lib1")
+ .WithSeries(new SeriesBuilder("lib1-s0")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1SeriesGenre])
+ .WithAgeRating(AgeRating.Mature17Plus)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre, genres.Lib1ChapterAgeGenre])
+ .WithAgeRating(AgeRating.Mature17Plus)
+ .Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("lib1-s1")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1SeriesGenre])
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
+ .Build())
+ .Build())
+ .Build())
+ .Build();
+
+ Context.Library.Add(lib0);
+ Context.Library.Add(lib1);
+ await Context.SaveChangesAsync();
+ }
+
+ private async Task AssignLibrariesToUsers()
+ {
+ var lib0 = Context.Library.First(l => l.Name == "lib0");
+ var lib1 = Context.Library.First(l => l.Name == "lib1");
+
+ _fullAccess.Libraries.Add(lib0);
+ _fullAccess.Libraries.Add(lib1);
+ _restrictedAccess.Libraries.Add(lib1);
+ _restrictedAgeAccess.Libraries.Add(lib1);
+
+ await Context.SaveChangesAsync();
+ }
+
+ private static Predicate<BrowseGenreDto> ContainsGenreCheck(Genre genre)
+ {
+ return g => g.Id == genre.Id;
+ }
+
+ private static void AssertGenrePresent(IEnumerable<BrowseGenreDto> genres, Genre expectedGenre)
+ {
+ Assert.Contains(genres, ContainsGenreCheck(expectedGenre));
+ }
+
+ private static void AssertGenreNotPresent(IEnumerable<BrowseGenreDto> genres, Genre expectedGenre)
+ {
+ Assert.DoesNotContain(genres, ContainsGenreCheck(expectedGenre));
+ }
+
+ private static BrowseGenreDto GetGenreDto(IEnumerable<BrowseGenreDto> genres, Genre genre)
+ {
+ return genres.First(dto => dto.Id == genre.Id);
+ }
+
+ [Fact]
+ public async Task GetBrowseableGenre_FullAccess_ReturnsAllGenresWithCorrectCounts()
+ {
+ // Arrange
+ await ResetDb();
+ var genres = CreateTestGenres();
+ await SeedDbWithGenres(genres);
+
+ // Act
+ var fullAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_fullAccess.Id, new UserParams());
+
+ // Assert
+ Assert.Equal(genres.GetAllGenres().Count, fullAccessGenres.TotalCount);
+
+ foreach (var genre in genres.GetAllGenres())
+ {
+ AssertGenrePresent(fullAccessGenres, genre);
+ }
+
+ // Verify counts - 1 lib0 series, 2 lib1 series = 3 total series
+ Assert.Equal(3, GetGenreDto(fullAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
+ Assert.Equal(6, GetGenreDto(fullAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
+ Assert.Equal(1, GetGenreDto(fullAccessGenres, genres.Lib0SeriesGenre).SeriesCount);
+ }
+
+ [Fact]
+ public async Task GetBrowseableGenre_RestrictedAccess_ReturnsOnlyAccessibleGenres()
+ {
+ // Arrange
+ await ResetDb();
+ var genres = CreateTestGenres();
+ await SeedDbWithGenres(genres);
+
+ // Act
+ var restrictedAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_restrictedAccess.Id, new UserParams());
+
+ // Assert - Should see: 3 shared + 4 library 1 specific = 7 genres
+ Assert.Equal(7, restrictedAccessGenres.TotalCount);
+
+ // Verify shared and Library 1 genres are present
+ AssertGenrePresent(restrictedAccessGenres, genres.SharedSeriesChaptersGenre);
+ AssertGenrePresent(restrictedAccessGenres, genres.SharedSeriesGenre);
+ AssertGenrePresent(restrictedAccessGenres, genres.SharedChaptersGenre);
+ AssertGenrePresent(restrictedAccessGenres, genres.Lib1SeriesChaptersGenre);
+ AssertGenrePresent(restrictedAccessGenres, genres.Lib1SeriesGenre);
+ AssertGenrePresent(restrictedAccessGenres, genres.Lib1ChaptersGenre);
+ AssertGenrePresent(restrictedAccessGenres, genres.Lib1ChapterAgeGenre);
+
+ // Verify Library 0 specific genres are not present
+ AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0SeriesChaptersGenre);
+ AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0SeriesGenre);
+ AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0ChaptersGenre);
+
+ // Verify counts - 2 lib1 series
+ Assert.Equal(2, GetGenreDto(restrictedAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
+ Assert.Equal(4, GetGenreDto(restrictedAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
+ Assert.Equal(2, GetGenreDto(restrictedAccessGenres, genres.Lib1SeriesGenre).SeriesCount);
+ Assert.Equal(4, GetGenreDto(restrictedAccessGenres, genres.Lib1ChaptersGenre).ChapterCount);
+ Assert.Equal(1, GetGenreDto(restrictedAccessGenres, genres.Lib1ChapterAgeGenre).ChapterCount);
+ }
+
+ [Fact]
+ public async Task GetBrowseableGenre_RestrictedAgeAccess_FiltersAgeRestrictedContent()
+ {
+ // Arrange
+ await ResetDb();
+ var genres = CreateTestGenres();
+ await SeedDbWithGenres(genres);
+
+ // Act
+ var restrictedAgeAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_restrictedAgeAccess.Id, new UserParams());
+
+ // Assert - Should see: 3 shared + 3 lib1 specific = 6 genres (age-restricted genre filtered out)
+ Assert.Equal(6, restrictedAgeAccessGenres.TotalCount);
+
+ // Verify accessible genres are present
+ AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre);
+ AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedSeriesGenre);
+ AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedChaptersGenre);
+ AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1SeriesChaptersGenre);
+ AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1SeriesGenre);
+ AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1ChaptersGenre);
+
+ // Verify age-restricted genre is filtered out
+ AssertGenreNotPresent(restrictedAgeAccessGenres, genres.Lib1ChapterAgeGenre);
+
+ // Verify counts - 1 series lib1 (age-restricted series filtered out)
+ Assert.Equal(1, GetGenreDto(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
+ Assert.Equal(1, GetGenreDto(restrictedAgeAccessGenres, genres.Lib1SeriesGenre).SeriesCount);
+
+ // These values represent a bug - chapters are not properly filtered when their series is age-restricted
+ // Should be 2, but currently returns 3 due to the filtering issue
+ Assert.Equal(3, GetGenreDto(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
+ Assert.Equal(3, GetGenreDto(restrictedAgeAccessGenres, genres.Lib1ChaptersGenre).ChapterCount);
+ }
+
+ private class TestGenreSet
+ {
+ public Genre SharedSeriesChaptersGenre { get; set; }
+ public Genre SharedSeriesGenre { get; set; }
+ public Genre SharedChaptersGenre { get; set; }
+ public Genre Lib0SeriesChaptersGenre { get; set; }
+ public Genre Lib0SeriesGenre { get; set; }
+ public Genre Lib0ChaptersGenre { get; set; }
+ public Genre Lib1SeriesChaptersGenre { get; set; }
+ public Genre Lib1SeriesGenre { get; set; }
+ public Genre Lib1ChaptersGenre { get; set; }
+ public Genre Lib1ChapterAgeGenre { get; set; }
+
+ public List<Genre> GetAllGenres()
+ {
+ return
+ [
+ SharedSeriesChaptersGenre, SharedSeriesGenre, SharedChaptersGenre,
+ Lib0SeriesChaptersGenre, Lib0SeriesGenre, Lib0ChaptersGenre,
+ Lib1SeriesChaptersGenre, Lib1SeriesGenre, Lib1ChaptersGenre, Lib1ChapterAgeGenre
+ ];
+ }
+ }
+}
diff --git a/API.Tests/Repository/PersonRepositoryTests.cs b/API.Tests/Repository/PersonRepositoryTests.cs
new file mode 100644
index 000000000..a2b19cc0c
--- /dev/null
+++ b/API.Tests/Repository/PersonRepositoryTests.cs
@@ -0,0 +1,342 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using API.DTOs.Metadata.Browse;
+using API.DTOs.Metadata.Browse.Requests;
+using API.Entities;
+using API.Entities.Enums;
+using API.Entities.Person;
+using API.Helpers;
+using API.Helpers.Builders;
+using Xunit;
+
+namespace API.Tests.Repository;
+
+public class PersonRepositoryTests : AbstractDbTest
+{
+ private AppUser _fullAccess;
+ private AppUser _restrictedAccess;
+ private AppUser _restrictedAgeAccess;
+
+ protected override async Task ResetDb()
+ {
+ Context.Person.RemoveRange(Context.Person.ToList());
+ Context.Library.RemoveRange(Context.Library.ToList());
+ Context.AppUser.RemoveRange(Context.AppUser.ToList());
+ await UnitOfWork.CommitAsync();
+ }
+
+ private async Task SeedDb()
+ {
+ _fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
+ _restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
+ _restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
+ _restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
+ _restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
+
+ Context.AppUser.Add(_fullAccess);
+ Context.AppUser.Add(_restrictedAccess);
+ Context.AppUser.Add(_restrictedAgeAccess);
+ await Context.SaveChangesAsync();
+
+ var people = CreateTestPeople();
+ Context.Person.AddRange(people);
+ await Context.SaveChangesAsync();
+
+ var libraries = CreateTestLibraries(people);
+ Context.Library.AddRange(libraries);
+ await Context.SaveChangesAsync();
+
+ _fullAccess.Libraries.Add(libraries[0]); // lib0
+ _fullAccess.Libraries.Add(libraries[1]); // lib1
+ _restrictedAccess.Libraries.Add(libraries[1]); // lib1 only
+ _restrictedAgeAccess.Libraries.Add(libraries[1]); // lib1 only
+
+ await Context.SaveChangesAsync();
+ }
+
+ private static List<Person> CreateTestPeople()
+ {
+ return new List<Person>
+ {
+ new PersonBuilder("Shared Series Chapter Person").Build(),
+ new PersonBuilder("Shared Series Person").Build(),
+ new PersonBuilder("Shared Chapters Person").Build(),
+ new PersonBuilder("Lib0 Series Chapter Person").Build(),
+ new PersonBuilder("Lib0 Series Person").Build(),
+ new PersonBuilder("Lib0 Chapters Person").Build(),
+ new PersonBuilder("Lib1 Series Chapter Person").Build(),
+ new PersonBuilder("Lib1 Series Person").Build(),
+ new PersonBuilder("Lib1 Chapters Person").Build(),
+ new PersonBuilder("Lib1 Chapter Age Person").Build()
+ };
+ }
+
+ private static List<Library> CreateTestLibraries(List<Person> people)
+ {
+ var lib0 = new LibraryBuilder("lib0")
+ .WithSeries(new SeriesBuilder("lib0-s0")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Writer)
+ .WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Writer)
+ .WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Writer)
+ .WithPerson(GetPersonByName(people, "Lib0 Series Person"), PersonRole.Writer)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Colorist)
+ .WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Colorist)
+ .WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Colorist)
+ .WithPerson(GetPersonByName(people, "Lib0 Chapters Person"), PersonRole.Colorist)
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Editor)
+ .WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Editor)
+ .WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Editor)
+ .WithPerson(GetPersonByName(people, "Lib0 Chapters Person"), PersonRole.Editor)
+ .Build())
+ .Build())
+ .Build())
+ .Build();
+
+ var lib1 = new LibraryBuilder("lib1")
+ .WithSeries(new SeriesBuilder("lib1-s0")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Letterer)
+ .WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Letterer)
+ .WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Letterer)
+ .WithPerson(GetPersonByName(people, "Lib1 Series Person"), PersonRole.Letterer)
+ .WithAgeRating(AgeRating.Mature17Plus)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Imprint)
+ .WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Imprint)
+ .WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Imprint)
+ .WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Imprint)
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.CoverArtist)
+ .WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.CoverArtist)
+ .WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.CoverArtist)
+ .WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.CoverArtist)
+ .WithPerson(GetPersonByName(people, "Lib1 Chapter Age Person"), PersonRole.CoverArtist)
+ .WithAgeRating(AgeRating.Mature17Plus)
+ .Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("lib1-s1")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Inker)
+ .WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Inker)
+ .WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Inker)
+ .WithPerson(GetPersonByName(people, "Lib1 Series Person"), PersonRole.Inker)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Team)
+ .WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Team)
+ .WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Team)
+ .WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Team)
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Translator)
+ .WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Translator)
+ .WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Translator)
+ .WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Translator)
+ .Build())
+ .Build())
+ .Build())
+ .Build();
+
+ return new List<Library> { lib0, lib1 };
+ }
+
+ private static Person GetPersonByName(List<Person> people, string name)
+ {
+ return people.First(p => p.Name == name);
+ }
+
+ private Person GetPersonByName(string name)
+ {
+ return Context.Person.First(p => p.Name == name);
+ }
+
+ private static Predicate<BrowsePersonDto> ContainsPersonCheck(Person person)
+ {
+ return p => p.Id == person.Id;
+ }
+
+ [Fact]
+ public async Task GetBrowsePersonDtos()
+ {
+ await ResetDb();
+ await SeedDb();
+
+ // Get people from database for assertions
+ var sharedSeriesChaptersPerson = GetPersonByName("Shared Series Chapter Person");
+ var lib0SeriesPerson = GetPersonByName("Lib0 Series Person");
+ var lib1SeriesPerson = GetPersonByName("Lib1 Series Person");
+ var lib1ChapterAgePerson = GetPersonByName("Lib1 Chapter Age Person");
+ var allPeople = Context.Person.ToList();
+
+ var fullAccessPeople =
+ await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_fullAccess.Id, new BrowsePersonFilterDto(),
+ new UserParams());
+ Assert.Equal(allPeople.Count, fullAccessPeople.TotalCount);
+
+ foreach (var person in allPeople)
+ Assert.Contains(fullAccessPeople, ContainsPersonCheck(person));
+
+ // 1 series in lib0, 2 series in lib1
+ Assert.Equal(3, fullAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).SeriesCount);
+ // 3 series with each 2 chapters
+ Assert.Equal(6, fullAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).ChapterCount);
+ // 1 series in lib0
+ Assert.Equal(1, fullAccessPeople.First(dto => dto.Id == lib0SeriesPerson.Id).SeriesCount);
+ // 2 series in lib1
+ Assert.Equal(2, fullAccessPeople.First(dto => dto.Id == lib1SeriesPerson.Id).SeriesCount);
+
+ var restrictedAccessPeople =
+ await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_restrictedAccess.Id, new BrowsePersonFilterDto(),
+ new UserParams());
+
+ Assert.Equal(7, restrictedAccessPeople.TotalCount);
+
+ Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Series Chapter Person")));
+ Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Series Person")));
+ Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Chapters Person")));
+ Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Series Chapter Person")));
+ Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Series Person")));
+ Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Chapters Person")));
+ Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Chapter Age Person")));
+
+ // 2 series in lib1, no series in lib0
+ Assert.Equal(2, restrictedAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).SeriesCount);
+ // 2 series with each 2 chapters
+ Assert.Equal(4, restrictedAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).ChapterCount);
+ // 2 series in lib1
+ Assert.Equal(2, restrictedAccessPeople.First(dto => dto.Id == lib1SeriesPerson.Id).SeriesCount);
+
+ var restrictedAgeAccessPeople = await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_restrictedAgeAccess.Id,
+ new BrowsePersonFilterDto(), new UserParams());
+
+ // Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
+ Assert.Equal(6, restrictedAgeAccessPeople.TotalCount);
+
+ // No access to the age restricted chapter
+ Assert.DoesNotContain(restrictedAgeAccessPeople, ContainsPersonCheck(lib1ChapterAgePerson));
+ }
+
+ [Fact]
+ public async Task GetRolesForPersonByName()
+ {
+ await ResetDb();
+ await SeedDb();
+
+ var sharedSeriesPerson = GetPersonByName("Shared Series Person");
+ var sharedChaptersPerson = GetPersonByName("Shared Chapters Person");
+ var lib1ChapterAgePerson = GetPersonByName("Lib1 Chapter Age Person");
+
+ var sharedSeriesRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _fullAccess.Id);
+ var chapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _fullAccess.Id);
+ var ageChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _fullAccess.Id);
+ Assert.Equal(3, sharedSeriesRoles.Count());
+ Assert.Equal(6, chapterRoles.Count());
+ Assert.Single(ageChapterRoles);
+
+ var restrictedRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _restrictedAccess.Id);
+ var restrictedChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _restrictedAccess.Id);
+ var restrictedAgePersonChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _restrictedAccess.Id);
+ Assert.Equal(2, restrictedRoles.Count());
+ Assert.Equal(4, restrictedChapterRoles.Count());
+ Assert.Single(restrictedAgePersonChapterRoles);
+
+ var restrictedAgeRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _restrictedAgeAccess.Id);
+ var restrictedAgeChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _restrictedAgeAccess.Id);
+ var restrictedAgeAgePersonChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _restrictedAgeAccess.Id);
+ Assert.Single(restrictedAgeRoles);
+ Assert.Equal(2, restrictedAgeChapterRoles.Count());
+ // Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
+ Assert.Empty(restrictedAgeAgePersonChapterRoles);
+ }
+
+ [Fact]
+ public async Task GetPersonDtoByName()
+ {
+ await ResetDb();
+ await SeedDb();
+
+ var allPeople = Context.Person.ToList();
+
+ foreach (var person in allPeople)
+ {
+ Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName(person.Name, _fullAccess.Id));
+ }
+
+ Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", _restrictedAccess.Id));
+ Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Shared Series Person", _restrictedAccess.Id));
+ Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", _restrictedAccess.Id));
+
+ Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", _restrictedAgeAccess.Id));
+ Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", _restrictedAgeAccess.Id));
+ // Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
+ Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Chapter Age Person", _restrictedAgeAccess.Id));
+ }
+
+ [Fact]
+ public async Task GetSeriesKnownFor()
+ {
+ await ResetDb();
+ await SeedDb();
+
+ var sharedSeriesPerson = GetPersonByName("Shared Series Person");
+ var lib1SeriesPerson = GetPersonByName("Lib1 Series Person");
+
+ var series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _fullAccess.Id);
+ Assert.Equal(3, series.Count());
+
+ series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _restrictedAccess.Id);
+ Assert.Equal(2, series.Count());
+
+ series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _restrictedAgeAccess.Id);
+ Assert.Single(series);
+
+ series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(lib1SeriesPerson.Id, _restrictedAgeAccess.Id);
+ Assert.Single(series);
+ }
+
+ [Fact]
+ public async Task GetChaptersForPersonByRole()
+ {
+ await ResetDb();
+ await SeedDb();
+
+ var sharedChaptersPerson = GetPersonByName("Shared Chapters Person");
+
+ // Lib0
+ var chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Colorist);
+ var restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Colorist);
+ var restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Colorist);
+ Assert.Single(chapters);
+ Assert.Empty(restrictedChapters);
+ Assert.Empty(restrictedAgeChapters);
+
+ // Lib1 - age restricted series
+ chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Imprint);
+ restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Imprint);
+ restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Imprint);
+ Assert.Single(chapters);
+ Assert.Single(restrictedChapters);
+ Assert.Empty(restrictedAgeChapters);
+
+ // Lib1 - not age restricted series
+ chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Team);
+ restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Team);
+ restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Team);
+ Assert.Single(chapters);
+ Assert.Single(restrictedChapters);
+ Assert.Single(restrictedAgeChapters);
+ }
+}
diff --git a/API.Tests/Repository/SeriesRepositoryTests.cs b/API.Tests/Repository/SeriesRepositoryTests.cs
index fe285641e..5705e1bc0 100644
--- a/API.Tests/Repository/SeriesRepositoryTests.cs
+++ b/API.Tests/Repository/SeriesRepositoryTests.cs
@@ -7,6 +7,7 @@ using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Helpers;
+using API.Helpers.Builders;
using API.Services;
using AutoMapper;
using Microsoft.Data.Sqlite;
@@ -18,11 +19,13 @@ using Xunit;
namespace API.Tests.Repository;
+#nullable enable
+
public class SeriesRepositoryTests
{
private readonly IUnitOfWork _unitOfWork;
- private readonly DbConnection _connection;
+ private readonly DbConnection? _connection;
private readonly DataContext _context;
private const string CacheDirectory = "C:/kavita/config/cache/";
@@ -40,7 +43,7 @@ public class SeriesRepositoryTests
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
var mapper = config.CreateMapper();
- _unitOfWork = new UnitOfWork(_context, mapper, null);
+ _unitOfWork = new UnitOfWork(_context, mapper, null!);
}
#region Setup
@@ -70,10 +73,9 @@ public class SeriesRepositoryTests
_context.ServerSetting.Update(setting);
- var lib = new Library()
- {
- Name = "Manga", Folders = new List<FolderPath>() {new FolderPath() {Path = "C:/data/"}}
- };
+ var lib = new LibraryBuilder("Manga")
+ .WithFolderPath(new FolderPathBuilder("C:/data/").Build())
+ .Build();
_context.AppUser.Add(new AppUser()
{
@@ -115,37 +117,36 @@ public class SeriesRepositoryTests
private async Task SetupSeriesData()
{
- var library = new Library()
- {
- Name = "Manga",
- Type = LibraryType.Manga,
- Folders = new List<FolderPath>()
- {
- new FolderPath() {Path = "C:/data/manga/"}
- }
- };
-
- var s = DbFactory.Series("The Idaten Deities Know Only Peace", "Heion Sedai no Idaten-tachi");
- s.Format = MangaFormat.Archive;
-
- library.Series = new List<Series>()
- {
- s,
- };
+ var library = new LibraryBuilder("GetFullSeriesByAnyName Manga", LibraryType.Manga)
+ .WithFolderPath(new FolderPathBuilder("C:/data/manga/").Build())
+ .WithSeries(new SeriesBuilder("The Idaten Deities Know Only Peace")
+ .WithLocalizedName("Heion Sedai no Idaten-tachi")
+ .WithFormat(MangaFormat.Archive)
+ .Build())
+ .WithSeries(new SeriesBuilder("Hitomi-chan is Shy With Strangers")
+ .WithLocalizedName("Hitomi-chan wa Hitomishiri")
+ .WithFormat(MangaFormat.Archive)
+ .Build())
+ .Build();
_unitOfWork.LibraryRepository.Add(library);
await _unitOfWork.CommitAsync();
}
- [InlineData("Heion Sedai no Idaten-tachi", "", MangaFormat.Archive, "The Idaten Deities Know Only Peace")] // Matching on localized name in DB
- [InlineData("Heion Sedai no Idaten-tachi", "", MangaFormat.Pdf, null)]
+ [Theory]
+ [InlineData("The Idaten Deities Know Only Peace", MangaFormat.Archive, "", "The Idaten Deities Know Only Peace")] // Matching on series name in DB
+ [InlineData("Heion Sedai no Idaten-tachi", MangaFormat.Archive, "The Idaten Deities Know Only Peace", "The Idaten Deities Know Only Peace")] // Matching on localized name in DB
+ [InlineData("Heion Sedai no Idaten-tachi", MangaFormat.Pdf, "", null)]
+ [InlineData("Hitomi-chan wa Hitomishiri", MangaFormat.Archive, "", "Hitomi-chan is Shy With Strangers")]
public async Task GetFullSeriesByAnyName_Should(string seriesName, MangaFormat format, string localizedName, string? expected)
{
- var firstSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
+ await ResetDb();
+ await SetupSeriesData();
+
var series =
await _unitOfWork.SeriesRepository.GetFullSeriesByAnyName(seriesName, localizedName,
- 1, format);
+ 2, format, false);
if (expected == null)
{
Assert.Null(series);
@@ -157,6 +158,6 @@ public class SeriesRepositoryTests
}
}
+ // TODO: GetSeriesDtoForLibraryIdV2Async Tests (On Deck)
- //public async Task
}
diff --git a/API.Tests/Repository/TagRepositoryTests.cs b/API.Tests/Repository/TagRepositoryTests.cs
new file mode 100644
index 000000000..229082eb6
--- /dev/null
+++ b/API.Tests/Repository/TagRepositoryTests.cs
@@ -0,0 +1,278 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using API.DTOs.Metadata.Browse;
+using API.Entities;
+using API.Entities.Enums;
+using API.Entities.Metadata;
+using API.Helpers;
+using API.Helpers.Builders;
+using Xunit;
+
+namespace API.Tests.Repository;
+
+public class TagRepositoryTests : AbstractDbTest
+{
+ private AppUser _fullAccess;
+ private AppUser _restrictedAccess;
+ private AppUser _restrictedAgeAccess;
+
+ protected override async Task ResetDb()
+ {
+ Context.Tag.RemoveRange(Context.Tag);
+ Context.Library.RemoveRange(Context.Library);
+ await Context.SaveChangesAsync();
+ }
+
+ private TestTagSet CreateTestTags()
+ {
+ return new TestTagSet
+ {
+ SharedSeriesChaptersTag = new TagBuilder("Shared Series Chapter Tag").Build(),
+ SharedSeriesTag = new TagBuilder("Shared Series Tag").Build(),
+ SharedChaptersTag = new TagBuilder("Shared Chapters Tag").Build(),
+ Lib0SeriesChaptersTag = new TagBuilder("Lib0 Series Chapter Tag").Build(),
+ Lib0SeriesTag = new TagBuilder("Lib0 Series Tag").Build(),
+ Lib0ChaptersTag = new TagBuilder("Lib0 Chapters Tag").Build(),
+ Lib1SeriesChaptersTag = new TagBuilder("Lib1 Series Chapter Tag").Build(),
+ Lib1SeriesTag = new TagBuilder("Lib1 Series Tag").Build(),
+ Lib1ChaptersTag = new TagBuilder("Lib1 Chapters Tag").Build(),
+ Lib1ChapterAgeTag = new TagBuilder("Lib1 Chapter Age Tag").Build()
+ };
+ }
+
+ private async Task SeedDbWithTags(TestTagSet tags)
+ {
+ await CreateTestUsers();
+ await AddTagsToContext(tags);
+ await CreateLibrariesWithTags(tags);
+ await AssignLibrariesToUsers();
+ }
+
+ private async Task CreateTestUsers()
+ {
+ _fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
+ _restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
+ _restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
+ _restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
+ _restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
+
+ Context.Users.Add(_fullAccess);
+ Context.Users.Add(_restrictedAccess);
+ Context.Users.Add(_restrictedAgeAccess);
+ await Context.SaveChangesAsync();
+ }
+
+ private async Task AddTagsToContext(TestTagSet tags)
+ {
+ var allTags = tags.GetAllTags();
+ Context.Tag.AddRange(allTags);
+ await Context.SaveChangesAsync();
+ }
+
+ private async Task CreateLibrariesWithTags(TestTagSet tags)
+ {
+ var lib0 = new LibraryBuilder("lib0")
+ .WithSeries(new SeriesBuilder("lib0-s0")
+ .WithMetadata(new SeriesMetadata
+ {
+ Tags = [tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib0SeriesChaptersTag, tags.Lib0SeriesTag]
+ })
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib0SeriesChaptersTag, tags.Lib0ChaptersTag])
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
+ .Build())
+ .Build())
+ .Build())
+ .Build();
+
+ var lib1 = new LibraryBuilder("lib1")
+ .WithSeries(new SeriesBuilder("lib1-s0")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithTags([tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib1SeriesChaptersTag, tags.Lib1SeriesTag])
+ .WithAgeRating(AgeRating.Mature17Plus)
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag, tags.Lib1ChapterAgeTag])
+ .WithAgeRating(AgeRating.Mature17Plus)
+ .Build())
+ .Build())
+ .Build())
+ .WithSeries(new SeriesBuilder("lib1-s1")
+ .WithMetadata(new SeriesMetadataBuilder()
+ .WithTags([tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib1SeriesChaptersTag, tags.Lib1SeriesTag])
+ .Build())
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
+ .Build())
+ .WithChapter(new ChapterBuilder("2")
+ .WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
+ .WithAgeRating(AgeRating.Mature17Plus)
+ .Build())
+ .Build())
+ .Build())
+ .Build();
+
+ Context.Library.Add(lib0);
+ Context.Library.Add(lib1);
+ await Context.SaveChangesAsync();
+ }
+
+ private async Task AssignLibrariesToUsers()
+ {
+ var lib0 = Context.Library.First(l => l.Name == "lib0");
+ var lib1 = Context.Library.First(l => l.Name == "lib1");
+
+ _fullAccess.Libraries.Add(lib0);
+ _fullAccess.Libraries.Add(lib1);
+ _restrictedAccess.Libraries.Add(lib1);
+ _restrictedAgeAccess.Libraries.Add(lib1);
+
+ await Context.SaveChangesAsync();
+ }
+
+ private static Predicate ContainsTagCheck(Tag tag)
+ {
+ return t => t.Id == tag.Id;
+ }
+
+ private static void AssertTagPresent(IEnumerable tags, Tag expectedTag)
+ {
+ Assert.Contains(tags, ContainsTagCheck(expectedTag));
+ }
+
+ private static void AssertTagNotPresent(IEnumerable tags, Tag expectedTag)
+ {
+ Assert.DoesNotContain(tags, ContainsTagCheck(expectedTag));
+ }
+
+ private static BrowseTagDto GetTagDto(IEnumerable tags, Tag tag)
+ {
+ return tags.First(dto => dto.Id == tag.Id);
+ }
+
+ [Fact]
+ public async Task GetBrowseableTag_FullAccess_ReturnsAllTagsWithCorrectCounts()
+ {
+ // Arrange
+ await ResetDb();
+ var tags = CreateTestTags();
+ await SeedDbWithTags(tags);
+
+ // Act
+ var fullAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_fullAccess.Id, new UserParams());
+
+ // Assert
+ Assert.Equal(tags.GetAllTags().Count, fullAccessTags.TotalCount);
+
+ foreach (var tag in tags.GetAllTags())
+ {
+ AssertTagPresent(fullAccessTags, tag);
+ }
+
+ // Verify counts - 1 series lib0, 2 series lib1 = 3 total series
+ Assert.Equal(3, GetTagDto(fullAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
+ Assert.Equal(6, GetTagDto(fullAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
+ Assert.Equal(1, GetTagDto(fullAccessTags, tags.Lib0SeriesTag).SeriesCount);
+ }
+
+ [Fact]
+ public async Task GetBrowseableTag_RestrictedAccess_ReturnsOnlyAccessibleTags()
+ {
+ // Arrange
+ await ResetDb();
+ var tags = CreateTestTags();
+ await SeedDbWithTags(tags);
+
+ // Act
+ var restrictedAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_restrictedAccess.Id, new UserParams());
+
+ // Assert - Should see: 3 shared + 4 library 1 specific = 7 tags
+ Assert.Equal(7, restrictedAccessTags.TotalCount);
+
+ // Verify shared and Library 1 tags are present
+ AssertTagPresent(restrictedAccessTags, tags.SharedSeriesChaptersTag);
+ AssertTagPresent(restrictedAccessTags, tags.SharedSeriesTag);
+ AssertTagPresent(restrictedAccessTags, tags.SharedChaptersTag);
+ AssertTagPresent(restrictedAccessTags, tags.Lib1SeriesChaptersTag);
+ AssertTagPresent(restrictedAccessTags, tags.Lib1SeriesTag);
+ AssertTagPresent(restrictedAccessTags, tags.Lib1ChaptersTag);
+ AssertTagPresent(restrictedAccessTags, tags.Lib1ChapterAgeTag);
+
+ // Verify Library 0 specific tags are not present
+ AssertTagNotPresent(restrictedAccessTags, tags.Lib0SeriesChaptersTag);
+ AssertTagNotPresent(restrictedAccessTags, tags.Lib0SeriesTag);
+ AssertTagNotPresent(restrictedAccessTags, tags.Lib0ChaptersTag);
+
+ // Verify counts - 2 series lib1
+ Assert.Equal(2, GetTagDto(restrictedAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
+ Assert.Equal(4, GetTagDto(restrictedAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
+ Assert.Equal(2, GetTagDto(restrictedAccessTags, tags.Lib1SeriesTag).SeriesCount);
+ Assert.Equal(4, GetTagDto(restrictedAccessTags, tags.Lib1ChaptersTag).ChapterCount);
+ }
+
+ [Fact]
+ public async Task GetBrowseableTag_RestrictedAgeAccess_FiltersAgeRestrictedContent()
+ {
+ // Arrange
+ await ResetDb();
+ var tags = CreateTestTags();
+ await SeedDbWithTags(tags);
+
+ // Act
+ var restrictedAgeAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_restrictedAgeAccess.Id, new UserParams());
+
+ // Assert - Should see: 3 shared + 3 lib1 specific = 6 tags (age-restricted tag filtered out)
+ Assert.Equal(6, restrictedAgeAccessTags.TotalCount);
+
+ // Verify accessible tags are present
+ AssertTagPresent(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag);
+ AssertTagPresent(restrictedAgeAccessTags, tags.SharedSeriesTag);
+ AssertTagPresent(restrictedAgeAccessTags, tags.SharedChaptersTag);
+ AssertTagPresent(restrictedAgeAccessTags, tags.Lib1SeriesChaptersTag);
+ AssertTagPresent(restrictedAgeAccessTags, tags.Lib1SeriesTag);
+ AssertTagPresent(restrictedAgeAccessTags, tags.Lib1ChaptersTag);
+
+ // Verify age-restricted tag is filtered out
+ AssertTagNotPresent(restrictedAgeAccessTags, tags.Lib1ChapterAgeTag);
+
+ // Verify counts - 1 series lib1 (age-restricted series filtered out)
+ Assert.Equal(1, GetTagDto(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
+ Assert.Equal(2, GetTagDto(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
+ Assert.Equal(1, GetTagDto(restrictedAgeAccessTags, tags.Lib1SeriesTag).SeriesCount);
+ Assert.Equal(2, GetTagDto(restrictedAgeAccessTags, tags.Lib1ChaptersTag).ChapterCount);
+ }
+
+ private class TestTagSet
+ {
+ public Tag SharedSeriesChaptersTag { get; set; }
+ public Tag SharedSeriesTag { get; set; }
+ public Tag SharedChaptersTag { get; set; }
+ public Tag Lib0SeriesChaptersTag { get; set; }
+ public Tag Lib0SeriesTag { get; set; }
+ public Tag Lib0ChaptersTag { get; set; }
+ public Tag Lib1SeriesChaptersTag { get; set; }
+ public Tag Lib1SeriesTag { get; set; }
+ public Tag Lib1ChaptersTag { get; set; }
+ public Tag Lib1ChapterAgeTag { get; set; }
+
+ public List GetAllTags()
+ {
+ return
+ [
+ SharedSeriesChaptersTag, SharedSeriesTag, SharedChaptersTag,
+ Lib0SeriesChaptersTag, Lib0SeriesTag, Lib0ChaptersTag,
+ Lib1SeriesChaptersTag, Lib1SeriesTag, Lib1ChaptersTag, Lib1ChapterAgeTag
+ ];
+ }
+ }
+}
diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs
index b59ee097e..8cf93df37 100644
--- a/API.Tests/Services/ArchiveServiceTests.cs
+++ b/API.Tests/Services/ArchiveServiceTests.cs
@@ -5,7 +5,7 @@ using System.IO.Abstractions.TestingHelpers;
using System.IO.Compression;
using System.Linq;
using API.Archive;
-using API.Data.Metadata;
+using API.Entities.Enums;
using API.Services;
using Microsoft.Extensions.Logging;
using NetVips;
@@ -27,7 +27,9 @@ public class ArchiveServiceTests
public ArchiveServiceTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
- _archiveService = new ArchiveService(_logger, _directoryService, new ImageService(Substitute.For>(), _directoryService));
+ _archiveService = new ArchiveService(_logger, _directoryService,
+ new ImageService(Substitute.For>(), _directoryService),
+ Substitute.For());
}
[Theory]
@@ -153,19 +155,19 @@ public class ArchiveServiceTests
}
- [Theory]
- [InlineData("v10.cbz", "v10.expected.png")]
- [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")]
- [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")]
+ [Theory(Skip = "Cover-image comparison tests break whenever NetVips is updated")]
+ //[InlineData("v10.cbz", "v10.expected.png")] // Commented out as these break usually when NetVips is updated
+ //[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")]
+ //[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")]
[InlineData("macos_native.zip", "macos_native.png")]
- [InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")]
+ //[InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")]
[InlineData("sorting.zip", "sorting.expected.png")]
[InlineData("test.zip", "test.expected.jpg")]
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
{
var ds = Substitute.For(_directoryServiceLogger, new FileSystem());
var imageService = new ImageService(Substitute.For>(), ds);
- var archiveService = Substitute.For(_logger, ds, imageService);
+ var archiveService = Substitute.For(_logger, ds, imageService, Substitute.For());
var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"));
var expectedBytes = Image.Thumbnail(Path.Join(testDirectory, expectedOutputFile), 320).WriteToBuffer(".png");
@@ -177,7 +179,7 @@ public class ArchiveServiceTests
_directoryService.ExistOrCreate(outputDir);
var coverImagePath = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile),
- Path.GetFileNameWithoutExtension(inputFile) + "_output", outputDir);
+ Path.GetFileNameWithoutExtension(inputFile) + "_output", outputDir, EncodeFormat.PNG);
var actual = File.ReadAllBytes(Path.Join(outputDir, coverImagePath));
@@ -186,18 +188,19 @@ public class ArchiveServiceTests
}
- [Theory]
- [InlineData("v10.cbz", "v10.expected.png")]
- [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")]
- [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")]
+ [Theory(Skip = "Cover-image comparison tests break whenever NetVips is updated")]
+ //[InlineData("v10.cbz", "v10.expected.png")] // Commented out as these break usually when NetVips is updated
+ //[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")]
+ //[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")]
[InlineData("macos_native.zip", "macos_native.png")]
- [InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")]
+ //[InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")]
[InlineData("sorting.zip", "sorting.expected.png")]
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
{
var imageService = new ImageService(Substitute.For>(), _directoryService);
var archiveService = Substitute.For(_logger,
- new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService);
+ new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService,
+ Substitute.For());
var testDirectory = API.Services.Tasks.Scanner.Parser.Parser.NormalizePath(Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages")));
var outputDir = Path.Join(testDirectory, "output");
@@ -206,7 +209,7 @@ public class ArchiveServiceTests
archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.SharpCompress);
var coverOutputFile = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile),
- Path.GetFileNameWithoutExtension(inputFile), outputDir);
+ Path.GetFileNameWithoutExtension(inputFile), outputDir, EncodeFormat.PNG);
var actualBytes = File.ReadAllBytes(Path.Join(outputDir, coverOutputFile));
var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile));
Assert.Equal(expectedBytes, actualBytes);
@@ -220,13 +223,14 @@ public class ArchiveServiceTests
public void CanParseCoverImage(string inputFile)
{
var imageService = Substitute.For();
- imageService.WriteCoverThumbnail(Arg.Any(), Arg.Any(), Arg.Any()).Returns(x => "cover.jpg");
- var archiveService = new ArchiveService(_logger, _directoryService, imageService);
+ imageService.WriteCoverThumbnail(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any())
+ .Returns(x => "cover.jpg");
+ var archiveService = new ArchiveService(_logger, _directoryService, imageService, Substitute.For());
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/");
var inputPath = Path.GetFullPath(Path.Join(testDirectory, inputFile));
var outputPath = Path.Join(testDirectory, Path.GetFileNameWithoutExtension(inputFile) + "_output");
new DirectoryInfo(outputPath).Create();
- var expectedImage = archiveService.GetCoverImage(inputPath, inputFile, outputPath);
+ var expectedImage = archiveService.GetCoverImage(inputPath, inputFile, outputPath, EncodeFormat.PNG);
Assert.Equal("cover.jpg", expectedImage);
new DirectoryInfo(outputPath).Delete();
}
@@ -245,6 +249,17 @@ public class ArchiveServiceTests
Assert.Equal(summaryInfo, comicInfo.Summary);
}
+ [Fact]
+ public void ShouldHaveComicInfo_CanParseUmlaut()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
+ var archive = Path.Join(testDirectory, "Umlaut.zip");
+
+ var comicInfo = _archiveService.GetComicInfo(archive);
+ Assert.NotNull(comicInfo);
+ Assert.Equal("Belladonna", comicInfo.Series);
+ }
+
[Fact]
public void ShouldHaveComicInfo_WithAuthors()
{
@@ -359,7 +374,7 @@ public class ArchiveServiceTests
#region CreateZipForDownload
- //[Fact]
+ [Fact(Skip = "Disabled - TODO: document why this test is skipped")]
public void CreateZipForDownloadTest()
{
var fileSystem = new MockFileSystem();
diff --git a/API.Tests/Services/BackupServiceTests.cs b/API.Tests/Services/BackupServiceTests.cs
index 783e0b62d..aac5724f7 100644
--- a/API.Tests/Services/BackupServiceTests.cs
+++ b/API.Tests/Services/BackupServiceTests.cs
@@ -1,18 +1,14 @@
-using System.Collections.Generic;
-using System.Data.Common;
+using System.Data.Common;
using System.IO.Abstractions.TestingHelpers;
-using System.IO.Compression;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
-using API.Entities;
using API.Entities.Enums;
-using API.Extensions;
+using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks;
using API.SignalR;
using AutoMapper;
-using Microsoft.AspNetCore.SignalR;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
@@ -23,7 +19,7 @@ using Xunit;
namespace API.Tests.Services;
-public class BackupServiceTests
+public class BackupServiceTests: AbstractFsTest
{
private readonly ILogger _logger = Substitute.For>();
private readonly IUnitOfWork _unitOfWork;
@@ -33,13 +29,6 @@ public class BackupServiceTests
private readonly DbConnection _connection;
private readonly DataContext _context;
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string LogDirectory = "C:/kavita/config/logs/";
- private const string ConfigDirectory = "C:/kavita/config/";
- private const string BookmarkDirectory = "C:/kavita/config/bookmarks";
- private const string ThemesDirectory = "C:/kavita/config/theme";
public BackupServiceTests()
{
@@ -83,18 +72,9 @@ public class BackupServiceTests
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
-
- _context.Library.Add(new Library()
- {
- Name = "Manga",
- Folders = new List()
- {
- new FolderPath()
- {
- Path = "C:/data/"
- }
- }
- });
+ _context.Library.Add(new LibraryBuilder("Manga")
+ .WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
+ .Build());
return await _context.SaveChangesAsync() > 0;
}
@@ -105,22 +85,6 @@ public class BackupServiceTests
await _context.SaveChangesAsync();
}
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(LogDirectory);
- fileSystem.AddDirectory(ThemesDirectory);
- fileSystem.AddDirectory(BookmarkDirectory);
- fileSystem.AddDirectory("C:/data/");
-
- return fileSystem;
- }
-
#endregion
diff --git a/API.Tests/Services/BookServiceTests.cs b/API.Tests/Services/BookServiceTests.cs
index 4665ab691..5848c74ba 100644
--- a/API.Tests/Services/BookServiceTests.cs
+++ b/API.Tests/Services/BookServiceTests.cs
@@ -1,6 +1,8 @@
using System.IO;
using System.IO.Abstractions;
+using API.Entities.Enums;
using API.Services;
+using API.Services.Tasks.Scanner.Parser;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
@@ -15,7 +17,9 @@ public class BookServiceTests
public BookServiceTests()
{
var directoryService = new DirectoryService(Substitute.For>(), new FileSystem());
- _bookService = new BookService(_logger, directoryService, new ImageService(Substitute.For>(), directoryService));
+ _bookService = new BookService(_logger, directoryService,
+ new ImageService(Substitute.For>(), directoryService)
+ , Substitute.For());
}
[Theory]
@@ -78,4 +82,64 @@ public class BookServiceTests
Assert.Equal("Accel World", comicInfo.Series);
}
+ [Fact]
+ public void ShouldHaveComicInfoForPdf()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+ var document = Path.Join(testDirectory, "test.pdf");
+ var comicInfo = _bookService.GetComicInfo(document);
+ Assert.NotNull(comicInfo);
+ Assert.Equal("Variations Chromatiques de concert", comicInfo.Title);
+ Assert.Equal("Georges Bizet \\(1838-1875\\)", comicInfo.Writer);
+ }
+
+ //[Fact]
+ public void ShouldUsePdfInfoDict()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Library/Books/PDFs");
+ var document = Path.Join(testDirectory, "Rollo at Work SP01.pdf");
+ var comicInfo = _bookService.GetComicInfo(document);
+ Assert.NotNull(comicInfo);
+ Assert.Equal("Rollo at Work", comicInfo.Title);
+ Assert.Equal("Jacob Abbott", comicInfo.Writer);
+ Assert.Equal(2008, comicInfo.Year);
+ }
+
+ [Fact]
+ public void ShouldHandleIndirectPdfObjects()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+ var document = Path.Join(testDirectory, "indirect.pdf");
+ var comicInfo = _bookService.GetComicInfo(document);
+ Assert.NotNull(comicInfo);
+ Assert.Equal(2018, comicInfo.Year);
+ Assert.Equal(8, comicInfo.Month);
+ }
+
+ [Fact]
+ public void FailGracefullyWithEncryptedPdf()
+ {
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+ var document = Path.Join(testDirectory, "encrypted.pdf");
+ var comicInfo = _bookService.GetComicInfo(document);
+ Assert.Null(comicInfo);
+ }
+
+ [Fact]
+ public void SeriesFallBackToMetadataTitle()
+ {
+ var ds = new DirectoryService(Substitute.For>(), new FileSystem());
+ var pdfParser = new PdfParser(ds);
+
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
+ var filePath = Path.Join(testDirectory, "Bizet-Variations_Chromatiques_de_concert_Theme_A4.pdf");
+
+ var comicInfo = _bookService.GetComicInfo(filePath);
+ Assert.NotNull(comicInfo);
+
+ var parserInfo = pdfParser.Parse(filePath, testDirectory, ds.GetParentDirectoryName(testDirectory), LibraryType.Book, true, comicInfo);
+ Assert.NotNull(parserInfo);
+ Assert.Equal(parserInfo.Title, comicInfo.Title);
+ Assert.Equal(parserInfo.Series, comicInfo.Title);
+ }
}
diff --git a/API.Tests/Services/BookmarkServiceTests.cs b/API.Tests/Services/BookmarkServiceTests.cs
index 97c07a281..596fbbc4d 100644
--- a/API.Tests/Services/BookmarkServiceTests.cs
+++ b/API.Tests/Services/BookmarkServiceTests.cs
@@ -1,5 +1,4 @@
-using System;
-using System.Collections.Generic;
+using System.Collections.Generic;
using System.Data.Common;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
@@ -11,8 +10,8 @@ using API.DTOs.Reader;
using API.Entities;
using API.Entities.Enums;
using API.Helpers;
+using API.Helpers.Builders;
using API.Services;
-using API.SignalR;
using AutoMapper;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
@@ -23,17 +22,12 @@ using Xunit;
namespace API.Tests.Services;
-public class BookmarkServiceTests
+public class BookmarkServiceTests: AbstractFsTest
{
private readonly IUnitOfWork _unitOfWork;
private readonly DbConnection _connection;
private readonly DataContext _context;
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
-
public BookmarkServiceTests()
{
@@ -53,7 +47,7 @@ public class BookmarkServiceTests
private BookmarkService Create(IDirectoryService ds)
{
return new BookmarkService(Substitute.For>(), _unitOfWork, ds,
- Substitute.For(), Substitute.For());
+Substitute.For());
}
#region Setup
@@ -85,17 +79,9 @@ public class BookmarkServiceTests
_context.ServerSetting.Update(setting);
- _context.Library.Add(new Library()
- {
- Name = "Manga",
- Folders = new List()
- {
- new FolderPath()
- {
- Path = "C:/data/"
- }
- }
- });
+ _context.Library.Add(new LibraryBuilder("Manga")
+ .WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
+ .Build());
return await _context.SaveChangesAsync() > 0;
}
@@ -108,20 +94,6 @@ public class BookmarkServiceTests
await _context.SaveChangesAsync();
}
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(BookmarkDirectory);
- fileSystem.AddDirectory("C:/data/");
-
- return fileSystem;
- }
-
#endregion
#region BookmarkPage
@@ -136,27 +108,16 @@ public class BookmarkServiceTests
// Delete all Series to reset state
await ResetDB();
- _context.Series.Add(new Series()
- {
- Name = "Test",
- Library = new Library() {
- Name = "Test LIb",
- Type = LibraryType.Manga,
- },
- Volumes = new List()
- {
- new Volume()
- {
- Chapters = new List()
- {
- new Chapter()
- {
+ var series = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(new ChapterBuilder("1")
+ .Build())
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb").Build();
+ _context.Series.Add(series);
- }
- }
- }
- }
- });
_context.AppUser.Add(new AppUser()
{
@@ -180,7 +141,7 @@ public class BookmarkServiceTests
Assert.True(result);
- Assert.Equal(1, ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories).Count());
+ Assert.Single(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories));
Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
}
@@ -194,27 +155,17 @@ public class BookmarkServiceTests
// Delete all Series to reset state
await ResetDB();
- _context.Series.Add(new Series()
- {
- Name = "Test",
- Library = new Library() {
- Name = "Test LIb",
- Type = LibraryType.Manga,
- },
- Volumes = new List()
- {
- new Volume()
- {
- Chapters = new List()
- {
- new Chapter()
- {
+ var series = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder("1")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .Build())
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb").Build();
- }
- }
- }
- }
- });
+ _context.Series.Add(series);
_context.AppUser.Add(new AppUser()
@@ -250,7 +201,7 @@ public class BookmarkServiceTests
Assert.True(result);
- Assert.Equal(0, ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories).Count());
+ Assert.Empty(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories));
Assert.Null(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
}
@@ -270,28 +221,17 @@ public class BookmarkServiceTests
// Delete all Series to reset state
await ResetDB();
- _context.Series.Add(new Series()
- {
- Name = "Test",
- Library = new Library() {
- Name = "Test LIb",
- Type = LibraryType.Manga,
- },
- Volumes = new List()
- {
- new Volume()
- {
- Chapters = new List()
- {
- new Chapter()
- {
-
- }
- }
- }
- }
- });
+ var series = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder("1")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder("1")
+ .Build())
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb").Build();
+ _context.Series.Add(series);
_context.AppUser.Add(new AppUser()
{
@@ -342,7 +282,7 @@ public class BookmarkServiceTests
Assert.Equal(2, ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories).Count());
- Assert.False(ds.FileSystem.FileInfo.FromFileName(Path.Join(BookmarkDirectory, "1/1/1/0001.jpg")).Exists);
+ Assert.False(ds.FileSystem.FileInfo.New(Path.Join(BookmarkDirectory, "1/1/1/0001.jpg")).Exists);
}
#endregion
@@ -357,27 +297,18 @@ public class BookmarkServiceTests
// Delete all Series to reset state
await ResetDB();
- _context.Series.Add(new Series()
- {
- Name = "Test",
- Library = new Library() {
- Name = "Test LIb",
- Type = LibraryType.Manga,
- },
- Volumes = new List()
- {
- new Volume()
- {
- Chapters = new List()
- {
- new Chapter()
- {
+ var series = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder("1")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder("1")
+ .Build())
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb").Build();
+
+ _context.Series.Add(series);
- }
- }
- }
- }
- });
_context.AppUser.Add(new AppUser()
{
@@ -419,28 +350,17 @@ public class BookmarkServiceTests
// Delete all Series to reset state
await ResetDB();
- _context.Series.Add(new Series()
- {
- Name = "Test",
- Library = new Library() {
- Name = "Test LIb",
- Type = LibraryType.Manga,
- },
- Volumes = new List()
- {
- new Volume()
- {
- Chapters = new List()
- {
- new Chapter()
- {
-
- }
- }
- }
- }
- });
+ var series = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder("1")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder("1")
+ .Build())
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb").Build();
+ _context.Series.Add(series);
_context.AppUser.Add(new AppUser()
{
@@ -469,7 +389,7 @@ public class BookmarkServiceTests
await _unitOfWork.CommitAsync();
- Assert.Equal(1, ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories).Count());
+ Assert.Single(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories));
Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
}
@@ -483,28 +403,15 @@ public class BookmarkServiceTests
// Delete all Series to reset state
await ResetDB();
- var series = new Series()
- {
- Name = "Test",
- Library = new Library()
- {
- Name = "Test LIb",
- Type = LibraryType.Manga,
- },
- Volumes = new List()
- {
- new Volume()
- {
- Chapters = new List()
- {
- new Chapter()
- {
-
- }
- }
- }
- }
- };
+ var series = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder("1")
+ .WithMinNumber(1)
+ .WithChapter(new ChapterBuilder("1")
+ .Build())
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb").Build();
_context.Series.Add(series);
@@ -528,7 +435,7 @@ public class BookmarkServiceTests
await _context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
- Assert.NotEmpty(user.Bookmarks);
+ Assert.NotEmpty(user!.Bookmarks);
series.Volumes = new List();
_unitOfWork.SeriesRepository.Update(series);
diff --git a/API.Tests/Services/CacheServiceTests.cs b/API.Tests/Services/CacheServiceTests.cs
index e3be8dce5..caf1ae393 100644
--- a/API.Tests/Services/CacheServiceTests.cs
+++ b/API.Tests/Services/CacheServiceTests.cs
@@ -1,15 +1,14 @@
-using System.Collections.Generic;
-using System.Data.Common;
+using System.Data.Common;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
-using API.Entities;
using API.Entities.Enums;
-using API.Parser;
+using API.Helpers.Builders;
using API.Services;
+using API.Services.Tasks.Scanner.Parser;
using API.SignalR;
using AutoMapper;
using Microsoft.AspNetCore.SignalR;
@@ -41,7 +40,7 @@ internal class MockReadingItemServiceForCacheService : IReadingItemService
return 1;
}
- public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format)
+ public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format, EncodeFormat encodeFormat, CoverImageSize size = CoverImageSize.Default)
{
return string.Empty;
}
@@ -51,17 +50,17 @@ internal class MockReadingItemServiceForCacheService : IReadingItemService
throw new System.NotImplementedException();
}
- public ParserInfo Parse(string path, string rootPath, LibraryType type)
+ public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata = true)
{
throw new System.NotImplementedException();
}
- public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
+ public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata = true)
{
throw new System.NotImplementedException();
}
}
-public class CacheServiceTests
+public class CacheServiceTests: AbstractFsTest
{
private readonly ILogger _logger = Substitute.For>();
private readonly IUnitOfWork _unitOfWork;
@@ -70,11 +69,6 @@ public class CacheServiceTests
private readonly DbConnection _connection;
private readonly DataContext _context;
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string DataDirectory = "C:/data/";
-
public CacheServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
@@ -116,17 +110,9 @@ public class CacheServiceTests
_context.ServerSetting.Update(setting);
- _context.Library.Add(new Library()
- {
- Name = "Manga",
- Folders = new List()
- {
- new FolderPath()
- {
- Path = "C:/data/"
- }
- }
- });
+ _context.Library.Add(new LibraryBuilder("Manga")
+ .WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
+ .Build());
return await _context.SaveChangesAsync() > 0;
}
@@ -137,19 +123,6 @@ public class CacheServiceTests
await _context.SaveChangesAsync();
}
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(DataDirectory);
-
- return fileSystem;
- }
-
#endregion
#region Ensure
@@ -163,23 +136,16 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(),
+ Substitute.For(), ds, Substitute.For>()),
+ Substitute.For());
await ResetDB();
- var s = DbFactory.Series("Test");
- var v = DbFactory.Volume("1");
- var c = new Chapter()
- {
- Number = "1",
- Files = new List()
- {
- new MangaFile()
- {
- Format = MangaFormat.Archive,
- FilePath = $"{DataDirectory}Test v1.zip",
- }
- }
- };
+ var s = new SeriesBuilder("Test").Build();
+ var v = new VolumeBuilder("1").Build();
+ var c = new ChapterBuilder("1")
+ .WithFile(new MangaFileBuilder($"{DataDirectory}Test v1.zip", MangaFormat.Archive).Build())
+ .Build();
v.Chapters.Add(c);
s.Volumes.Add(v);
s.LibraryId = 1;
@@ -206,8 +172,8 @@ public class CacheServiceTests
// new ReadingItemService(archiveService, Substitute.For(), Substitute.For(), ds));
//
// await ResetDB();
- // var s = DbFactory.Series("Test");
- // var v = DbFactory.Volume("1");
+ // var s = new SeriesBuilder("Test").Build();
+ // var v = new VolumeBuilder("1").Build();
// var c = new Chapter()
// {
// Number = "1",
@@ -247,7 +213,8 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For>()),
+ Substitute.For());
cleanupService.CleanupChapters(new []{1, 3});
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
@@ -268,24 +235,15 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For>()),
+ Substitute.For());
- var c = new Chapter()
- {
- Files = new List()
- {
- new MangaFile()
- {
- FilePath = $"{DataDirectory}1.epub"
- },
- new MangaFile()
- {
- FilePath = $"{DataDirectory}2.epub"
- }
- }
- };
+ var c = new ChapterBuilder("1")
+ .WithFile(new MangaFileBuilder($"{DataDirectory}1.epub", MangaFormat.Epub).Build())
+ .WithFile(new MangaFileBuilder($"{DataDirectory}2.epub", MangaFormat.Epub).Build())
+ .Build();
cs.GetCachedFile(c);
- Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c));
+ Assert.Equal($"{DataDirectory}1.epub", cs.GetCachedFile(c));
}
#endregion
@@ -300,11 +258,9 @@ public class CacheServiceTests
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
- var c = new Chapter()
- {
- Id = 1,
- Files = new List()
- };
+ var c = new ChapterBuilder("1")
+ .WithId(1)
+ .Build();
var fileIndex = 0;
foreach (var file in c.Files)
@@ -320,12 +276,13 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For>()),
+ Substitute.For());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
- var path = cs.GetCachedPagePath(c, 11);
+ var path = cs.GetCachedPagePath(c.Id, 11);
Assert.Equal(string.Empty, path);
}
@@ -337,26 +294,17 @@ public class CacheServiceTests
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
- var c = new Chapter()
- {
- Id = 1,
- Files = new List()
- {
- new MangaFile()
- {
- Id = 1,
- FilePath = $"{DataDirectory}1.zip",
- Pages = 10
-
- },
- new MangaFile()
- {
- Id = 2,
- FilePath = $"{DataDirectory}2.zip",
- Pages = 5
- }
- }
- };
+ var c = new ChapterBuilder("1")
+ .WithId(1)
+ .WithFile(new MangaFileBuilder($"{DataDirectory}1.zip", MangaFormat.Archive)
+ .WithPages(10)
+ .WithId(1)
+ .Build())
+ .WithFile(new MangaFileBuilder($"{DataDirectory}2.zip", MangaFormat.Archive)
+ .WithPages(5)
+ .WithId(2)
+ .Build())
+ .Build();
var fileIndex = 0;
foreach (var file in c.Files)
@@ -372,12 +320,13 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For>()),
+ Substitute.For());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
- Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c, 0)));
+ Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c.Id, 0)));
}
@@ -389,20 +338,13 @@ public class CacheServiceTests
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
- var c = new Chapter()
- {
- Id = 1,
- Files = new List()
- {
- new MangaFile()
- {
- Id = 1,
- FilePath = $"{DataDirectory}1.zip",
- Pages = 10
-
- }
- }
- };
+ var c = new ChapterBuilder("1")
+ .WithId(1)
+ .WithFile(new MangaFileBuilder($"{DataDirectory}1.zip", MangaFormat.Archive)
+ .WithPages(10)
+ .WithId(1)
+ .Build())
+ .Build();
c.Pages = c.Files.Sum(f => f.Pages);
var fileIndex = 0;
@@ -419,13 +361,14 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For>()),
+ Substitute.For());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
// Remember that we start at 0, so this is the 10th file
- var path = cs.GetCachedPagePath(c, c.Pages);
+ var path = cs.GetCachedPagePath(c.Id, c.Pages);
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_0{c.Pages}.jpg"), ds.FileSystem.Path.GetFullPath(path));
}
@@ -437,26 +380,17 @@ public class CacheServiceTests
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
- var c = new Chapter()
- {
- Id = 1,
- Files = new List()
- {
- new MangaFile()
- {
- Id = 1,
- FilePath = $"{DataDirectory}1.zip",
- Pages = 10
-
- },
- new MangaFile()
- {
- Id = 2,
- FilePath = $"{DataDirectory}2.zip",
- Pages = 5
- }
- }
- };
+ var c = new ChapterBuilder("1")
+ .WithId(1)
+ .WithFile(new MangaFileBuilder($"{DataDirectory}1.zip", MangaFormat.Archive)
+ .WithPages(10)
+ .WithId(1)
+ .Build())
+ .WithFile(new MangaFileBuilder($"{DataDirectory}2.zip", MangaFormat.Archive)
+ .WithPages(5)
+ .WithId(2)
+ .Build())
+ .Build();
var fileIndex = 0;
foreach (var file in c.Files)
@@ -472,13 +406,14 @@ public class CacheServiceTests
var ds = new DirectoryService(Substitute.For>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For(),
- Substitute.For(), Substitute.For(), ds), Substitute.For());
+ Substitute.For(), Substitute.For(), ds, Substitute.For>()),
+ Substitute.For());
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
// Remember that we start at 0, so this is the page + 1 file
- var path = cs.GetCachedPagePath(c, 10);
+ var path = cs.GetCachedPagePath(c.Id, 10);
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/001_001.jpg"), ds.FileSystem.Path.GetFullPath(path));
}
diff --git a/API.Tests/Services/CleanupServiceTests.cs b/API.Tests/Services/CleanupServiceTests.cs
index 5c60baf4d..b0610aed5 100644
--- a/API.Tests/Services/CleanupServiceTests.cs
+++ b/API.Tests/Services/CleanupServiceTests.cs
@@ -1,135 +1,57 @@
using System;
using System.Collections.Generic;
-using System.Data.Common;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
-using API.Data;
-using API.DTOs.Settings;
+using API.Data.Repositories;
+using API.DTOs.Filtering;
using API.Entities;
using API.Entities.Enums;
+using API.Extensions;
using API.Helpers;
-using API.Helpers.Converters;
+using API.Helpers.Builders;
using API.Services;
+using API.Services.Plus;
using API.Services.Tasks;
using API.SignalR;
-using API.Tests.Helpers;
-using AutoMapper;
-using Microsoft.AspNetCore.SignalR;
-using Microsoft.Data.Sqlite;
-using Microsoft.EntityFrameworkCore;
-using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services;
-public class CleanupServiceTests
+public class CleanupServiceTests : AbstractDbTest
{
private readonly ILogger _logger = Substitute.For>();
- private readonly IUnitOfWork _unitOfWork;
private readonly IEventHub _messageHub = Substitute.For();
+ private readonly IReaderService _readerService;
- private readonly DbConnection _connection;
- private readonly DataContext _context;
-
- private const string CacheDirectory = "C:/kavita/config/cache/";
- private const string CoverImageDirectory = "C:/kavita/config/covers/";
- private const string BackupDirectory = "C:/kavita/config/backups/";
- private const string LogDirectory = "C:/kavita/config/logs/";
- private const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
-
-
- public CleanupServiceTests()
+ public CleanupServiceTests() : base()
{
- var contextOptions = new DbContextOptionsBuilder()
- .UseSqlite(CreateInMemoryDatabase())
- .Options;
- _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
+ Context.Library.Add(new LibraryBuilder("Manga")
+ .WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
+ .Build());
- _context = new DataContext(contextOptions);
- Task.Run(SeedDb).GetAwaiter().GetResult();
-
- var config = new MapperConfiguration(cfg => cfg.AddProfile());
- var mapper = config.CreateMapper();
-
- _unitOfWork = new UnitOfWork(_context, mapper, null);
+ _readerService = new ReaderService(UnitOfWork, Substitute.For>(), Substitute.For(),
+ Substitute.For(),
+ new DirectoryService(Substitute.For>(), new MockFileSystem()), Substitute.For());
}
#region Setup
- private static DbConnection CreateInMemoryDatabase()
+
+ protected override async Task ResetDb()
{
- var connection = new SqliteConnection("Filename=:memory:");
+ Context.Series.RemoveRange(Context.Series.ToList());
+ Context.Users.RemoveRange(Context.Users.ToList());
+ Context.AppUserBookmark.RemoveRange(Context.AppUserBookmark.ToList());
- connection.Open();
-
- return connection;
- }
-
- private async Task SeedDb()
- {
- await _context.Database.MigrateAsync();
- var filesystem = CreateFileSystem();
-
- await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem));
-
- var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
- setting.Value = CacheDirectory;
-
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
- setting.Value = BackupDirectory;
-
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
- setting.Value = BookmarkDirectory;
-
- setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
- setting.Value = "10";
-
- _context.ServerSetting.Update(setting);
-
- _context.Library.Add(new Library()
- {
- Name = "Manga",
- Folders = new List()
- {
- new FolderPath()
- {
- Path = "C:/data/"
- }
- }
- });
- return await _context.SaveChangesAsync() > 0;
- }
-
- private async Task ResetDB()
- {
- _context.Series.RemoveRange(_context.Series.ToList());
- _context.Users.RemoveRange(_context.Users.ToList());
- _context.AppUserBookmark.RemoveRange(_context.AppUserBookmark.ToList());
-
- await _context.SaveChangesAsync();
- }
-
- private static MockFileSystem CreateFileSystem()
- {
- var fileSystem = new MockFileSystem();
- fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
- fileSystem.AddDirectory("C:/kavita/config/");
- fileSystem.AddDirectory(CacheDirectory);
- fileSystem.AddDirectory(CoverImageDirectory);
- fileSystem.AddDirectory(BackupDirectory);
- fileSystem.AddDirectory(BookmarkDirectory);
- fileSystem.AddDirectory("C:/data/");
-
- return fileSystem;
+ await Context.SaveChangesAsync();
}
#endregion
-
#region DeleteSeriesCoverImages
[Fact]
@@ -141,23 +63,23 @@ public class CleanupServiceTests
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetSeriesFormat(1000)}.jpg", new MockFileData(""));
// Delete all Series to reset state
- await ResetDB();
+ await ResetDb();
- var s = DbFactory.Series("Test 1");
+ var s = new SeriesBuilder("Test 1").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
s.LibraryId = 1;
- _context.Series.Add(s);
- s = DbFactory.Series("Test 2");
+ Context.Series.Add(s);
+ s = new SeriesBuilder("Test 2").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
s.LibraryId = 1;
- _context.Series.Add(s);
- s = DbFactory.Series("Test 3");
+ Context.Series.Add(s);
+ s = new SeriesBuilder("Test 3").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(1000)}.jpg";
s.LibraryId = 1;
- _context.Series.Add(s);
+ Context.Series.Add(s);
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
await cleanupService.DeleteSeriesCoverImages();
@@ -174,22 +96,22 @@ public class CleanupServiceTests
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetSeriesFormat(1000)}.jpg", new MockFileData(""));
// Delete all Series to reset state
- await ResetDB();
+ await ResetDb();
// Add 2 series with cover images
- var s = DbFactory.Series("Test 1");
+ var s = new SeriesBuilder("Test 1").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
s.LibraryId = 1;
- _context.Series.Add(s);
- s = DbFactory.Series("Test 2");
+ Context.Series.Add(s);
+ s = new SeriesBuilder("Test 2").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
s.LibraryId = 1;
- _context.Series.Add(s);
+ Context.Series.Add(s);
- await _context.SaveChangesAsync();
+ await Context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
await cleanupService.DeleteSeriesCoverImages();
@@ -208,37 +130,31 @@ public class CleanupServiceTests
filesystem.AddFile($"{CoverImageDirectory}v01_c1000.jpg", new MockFileData(""));
// Delete all Series to reset state
- await ResetDB();
+ await ResetDb();
// Add 2 series with cover images
- var s = DbFactory.Series("Test 1");
- var v = DbFactory.Volume("1");
- v.Chapters.Add(new Chapter()
- {
- CoverImage = "v01_c01.jpg"
- });
- v.CoverImage = "v01_c01.jpg";
- s.Volumes.Add(v);
- s.CoverImage = "series_01.jpg";
- s.LibraryId = 1;
- _context.Series.Add(s);
+ Context.Series.Add(new SeriesBuilder("Test 1")
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c01.jpg").Build())
+ .WithCoverImage("v01_c01.jpg")
+ .Build())
+ .WithCoverImage("series_01.jpg")
+ .WithLibraryId(1)
+ .Build());
- s = DbFactory.Series("Test 2");
- v = DbFactory.Volume("1");
- v.Chapters.Add(new Chapter()
- {
- CoverImage = "v01_c03.jpg"
- });
- v.CoverImage = "v01_c03jpg";
- s.Volumes.Add(v);
- s.CoverImage = "series_03.jpg";
- s.LibraryId = 1;
- _context.Series.Add(s);
+ Context.Series.Add(new SeriesBuilder("Test 2")
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c03.jpg").Build())
+ .WithCoverImage("v01_c03.jpg")
+ .Build())
+ .WithCoverImage("series_03.jpg")
+ .WithLibraryId(1)
+ .Build());
- await _context.SaveChangesAsync();
+ await Context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
await cleanupService.DeleteChapterCoverImages();
@@ -247,54 +163,53 @@ public class CleanupServiceTests
}
#endregion
- #region DeleteTagCoverImages
-
- [Fact]
- public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
- {
- var filesystem = CreateFileSystem();
- filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1)}.jpg", new MockFileData(""));
- filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(2)}.jpg", new MockFileData(""));
- filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
-
- // Delete all Series to reset state
- await ResetDB();
-
- // Add 2 series with cover images
- var s = DbFactory.Series("Test 1");
- s.Metadata.CollectionTags = new List();
- s.Metadata.CollectionTags.Add(new CollectionTag()
- {
- Title = "Something",
- CoverImage = $"{ImageService.GetCollectionTagFormat(1)}.jpg"
- });
- s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
- s.LibraryId = 1;
- _context.Series.Add(s);
-
- s = DbFactory.Series("Test 2");
- s.Metadata.CollectionTags = new List();
- s.Metadata.CollectionTags.Add(new CollectionTag()
- {
- Title = "Something 2",
- CoverImage = $"{ImageService.GetCollectionTagFormat(2)}.jpg"
- });
- s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
- s.LibraryId = 1;
- _context.Series.Add(s);
-
-
- await _context.SaveChangesAsync();
- var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
- ds);
-
- await cleanupService.DeleteTagCoverImages();
-
- Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
- }
-
- #endregion
+ // #region DeleteTagCoverImages
+ //
+ // [Fact]
+ // public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
+ // {
+ // var filesystem = CreateFileSystem();
+ // filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1)}.jpg", new MockFileData(""));
+ // filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(2)}.jpg", new MockFileData(""));
+ // filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
+ //
+ // // Delete all Series to reset state
+ // await ResetDb();
+ //
+ // // Add 2 series with cover images
+ //
+ // _context.Series.Add(new SeriesBuilder("Test 1")
+ // .WithMetadata(new SeriesMetadataBuilder()
+ // .WithCollectionTag(new AppUserCollectionBuilder("Something")
+ // .WithCoverImage($"{ImageService.GetCollectionTagFormat(1)}.jpg")
+ // .Build())
+ // .Build())
+ // .WithCoverImage($"{ImageService.GetSeriesFormat(1)}.jpg")
+ // .WithLibraryId(1)
+ // .Build());
+ //
+ // _context.Series.Add(new SeriesBuilder("Test 2")
+ // .WithMetadata(new SeriesMetadataBuilder()
+ // .WithCollectionTag(new AppUserCollectionBuilder("Something 2")
+ // .WithCoverImage($"{ImageService.GetCollectionTagFormat(2)}.jpg")
+ // .Build())
+ // .Build())
+ // .WithCoverImage($"{ImageService.GetSeriesFormat(3)}.jpg")
+ // .WithLibraryId(1)
+ // .Build());
+ //
+ //
+ // await _context.SaveChangesAsync();
+ // var ds = new DirectoryService(Substitute.For>(), filesystem);
+ // var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ // ds);
+ //
+ // await cleanupService.DeleteTagCoverImages();
+ //
+ // Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
+ // }
+ //
+ // #endregion
#region DeleteReadingListCoverImages
[Fact]
@@ -306,31 +221,27 @@ public class CleanupServiceTests
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetReadingListFormat(3)}.jpg", new MockFileData(""));
// Delete all Series to reset state
- await ResetDB();
+ await ResetDb();
- _context.Users.Add(new AppUser()
+ Context.Users.Add(new AppUser()
{
UserName = "Joe",
ReadingLists = new List()
{
- new ReadingList()
- {
- Title = "Something",
- NormalizedTitle = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Something"),
- CoverImage = $"{ImageService.GetReadingListFormat(1)}.jpg"
- },
- new ReadingList()
- {
- Title = "Something 2",
- NormalizedTitle = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Something 2"),
- CoverImage = $"{ImageService.GetReadingListFormat(2)}.jpg"
- }
+ new ReadingListBuilder("Something")
+ .WithRating(AgeRating.Unknown)
+ .WithCoverImage($"{ImageService.GetReadingListFormat(1)}.jpg")
+ .Build(),
+ new ReadingListBuilder("Something 2")
+ .WithRating(AgeRating.Unknown)
+ .WithCoverImage($"{ImageService.GetReadingListFormat(2)}.jpg")
+ .Build(),
}
});
- await _context.SaveChangesAsync();
+ await Context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
await cleanupService.DeleteReadingListCoverImages();
@@ -349,7 +260,7 @@ public class CleanupServiceTests
filesystem.AddFile($"{CacheDirectory}02.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
cleanupService.CleanupCacheAndTempDirectories();
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
@@ -363,7 +274,7 @@ public class CleanupServiceTests
filesystem.AddFile($"{CacheDirectory}subdir/02.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
cleanupService.CleanupCacheAndTempDirectories();
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
@@ -386,7 +297,7 @@ public class CleanupServiceTests
filesystem.AddFile($"{BackupDirectory}randomfile.zip", filesystemFile);
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
await cleanupService.CleanupBackups();
Assert.Single(ds.GetFiles(BackupDirectory, searchOption: SearchOption.AllDirectories));
@@ -408,7 +319,7 @@ public class CleanupServiceTests
});
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
await cleanupService.CleanupBackups();
Assert.True(filesystem.File.Exists($"{BackupDirectory}randomfile.zip"));
@@ -432,7 +343,7 @@ public class CleanupServiceTests
}
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
await cleanupService.CleanupLogs();
Assert.Single(ds.GetFiles(LogDirectory, searchOption: SearchOption.AllDirectories));
@@ -461,7 +372,7 @@ public class CleanupServiceTests
var ds = new DirectoryService(Substitute.For>(), filesystem);
- var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
+ var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
await cleanupService.CleanupLogs();
Assert.True(filesystem.File.Exists($"{LogDirectory}kavita20200911.log"));
@@ -469,7 +380,275 @@ public class CleanupServiceTests
#endregion
- // #region CleanupBookmarks
+ #region CleanupDbEntries
+
+ [Fact]
+ public async Task CleanupDbEntries_CleanupAbandonedChapters()
+ {
+ var c = new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
+ .WithPages(1)
+ .Build();
+ var series = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
+ .WithChapter(c)
+ .Build())
+ .Build();
+ series.Library = new LibraryBuilder("Test LIb").Build();
+
+ Context.Series.Add(series);
+
+
+ Context.AppUser.Add(new AppUser()
+ {
+ UserName = "majora2007"
+ });
+
+ await Context.SaveChangesAsync();
+
+ var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
+ await _readerService.MarkChaptersUntilAsRead(user, 1, 5);
+ await Context.SaveChangesAsync();
+
+ // Validate correct chapters have read status
+ Assert.Equal(1, (await UnitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1)).PagesRead);
+
+ var cleanupService = new CleanupService(Substitute.For>(), UnitOfWork,
+ Substitute.For(),
+ new DirectoryService(Substitute.For>(), new MockFileSystem()));
+
+ // Delete the Chapter
+ Context.Chapter.Remove(c);
+ await UnitOfWork.CommitAsync();
+ Assert.Empty(await UnitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
+
+ // NOTE: This may not be needed, the underlying DB structure seems fixed as of v0.7
+ await cleanupService.CleanupDbEntries();
+
+ Assert.Empty(await UnitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
+ }
+
+ [Fact]
+ public async Task CleanupDbEntries_RemoveTagsWithoutSeries()
+ {
+ var s = new SeriesBuilder("Test")
+ .WithFormat(MangaFormat.Epub)
+ .WithMetadata(new SeriesMetadataBuilder().Build())
+ .Build();
+ s.Library = new LibraryBuilder("Test LIb").Build();
+ Context.Series.Add(s);
+
+ var c = new AppUserCollection()
+ {
+ Title = "Test Tag",
+ NormalizedTitle = "Test Tag".ToNormalized(),
+ AgeRating = AgeRating.Unknown,
+ Items = new List() {s}
+ };
+
+ Context.AppUser.Add(new AppUser()
+ {
+ UserName = "majora2007",
+ Collections = new List() {c}
+ });
+ await Context.SaveChangesAsync();
+
+ var cleanupService = new CleanupService(Substitute.For>(), UnitOfWork,
+ Substitute.For(),
+ new DirectoryService(Substitute.For>(), new MockFileSystem()));
+
+ // Delete the Chapter
+ Context.Series.Remove(s);
+ await UnitOfWork.CommitAsync();
+
+ await cleanupService.CleanupDbEntries();
+
+ Assert.Empty(await UnitOfWork.CollectionTagRepository.GetAllCollectionsAsync());
+ }
+
+ #endregion
+
+ #region CleanupWantToRead
+
+ [Fact]
+ public async Task CleanupWantToRead_ShouldRemoveFullyReadSeries()
+ {
+ await ResetDb();
+
+ var s = new SeriesBuilder("Test CleanupWantToRead_ShouldRemoveFullyReadSeries")
+ .WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.Completed).Build())
+ .Build();
+
+ s.Library = new LibraryBuilder("Test LIb").Build();
+ Context.Series.Add(s);
+
+ var user = new AppUser()
+ {
+ UserName = "CleanupWantToRead_ShouldRemoveFullyReadSeries",
+ };
+ Context.AppUser.Add(user);
+
+ await UnitOfWork.CommitAsync();
+
+ // Add want to read
+ user.WantToRead = new List()
+ {
+ new AppUserWantToRead()
+ {
+ SeriesId = s.Id
+ }
+ };
+ await UnitOfWork.CommitAsync();
+
+ await _readerService.MarkSeriesAsRead(user, s.Id);
+ await UnitOfWork.CommitAsync();
+
+ var cleanupService = new CleanupService(Substitute.For>(), UnitOfWork,
+ Substitute.For(),
+ new DirectoryService(Substitute.For>(), new MockFileSystem()));
+
+
+ await cleanupService.CleanupWantToRead();
+
+ var wantToRead =
+ await UnitOfWork.SeriesRepository.GetWantToReadForUserAsync(user.Id, new UserParams(), new FilterDto());
+
+ Assert.Equal(0, wantToRead.TotalCount);
+ }
+ #endregion
+
+ #region ConsolidateProgress
+
+ [Fact]
+ public async Task ConsolidateProgress_ShouldRemoveDuplicates()
+ {
+ await ResetDb();
+
+ var s = new SeriesBuilder("Test ConsolidateProgress_ShouldRemoveDuplicates")
+ .WithVolume(new VolumeBuilder("1")
+ .WithChapter(new ChapterBuilder("1")
+ .WithPages(3)
+ .Build())
+ .Build())
+ .Build();
+
+ s.Library = new LibraryBuilder("Test Lib").Build();
+ Context.Series.Add(s);
+
+ var user = new AppUser()
+ {
+ UserName = "ConsolidateProgress_ShouldRemoveDuplicates",
+ };
+ Context.AppUser.Add(user);
+
+ await UnitOfWork.CommitAsync();
+
+ // Add 2 progress events
+ user.Progresses ??= [];
+ user.Progresses.Add(new AppUserProgress()
+ {
+ ChapterId = 1,
+ VolumeId = 1,
+ SeriesId = 1,
+ LibraryId = s.LibraryId,
+ PagesRead = 1,
+ });
+ await UnitOfWork.CommitAsync();
+
+ // Add a duplicate with higher page number
+ user.Progresses.Add(new AppUserProgress()
+ {
+ ChapterId = 1,
+ VolumeId = 1,
+ SeriesId = 1,
+ LibraryId = s.LibraryId,
+ PagesRead = 3,
+ });
+ await UnitOfWork.CommitAsync();
+
+ Assert.Equal(2, (await UnitOfWork.AppUserProgressRepository.GetAllProgress()).Count());
+
+ var cleanupService = new CleanupService(Substitute.For>(), UnitOfWork,
+ Substitute.For(),
+ new DirectoryService(Substitute.For>(), new MockFileSystem()));
+
+
+ await cleanupService.ConsolidateProgress();
+
+ var progress = await UnitOfWork.AppUserProgressRepository.GetAllProgress();
+
+ Assert.Single(progress);
+ Assert.Equal(3, progress.First().PagesRead);
+ }
+ #endregion
+
+
+ #region EnsureChapterProgressIsCapped
+
+ [Fact]
+ public async Task EnsureChapterProgressIsCapped_ShouldNormalizeProgress()
+ {
+ await ResetDb();
+
+ var s = new SeriesBuilder("Test CleanupWantToRead_ShouldRemoveFullyReadSeries")
+ .WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.Completed).Build())
+ .Build();
+
+ s.Library = new LibraryBuilder("Test LIb").Build();
+ var c = new ChapterBuilder("1").WithPages(2).Build();
+ c.UserProgress = new List();
+ s.Volumes = new List()
+ {
+ new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume).WithChapter(c).Build()
+ };
+ Context.Series.Add(s);
+
+ var user = new AppUser()
+ {
+ UserName = "EnsureChapterProgressIsCapped",
+ Progresses = new List()
+ };
+ Context.AppUser.Add(user);
+
+ await UnitOfWork.CommitAsync();
+
+ await _readerService.MarkChaptersAsRead(user, s.Id, new List() {c});
+ await UnitOfWork.CommitAsync();
+
+ var chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
+ await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
+
+ Assert.NotNull(chapter);
+ Assert.Equal(2, chapter.PagesRead);
+
+ // Update chapter to have 1 page
+ c.Pages = 1;
+ UnitOfWork.ChapterRepository.Update(c);
+ await UnitOfWork.CommitAsync();
+
+ chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
+ await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
+ Assert.NotNull(chapter);
+ Assert.Equal(2, chapter.PagesRead);
+ Assert.Equal(1, chapter.Pages);
+
+ var cleanupService = new CleanupService(Substitute.For>(), UnitOfWork,
+ Substitute.For(),
+ new DirectoryService(Substitute.For>(), new MockFileSystem()));
+
+ await cleanupService.EnsureChapterProgressIsCapped();
+ chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
+ await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
+
+ Assert.NotNull(chapter);
+ Assert.Equal(1, chapter.PagesRead);
+
+ Context.AppUser.Remove(user);
+ await UnitOfWork.CommitAsync();
+ }
+ #endregion
+
+ #region CleanupBookmarks
//
// [Fact]
// public async Task CleanupBookmarks_LeaveAllFiles()
@@ -479,7 +658,7 @@ public class CleanupServiceTests
// filesystem.AddFile($"{BookmarkDirectory}1/1/1/0002.jpg", new MockFileData(""));
//
// // Delete all Series to reset state
- // await ResetDB();
+ // await ResetDb();
//
// _context.Series.Add(new Series()
// {
@@ -551,7 +730,7 @@ public class CleanupServiceTests
// filesystem.AddFile($"{BookmarkDirectory}1/1/2/0002.jpg", new MockFileData(""));
//
// // Delete all Series to reset state
- // await ResetDB();
+ // await ResetDb();
//
// _context.Series.Add(new Series()
// {
@@ -606,5 +785,5 @@ public class CleanupServiceTests
// Assert.Equal(1, ds.FileSystem.Directory.GetDirectories($"{BookmarkDirectory}1/1/").Length);
// }
//
- // #endregion
+ #endregion
}
diff --git a/API.Tests/Services/CollectionTagServiceTests.cs b/API.Tests/Services/CollectionTagServiceTests.cs
new file mode 100644
index 000000000..3414dd86b
--- /dev/null
+++ b/API.Tests/Services/CollectionTagServiceTests.cs
@@ -0,0 +1,529 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using API.Constants;
+using API.Data;
+using API.Data.Repositories;
+using API.DTOs.Collection;
+using API.Entities;
+using API.Entities.Enums;
+using API.Helpers.Builders;
+using API.Services;
+using API.Services.Plus;
+using API.SignalR;
+using Kavita.Common;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Services;
+
+public class CollectionTagServiceTests : AbstractDbTest
+{
+ private readonly ICollectionTagService _service;
+ public CollectionTagServiceTests()
+ {
+ _service = new CollectionTagService(UnitOfWork, Substitute.For<IEventHub>());
+ }
+
+ protected override async Task ResetDb()
+ {
+ Context.AppUserCollection.RemoveRange(Context.AppUserCollection.ToList());
+ Context.Library.RemoveRange(Context.Library.ToList());
+
+ await UnitOfWork.CommitAsync();
+ }
+
+ private async Task SeedSeries()
+ {
+ if (Context.AppUserCollection.Any()) return;
+
+ var s1 = new SeriesBuilder("Series 1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Mature).Build()).Build();
+ var s2 = new SeriesBuilder("Series 2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.G).Build()).Build();
+ Context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
+ .WithSeries(s1)
+ .WithSeries(s2)
+ .Build());
+
+ var user = new AppUserBuilder("majora2007", "majora2007", Seed.DefaultThemes.First()).Build();
+ user.Collections = new List<AppUserCollection>()
+ {
+ new AppUserCollectionBuilder("Tag 1").WithItems(new []{s1}).Build(),
+ new AppUserCollectionBuilder("Tag 2").WithItems(new []{s1, s2}).WithIsPromoted(true).Build()
+ };
+ UnitOfWork.UserRepository.Add(user);
+
+ await UnitOfWork.CommitAsync();
+ }
+
+ #region DeleteTag
+
+ [Fact]
+ public async Task DeleteTag_ShouldDeleteTag_WhenTagExists()
+ {
+ // Arrange
+ await SeedSeries();
+
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Act
+ var result = await _service.DeleteTag(1, user);
+
+ // Assert
+ Assert.True(result);
+ var deletedTag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.Null(deletedTag);
+ Assert.Single(user.Collections); // Only one collection should remain
+ }
+
+ [Fact]
+ public async Task DeleteTag_ShouldReturnTrue_WhenTagDoesNotExist()
+ {
+ // Arrange
+ await SeedSeries();
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Act - Try to delete a non-existent tag
+ var result = await _service.DeleteTag(999, user);
+
+ // Assert
+ Assert.True(result); // Should return true because the tag is already "deleted"
+ Assert.Equal(2, user.Collections.Count); // Both collections should remain
+ }
+
+ [Fact]
+ public async Task DeleteTag_ShouldNotAffectOtherTags()
+ {
+ // Arrange
+ await SeedSeries();
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Act
+ var result = await _service.DeleteTag(1, user);
+
+ // Assert
+ Assert.True(result);
+ var remainingTag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(remainingTag);
+ Assert.Equal("Tag 2", remainingTag.Title);
+ Assert.True(remainingTag.Promoted);
+ }
+
+ #endregion
+
+ #region UpdateTag
+
+ [Fact]
+ public async Task UpdateTag_ShouldUpdateFields()
+ {
+ await SeedSeries();
+
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldUpdateFields").WithIsPromoted(true).Build());
+ UnitOfWork.UserRepository.Update(user);
+ await UnitOfWork.CommitAsync();
+
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "UpdateTag_ShouldUpdateFields",
+ Id = 3,
+ Promoted = true,
+ Summary = "Test Summary",
+ AgeRating = AgeRating.Unknown
+ }, 1);
+
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(3);
+ Assert.NotNull(tag);
+ Assert.True(tag.Promoted);
+ Assert.False(string.IsNullOrEmpty(tag.Summary));
+ }
+
+ /// <summary>
+ /// UpdateTag should not change any title if non-Kavita source
+ /// </summary>
+ [Fact]
+ public async Task UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource()
+ {
+ await SeedSeries();
+
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource").WithSource(ScrobbleProvider.Mal).Build());
+ UnitOfWork.UserRepository.Update(user);
+ await UnitOfWork.CommitAsync();
+
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "New Title",
+ Id = 3,
+ Promoted = true,
+ Summary = "Test Summary",
+ AgeRating = AgeRating.Unknown
+ }, 1);
+
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(3);
+ Assert.NotNull(tag);
+ Assert.Equal("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource", tag.Title);
+ Assert.False(string.IsNullOrEmpty(tag.Summary));
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldThrowException_WhenTagDoesNotExist()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Non-existent Tag",
+ Id = 999, // Non-existent ID
+ Promoted = false
+ }, 1));
+
+ Assert.Equal("collection-doesnt-exist", exception.Message);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldThrowException_WhenUserDoesNotOwnTag()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Create a second user
+ var user2 = new AppUserBuilder("user2", "user2", Seed.DefaultThemes.First()).Build();
+ UnitOfWork.UserRepository.Add(user2);
+ await UnitOfWork.CommitAsync();
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1, // This belongs to user1
+ Promoted = false
+ }, 2)); // User with ID 2
+
+ Assert.Equal("access-denied", exception.Message);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldThrowException_WhenTitleIsEmpty()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = " ", // Empty after trimming
+ Id = 1,
+ Promoted = false
+ }, 1));
+
+ Assert.Equal("collection-tag-title-required", exception.Message);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldThrowException_WhenTitleAlreadyExists()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 2", // Already exists
+ Id = 1, // Trying to rename Tag 1 to Tag 2
+ Promoted = false
+ }, 1));
+
+ Assert.Equal("collection-tag-duplicate", exception.Message);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldUpdateCoverImageSettings()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Act
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ CoverImageLocked = true
+ }, 1);
+
+ // Assert
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.True(tag.CoverImageLocked);
+
+ // Now test unlocking the cover image
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ CoverImageLocked = false
+ }, 1);
+
+ tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.False(tag.CoverImageLocked);
+ Assert.Equal(string.Empty, tag.CoverImage);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldAllowPromoteForAdminRole()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Setup a user with admin role
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+ await AddUserWithRole(user.Id, PolicyConstants.AdminRole);
+
+
+ // Act - Try to promote a tag that wasn't previously promoted
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ Promoted = true
+ }, 1);
+
+ // Assert
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.True(tag.Promoted);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldAllowPromoteForPromoteRole()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Setup a user with promote role
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Mock to return promote role for the user
+ await AddUserWithRole(user.Id, PolicyConstants.PromoteRole);
+
+ // Act - Try to promote a tag that wasn't previously promoted
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ Promoted = true
+ }, 1);
+
+ // Assert
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.True(tag.Promoted);
+ }
+
+ [Fact]
+ public async Task UpdateTag_ShouldNotChangePromotion_WhenUserHasNoPermission()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Setup a user with no special roles
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Act - Try to promote a tag without proper role
+ await _service.UpdateTag(new AppUserCollectionDto()
+ {
+ Title = "Tag 1",
+ Id = 1,
+ Promoted = true
+ }, 1);
+
+ // Assert
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.False(tag.Promoted); // Should remain unpromoted
+ }
+ #endregion
+
+
+ #region RemoveTagFromSeries
+
+ [Fact]
+ public async Task RemoveTagFromSeries_RemoveSeriesFromTag()
+ {
+ await SeedSeries();
+
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Tag 2 has 2 series
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(tag);
+
+ await _service.RemoveTagFromSeries(tag, new[] {1});
+ var userCollections = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.Equal(2, userCollections!.Collections.Count);
+ Assert.Single(tag.Items);
+ Assert.Equal(2, tag.Items.First().Id);
+ }
+
+ /// <summary>
+ /// Ensure the rating of the tag updates after a series change
+ /// </summary>
+ [Fact]
+ public async Task RemoveTagFromSeries_RemoveSeriesFromTag_UpdatesRating()
+ {
+ await SeedSeries();
+
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Tag 2 has 2 series
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(tag);
+
+ await _service.RemoveTagFromSeries(tag, new[] {1});
+
+ Assert.Equal(AgeRating.G, tag.AgeRating);
+ }
+
+ /// <summary>
+ /// Should remove the tag when there are no items left on the tag
+ /// </summary>
+ [Fact]
+ public async Task RemoveTagFromSeries_RemoveSeriesFromTag_DeleteTagWhenNoSeriesLeft()
+ {
+ await SeedSeries();
+
+ var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
+ Assert.NotNull(user);
+
+ // Tag 1 has 1 series
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+
+ await _service.RemoveTagFromSeries(tag, new[] {1});
+ var tag2 = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.Null(tag2);
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldReturnFalse_WhenTagIsNull()
+ {
+ // Act
+ var result = await _service.RemoveTagFromSeries(null, [1]);
+
+ // Assert
+ Assert.False(result);
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldHandleEmptySeriesIdsList()
+ {
+ // Arrange
+ await SeedSeries();
+
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ var initialItemCount = tag.Items.Count;
+
+ // Act
+ var result = await _service.RemoveTagFromSeries(tag, Array.Empty<int>());
+
+ // Assert
+ Assert.True(result);
+ tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldHandleNonExistentSeriesIds()
+ {
+ // Arrange
+ await SeedSeries();
+
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ var initialItemCount = tag.Items.Count;
+
+ // Act - Try to remove a series that doesn't exist in the tag
+ var result = await _service.RemoveTagFromSeries(tag, [999]);
+
+ // Assert
+ Assert.True(result);
+ tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+ Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldHandleNullItemsList()
+ {
+ // Arrange
+ await SeedSeries();
+
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.NotNull(tag);
+
+ // Force null items list
+ tag.Items = null;
+ UnitOfWork.CollectionTagRepository.Update(tag);
+ await UnitOfWork.CommitAsync();
+
+ // Act
+ var result = await _service.RemoveTagFromSeries(tag, [1]);
+
+ // Assert
+ Assert.True(result);
+ // The tag should not be removed since the items list was null, not empty
+ var tagAfter = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
+ Assert.Null(tagAfter);
+ }
+
+ [Fact]
+ public async Task RemoveTagFromSeries_ShouldUpdateAgeRating_WhenMultipleSeriesRemain()
+ {
+ // Arrange
+ await SeedSeries();
+
+ // Add a third series with a different age rating
+ var s3 = new SeriesBuilder("Series 3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.PG).Build()).Build();
+ Context.Library.First().Series.Add(s3);
+ await UnitOfWork.CommitAsync();
+
+ // Add series 3 to tag 2
+ var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(tag);
+ tag.Items.Add(s3);
+ UnitOfWork.CollectionTagRepository.Update(tag);
+ await UnitOfWork.CommitAsync();
+
+ // Act - Remove the series with Mature rating
+ await _service.RemoveTagFromSeries(tag, new[] {1});
+
+ // Assert
+ tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
+ Assert.NotNull(tag);
+ Assert.Equal(2, tag.Items.Count);
+
+ // The age rating should be updated to the highest remaining rating (PG)
+ Assert.Equal(AgeRating.PG, tag.AgeRating);
+ }
+
+
+ #endregion
+
+}
diff --git a/API.Tests/Services/CoverDbServiceTests.cs b/API.Tests/Services/CoverDbServiceTests.cs
new file mode 100644
index 000000000..93217c3b5
--- /dev/null
+++ b/API.Tests/Services/CoverDbServiceTests.cs
@@ -0,0 +1,117 @@
+using System.IO;
+using System.IO.Abstractions;
+using System.Reflection;
+using System.Threading.Tasks;
+using API.Constants;
+using API.Entities.Enums;
+using API.Extensions;
+using API.Services;
+using API.Services.Tasks.Metadata;
+using API.SignalR;
+using EasyCaching.Core;
+using Kavita.Common;
+using Microsoft.Extensions.Hosting;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Services;
+
+public class CoverDbServiceTests : AbstractDbTest
+{
+ private readonly DirectoryService _directoryService;
+ private readonly IEasyCachingProviderFactory _cacheFactory = Substitute.For<IEasyCachingProviderFactory>();
+ private readonly ICoverDbService _coverDbService;
+
+ private static readonly string FaviconPath = Path.Join(Directory.GetCurrentDirectory(),
+ "../../../Services/Test Data/CoverDbService/Favicons");
+ /// <summary>
+ /// Path to download files temp to. Should be empty after each test.
+ /// </summary>
+ private static readonly string TempPath = Path.Join(Directory.GetCurrentDirectory(),
+ "../../../Services/Test Data/CoverDbService/Temp");
+
+ public CoverDbServiceTests()
+ {
+ _directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), CreateFileSystem());
+ var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
+
+ _coverDbService = new CoverDbService(Substitute.For<ILogger<CoverDbService>>(), _directoryService, _cacheFactory,
+ Substitute.For<IHostEnvironment>(), imageService, UnitOfWork, Substitute.For<IEventHub>());
+ }
+
+ protected override Task ResetDb()
+ {
+ throw new System.NotImplementedException();
+ }
+
+
+ #region Download Favicon
+
+ /// <summary>
+ /// I cannot figure out how to test this code due to the reliance on the _directoryService.FaviconDirectory and not being
+ /// able to redirect it to the real filesystem.
+ /// </summary>
+ public async Task DownloadFaviconAsync_ShouldDownloadAndMatchExpectedFavicon()
+ {
+ // Arrange
+ var testUrl = "https://anilist.co/anime/6205/Kämpfer/";
+ var encodeFormat = EncodeFormat.WEBP;
+ var expectedFaviconPath = Path.Combine(FaviconPath, "anilist.co.webp");
+
+ // Ensure TempPath exists
+ _directoryService.ExistOrCreate(TempPath);
+
+ var baseUrl = "https://anilist.co";
+
+ // Ensure there is no cache result for this URL
+ var provider = Substitute.For<IEasyCachingProvider>();
+ provider.GetAsync<string>(baseUrl).Returns(new CacheValue<string>(null, false));
+ _cacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
+
+
+ // // Replace favicon directory with TempPath
+ // var directoryService = (DirectoryService)_directoryService;
+ // directoryService.FaviconDirectory = TempPath;
+
+ // Hack: Swap FaviconDirectory with TempPath for ability to download real files
+ typeof(DirectoryService)
+ .GetField("FaviconDirectory", BindingFlags.NonPublic | BindingFlags.Instance)
+ ?.SetValue(_directoryService, TempPath);
+
+
+ // Act
+ var resultFilename = await _coverDbService.DownloadFaviconAsync(testUrl, encodeFormat);
+ var actualFaviconPath = Path.Combine(TempPath, resultFilename);
+
+ // Assert file exists
+ Assert.True(File.Exists(actualFaviconPath), "Downloaded favicon does not exist in temp path");
+
+ // Load and compare similarity
+
+ var similarity = expectedFaviconPath.CalculateSimilarity(actualFaviconPath); // Assuming you have this extension
+ Assert.True(similarity > 0.9f, $"Image similarity too low: {similarity}");
+ }
+
+ [Fact]
+ public async Task DownloadFaviconAsync_ShouldThrowKavitaException_WhenPreviouslyFailedUrlExistsInCache()
+ {
+ // Arrange
+ var testUrl = "https://example.com";
+ var encodeFormat = EncodeFormat.WEBP;
+
+ var provider = Substitute.For<IEasyCachingProvider>();
+ provider.GetAsync<string>(Arg.Any<string>())
+ .Returns(new CacheValue<string>(string.Empty, true)); // Simulate previous failure
+
+ _cacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
+
+ // Act & Assert
+ await Assert.ThrowsAsync<KavitaException>(() =>
+ _coverDbService.DownloadFaviconAsync(testUrl, encodeFormat));
+ }
+
+ #endregion
+
+
+}
diff --git a/API.Tests/Services/DeviceServiceTests.cs b/API.Tests/Services/DeviceServiceTests.cs
index 717f3e98b..cbcf70f82 100644
--- a/API.Tests/Services/DeviceServiceTests.cs
+++ b/API.Tests/Services/DeviceServiceTests.cs
@@ -5,27 +5,26 @@ using API.DTOs.Device;
using API.Entities;
using API.Entities.Enums.Device;
using API.Services;
-using API.Services.Tasks;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services;
-public class DeviceServiceTests : BasicTest
+public class DeviceServiceDbTests : AbstractDbTest
{
private readonly ILogger<DeviceService> _logger = Substitute.For<ILogger<DeviceService>>();
private readonly IDeviceService _deviceService;
- public DeviceServiceTests() : base()
+ public DeviceServiceDbTests() : base()
{
- _deviceService = new DeviceService(_unitOfWork, _logger, Substitute.For());
+ _deviceService = new DeviceService(UnitOfWork, _logger, Substitute.For());
}
- protected new Task ResetDb()
+ protected override async Task ResetDb()
{
- _context.Users.RemoveRange(_context.Users.ToList());
- return Task.CompletedTask;
+ Context.Users.RemoveRange(Context.Users.ToList());
+ await UnitOfWork.CommitAsync();
}
@@ -40,8 +39,8 @@ public class DeviceServiceTests : BasicTest
Devices = new List<Device>()
};
- _context.Users.Add(user);
- await _unitOfWork.CommitAsync();
+ Context.Users.Add(user);
+ await UnitOfWork.CommitAsync();
var device = await _deviceService.Create(new CreateDeviceDto()
{
@@ -51,7 +50,6 @@ public class DeviceServiceTests : BasicTest
}, user);
Assert.NotNull(device);
-
}
[Fact]
@@ -64,8 +62,8 @@ public class DeviceServiceTests : BasicTest
Devices = new List<Device>()
};
- _context.Users.Add(user);
- await _unitOfWork.CommitAsync();
+ Context.Users.Add(user);
+ await UnitOfWork.CommitAsync();
var device = await _deviceService.Create(new CreateDeviceDto()
{
diff --git a/API.Tests/Services/DirectoryServiceTests.cs b/API.Tests/Services/DirectoryServiceTests.cs
index 134dc2361..c5216bebf 100644
--- a/API.Tests/Services/DirectoryServiceTests.cs
+++ b/API.Tests/Services/DirectoryServiceTests.cs
@@ -1,20 +1,30 @@
using System;
using System.Collections.Generic;
+using System.Globalization;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
+using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using API.Services;
+using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
+using Xunit.Abstractions;
namespace API.Tests.Services;
-public class DirectoryServiceTests
+public class DirectoryServiceTests: AbstractFsTest
{
private readonly ILogger<DirectoryService> _logger = Substitute.For<ILogger<DirectoryService>>();
+ private readonly ITestOutputHelper _testOutputHelper;
+
+ public DirectoryServiceTests(ITestOutputHelper testOutputHelper)
+ {
+ _testOutputHelper = testOutputHelper;
+ }
#region TraverseTreeParallelForEach
@@ -61,13 +71,13 @@ public class DirectoryServiceTests
API.Services.Tasks.Scanner.Parser.Parser.ImageFileExtensions, _logger);
Assert.Equal(1, fileCount);
}
- catch (Exception ex)
+ catch
{
Assert.False(true);
}
- Assert.Equal(1, files.Count);
+ Assert.Single(files);
}
@@ -75,7 +85,7 @@ public class DirectoryServiceTests
[Fact]
public void TraverseTreeParallelForEach_DontCountExcludedDirectories_ShouldBe28()
{
- var testDirectory = "/manga/";
+ const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 28; i++)
{
@@ -85,6 +95,7 @@ public class DirectoryServiceTests
fileSystem.AddFile($"{Path.Join(testDirectory, "@eaDir")}file_{29}.jpg", new MockFileData(""));
fileSystem.AddFile($"{Path.Join(testDirectory, ".DS_Store")}file_{30}.jpg", new MockFileData(""));
fileSystem.AddFile($"{Path.Join(testDirectory, ".qpkg")}file_{30}.jpg", new MockFileData(""));
+ fileSystem.AddFile($"{Path.Join(testDirectory, ".@_thumb")}file_{30}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For