Compare commits
550 commits
bugfix/web
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ef2640b5fc | ||
|
|
76fd7ab4ce | ||
|
|
08c52b4281 | ||
|
|
9eadf956fb | ||
|
|
eab3d7a207 | ||
|
|
1389eb6320 | ||
|
|
8deb96cf48 | ||
|
|
ff17908400 | ||
|
|
e5d949161e | ||
|
|
6d4e207b65 | ||
|
|
3ac816eaf7 | ||
|
|
d909e03baf | ||
|
|
4b9bbc5d78 | ||
|
|
994e5d4d83 | ||
|
|
9c485350a5 | ||
|
|
6fa1cf994e | ||
|
|
62231d3c4e | ||
|
|
d536cc7f6a | ||
|
|
36aa5f5c85 | ||
|
|
fa8d778c8d | ||
|
|
225572732f | ||
|
|
14a8f5c1e5 | ||
|
|
45e24aa311 | ||
|
|
55f94602d4 | ||
|
|
3107ca73e4 | ||
|
|
b6d004614a | ||
|
|
10280c5487 | ||
|
|
59e461fc96 | ||
|
|
c52ed1f65d | ||
|
|
00c4712fc3 | ||
|
|
5e54306fd0 | ||
|
|
fc4ba4509f | ||
|
|
3d68c30cac | ||
|
|
1856b01a46 | ||
|
|
ea28d64302 | ||
|
|
7d6c541be5 | ||
|
|
193e9b1da9 | ||
|
|
6288d89651 | ||
|
|
a751da4ca6 | ||
|
|
4396a07e7b | ||
|
|
d9ac52aa0f | ||
|
|
8ed2fa3829 | ||
|
|
66f78aa859 | ||
|
|
f88720ccfd | ||
|
|
70f00895e8 | ||
|
|
574cf4b78e | ||
|
|
005c1bf60b | ||
|
|
7ce36bfc44 | ||
|
|
cd2a6af6f2 | ||
|
|
7e4216d0bf | ||
|
|
5b8a643d82 | ||
|
|
50a052e412 | ||
|
|
b83df31701 | ||
|
|
bc41b0256e | ||
|
|
3a0d33ca13 | ||
|
|
5a540ba7ea | ||
|
|
4f7625ea77 | ||
|
|
3b8997e46e | ||
|
|
8a77ffb4f9 | ||
|
|
7b3198ed9c | ||
|
|
5c06e14a73 | ||
|
|
0ea1116d1d | ||
|
|
7ccf743f9f | ||
|
|
ed154e4768 | ||
|
|
4521965315 | ||
|
|
9ee5821cb2 | ||
|
|
06a6d9e03b | ||
|
|
9bb741a81b | ||
|
|
14bf4400a9 | ||
|
|
d3f8a503c2 | ||
|
|
a1264cb5e8 | ||
|
|
e76a7152db | ||
|
|
925ae83baf | ||
|
|
ad152aa26a | ||
|
|
00b759e532 | ||
|
|
5c5b0df814 | ||
|
|
45122f7bd8 | ||
|
|
85471b4de0 | ||
|
|
fa88177783 | ||
|
|
96fe0b9b3a | ||
|
|
ce16988490 | ||
|
|
33221fee2b | ||
|
|
55966f0b2a | ||
|
|
45481cafee | ||
|
|
cc4de12df8 | ||
|
|
4876f5ce7c | ||
|
|
4453482d93 | ||
|
|
67d7d8467e | ||
|
|
6b7462a790 | ||
|
|
ccb682ccc7 | ||
|
|
c80d046fc7 | ||
|
|
9d9938bce2 | ||
|
|
66d764f026 | ||
|
|
6396455dab | ||
|
|
93dc6534fc | ||
|
|
cbb97208b8 | ||
|
|
71a0a8756b | ||
|
|
d8c4b95089 | ||
|
|
37734554ba | ||
|
|
ea9b7ad0d1 | ||
|
|
412cecaf3c | ||
|
|
4abe2532bf | ||
|
|
dad212bfb9 | ||
|
|
b2ee651fb8 | ||
|
|
283936e618 | ||
|
|
5738014a88 | ||
|
|
da9f02f963 | ||
|
|
7a1541d229 | ||
|
|
cc3ae7f472 | ||
|
|
535165c445 | ||
|
|
9c12de42d5 | ||
|
|
de651215f5 | ||
|
|
b45d92ea5c | ||
|
|
b24e18c99e | ||
|
|
a17d39f25a | ||
|
|
1ad8a360cb | ||
|
|
a1d3aef39b | ||
|
|
acaf308c2e | ||
|
|
e1e00c8c47 | ||
|
|
4a6ea0a27d | ||
|
|
43d0d1277f | ||
|
|
9fb3bdd548 | ||
|
|
6bb03618fb | ||
|
|
4c68d2db26 | ||
|
|
6e72854d37 | ||
|
|
a7e1386bad | ||
|
|
0785d4afab | ||
|
|
912c8af450 | ||
|
|
b5d201e69b | ||
|
|
0f72f63b35 | ||
|
|
98a2b9d3ed | ||
|
|
e897fb9a12 | ||
|
|
ba17bf483d | ||
|
|
df11f5bac2 | ||
|
|
92afcf7a23 | ||
|
|
fb8bbcfa13 | ||
|
|
e93cab240e | ||
|
|
cb207557e6 | ||
|
|
4cb16be6a5 | ||
|
|
75419fb62b | ||
|
|
9b70da7926 | ||
|
|
f281a63934 | ||
|
|
82e8f7fade | ||
|
|
08d5c7e9da | ||
|
|
a6ccae5849 | ||
|
|
b644022f30 | ||
|
|
8f454a0282 | ||
|
|
78f5e240e2 | ||
|
|
60a48ff5f3 | ||
|
|
86a0a5bf27 | ||
|
|
ab540c0ea6 | ||
|
|
b4061e3711 | ||
|
|
1244e4263e | ||
|
|
5aa9699a99 | ||
|
|
e0248b8869 | ||
|
|
401fa3e611 | ||
|
|
56e84fc235 | ||
|
|
3085288fda | ||
|
|
292dc55809 | ||
|
|
41cc3df654 | ||
|
|
612cf51124 | ||
|
|
e2360d4ab4 | ||
|
|
a3cb5dd199 | ||
|
|
8578af9ccc | ||
|
|
80a713bac9 | ||
|
|
5f83c83b5b | ||
|
|
9ad394c43a | ||
|
|
4a4d59bc90 | ||
|
|
79b4487206 | ||
|
|
5af851af08 | ||
|
|
78a98d0d18 | ||
|
|
0ffe0228e5 | ||
|
|
b38400c092 | ||
|
|
83c63a7904 | ||
|
|
158c119247 | ||
|
|
9565fe7360 | ||
|
|
b858729c9e | ||
|
|
f76de42b28 | ||
|
|
56108eb373 | ||
|
|
39726f8c4d | ||
|
|
ea81a2f432 | ||
|
|
678c47479c | ||
|
|
4c44dbf3e2 | ||
|
|
c0b59d87a4 | ||
|
|
5d6a5f0987 | ||
|
|
6f3ba0948b | ||
|
|
53b13da0c9 | ||
|
|
8d3dcc637e | ||
|
|
bb9621a588 | ||
|
|
40bbdcb5f0 | ||
|
|
f4fd7230ea | ||
|
|
eb66763078 | ||
|
|
541607cc5d | ||
|
|
49b47a98fc | ||
|
|
a5707617f2 | ||
|
|
d880c1690c | ||
|
|
8c87523c62 | ||
|
|
b93bfa49b0 | ||
|
|
6ae9cbf4aa | ||
|
|
a8144a1d3e | ||
|
|
41c346d5e6 | ||
|
|
bfbcb4b741 | ||
|
|
0407d75d91 | ||
|
|
b24b8686f3 | ||
|
|
bcfb4a6172 | ||
|
|
a06126b7f9 | ||
|
|
62c1da1d22 | ||
|
|
fc7c49f5cc | ||
|
|
85d0d1f0f4 | ||
|
|
19b0163da7 | ||
|
|
ac47cbd75f | ||
|
|
d4028a8d68 | ||
|
|
4e6b98e87a | ||
|
|
67c67f4a48 | ||
|
|
9f5c054f63 | ||
|
|
349645bf32 | ||
|
|
17a5d0d583 | ||
|
|
3e3b6ba92b | ||
|
|
cb810a2d8f | ||
|
|
c849eff33e | ||
|
|
6a75291a67 | ||
|
|
06768bce3e | ||
|
|
7ffb1fb25e | ||
|
|
f630099651 | ||
|
|
7c4d7dc821 | ||
|
|
2d5a7a3606 | ||
|
|
f812f61001 | ||
|
|
dd3dec269f | ||
|
|
0e0d8dca5b | ||
|
|
e1aba57783 | ||
|
|
611a9f7873 | ||
|
|
3a0c796c08 | ||
|
|
aa939edf6d | ||
|
|
113df85e7e | ||
|
|
c00e18de95 | ||
|
|
a9775943be | ||
|
|
9e299d08b9 | ||
|
|
dcd281c5c3 | ||
|
|
a847468a6c | ||
|
|
b44f89d1e8 | ||
|
|
abdf15b895 | ||
|
|
727fbd353b | ||
|
|
6b13db129e | ||
|
|
953d80de1a | ||
|
|
7043bc566a | ||
|
|
841db594c5 | ||
|
|
d563ebf3d2 | ||
|
|
eacf2b9a5a | ||
|
|
fc269d3dd2 | ||
|
|
ed7e9d4a6e | ||
|
|
1d5ba8992c | ||
|
|
5abcb2dca6 | ||
|
|
b65b78a736 | ||
|
|
184c766fa7 | ||
|
|
ba20ad4ecc | ||
|
|
1ed0eae22d | ||
|
|
c3a8473489 | ||
|
|
0dfe416cc0 | ||
|
|
3bade4ccaf | ||
|
|
0a13cc8454 | ||
|
|
ee253820f6 | ||
|
|
857e419e4e | ||
|
|
c9d47f7e8a | ||
|
|
894b49bb76 | ||
|
|
77de5ccce3 | ||
|
|
9c34c19f6a | ||
|
|
637d0319d6 | ||
|
|
72c20fb54f | ||
|
|
868582da58 | ||
|
|
c691ab6d57 | ||
|
|
1d5d088b6f | ||
|
|
ecdcc0b6f6 | ||
|
|
4db99bab7f | ||
|
|
116ca267d6 | ||
|
|
d6ee97816f | ||
|
|
844d7c7e4b | ||
|
|
32a0012146 | ||
|
|
0c01d2ef59 | ||
|
|
dd52d32ed1 | ||
|
|
07c4e6e49e | ||
|
|
cabb27fb74 | ||
|
|
f17ecbf305 | ||
|
|
07a36176de | ||
|
|
e574caf7eb | ||
|
|
91d847c1f1 | ||
|
|
79eb98a3bb | ||
|
|
5bf5558212 | ||
|
|
62383042b0 | ||
|
|
fc644985be | ||
|
|
c93af3e56f | ||
|
|
dbc4f35107 | ||
|
|
38fc8e9110 | ||
|
|
7ca523adef | ||
|
|
d4bcd354dd | ||
|
|
3b915a8289 | ||
|
|
3d8aa2ad24 | ||
|
|
0247bc5012 | ||
|
|
ac21b04fa4 | ||
|
|
c188e0f23b | ||
|
|
dbf81176a3 | ||
|
|
ff79710ac6 | ||
|
|
5934d516f3 | ||
|
|
36cd5e736b | ||
|
|
c7f444a185 | ||
|
|
e43b6110bb | ||
|
|
44c046176e | ||
|
|
c850f49cd4 | ||
|
|
0fa7df9ab2 | ||
|
|
6434ed7c9b | ||
|
|
1918c9305e | ||
|
|
8cc1edc38d | ||
|
|
2326c9f437 | ||
|
|
ce0c9a3364 | ||
|
|
e2fe53187e | ||
|
|
5f0859c202 | ||
|
|
be6285b9d4 | ||
|
|
99c2dfb467 | ||
|
|
7997cd6329 | ||
|
|
7b980e9152 | ||
|
|
e4224fbfa4 | ||
|
|
850d4f8e12 | ||
|
|
a427d02ed1 | ||
|
|
30a8a2555f | ||
|
|
a063333f80 | ||
|
|
2fb72ab0d4 | ||
|
|
3936308f8c | ||
|
|
6218f84861 | ||
|
|
12ec980204 | ||
|
|
59699e17e2 | ||
|
|
11635c6696 | ||
|
|
6139b3fbdf | ||
|
|
a6a60c4221 | ||
|
|
094e775f0b | ||
|
|
982253f1b6 | ||
|
|
1de426070b | ||
|
|
4c2c619285 | ||
|
|
a3e020fe17 | ||
|
|
1ae723b405 | ||
|
|
616ed7a75d | ||
|
|
8c629695ef | ||
|
|
b50fa0fd1e | ||
|
|
97ffdd0975 | ||
|
|
38e7c1c131 | ||
|
|
24302d4fcc | ||
|
|
1a04fcf576 | ||
|
|
247780afa0 | ||
|
|
b23300b1a4 | ||
|
|
29eb65c783 | ||
|
|
315a6cca7e | ||
|
|
3c9565468a | ||
|
|
38dd4a97db | ||
|
|
b9a343602e | ||
|
|
52121fbb76 | ||
|
|
d923a8d242 | ||
|
|
f171b02b62 | ||
|
|
79caedcac4 | ||
|
|
32bedb4e06 | ||
|
|
8d77b398b2 | ||
|
|
6f021b9375 | ||
|
|
6d9a5d8f65 | ||
|
|
f02e1f7d1f | ||
|
|
501430fdd7 | ||
|
|
2b4b047ea3 | ||
|
|
b04ccd94a9 | ||
|
|
53629e6245 | ||
|
|
8cb650dbb1 | ||
|
|
4756347cb6 | ||
|
|
3d02a8ffba | ||
|
|
9ce66cada6 | ||
|
|
8f4e1fbb36 | ||
|
|
5a7fd25548 | ||
|
|
3883796e5a | ||
|
|
9137ba5f7b | ||
|
|
cfe0fde1e2 | ||
|
|
4194ec69d1 | ||
|
|
83e493f2ae | ||
|
|
276d46cb0c | ||
|
|
9512b43011 | ||
|
|
5195f08c2f | ||
|
|
c47fec4648 | ||
|
|
52f22a006d | ||
|
|
e6e228c3d9 | ||
|
|
c56a341a97 | ||
|
|
a76de06087 | ||
|
|
adb65d531d | ||
|
|
752fda0e93 | ||
|
|
6ed634f5d1 | ||
|
|
0277f8f4c7 | ||
|
|
57fb2d01b9 | ||
|
|
deaaccb96a | ||
|
|
0dacc061f1 | ||
|
|
2bde0ac82a | ||
|
|
f22f30b5a9 | ||
|
|
93a8883fe4 | ||
|
|
829a610579 | ||
|
|
63c9bff32e | ||
|
|
18792b7b56 | ||
|
|
a4977938b6 | ||
|
|
be253ff3ec | ||
|
|
3d8111c627 | ||
|
|
abd2651fd2 | ||
|
|
f0c940dab1 | ||
|
|
2a9e8ae788 | ||
|
|
8167fc5a4f | ||
|
|
123917fbec | ||
|
|
119e4efeb6 | ||
|
|
8d63ce5ad9 | ||
|
|
6163f6ca21 | ||
|
|
a1aefd4d9f | ||
|
|
1e70089253 | ||
|
|
b42847d4af | ||
|
|
ffe21713f7 | ||
|
|
f443e513d1 | ||
|
|
2b6fd1224f | ||
|
|
54db29a460 | ||
|
|
3d8cdc245d | ||
|
|
fbbdcbf2f7 | ||
|
|
fa29c95536 | ||
|
|
c523afd224 | ||
|
|
0f7a87d89f | ||
|
|
7c586a1c41 | ||
|
|
3186be06e3 | ||
|
|
da4804828c | ||
|
|
5b2d254d00 | ||
|
|
78e1fb03de | ||
|
|
7552c3f5fa | ||
|
|
d7e9e7c832 | ||
|
|
f42fcb1460 | ||
|
|
7b7609652c | ||
|
|
2d81527bd1 | ||
|
|
7d3b7696be | ||
|
|
5dc5029a75 | ||
|
|
f616b99585 | ||
|
|
835eb3ee0a | ||
|
|
8b2649302c | ||
|
|
a40c019ddb | ||
|
|
e5c8c48d9f | ||
|
|
0d14705fb4 | ||
|
|
d8136fb606 | ||
|
|
e560a4ea4d | ||
|
|
033dc61d54 | ||
|
|
2227dc354b | ||
|
|
2c7260e074 | ||
|
|
5cf6077dfd | ||
|
|
07e96389fb | ||
|
|
9e9b95e29d | ||
|
|
c92d0d8af0 | ||
|
|
7e61cca92d | ||
|
|
92ad7db918 | ||
|
|
0b0e858f24 | ||
|
|
2ef266c676 | ||
|
|
519beb72df | ||
|
|
061b363f96 | ||
|
|
4a9519b6dc | ||
|
|
685f7365e1 | ||
|
|
4ce2b4343a | ||
|
|
07b98cf555 | ||
|
|
355ea24db6 | ||
|
|
149c30b138 | ||
|
|
e36243fc95 | ||
|
|
08157234ab | ||
|
|
a9ab84803e | ||
|
|
828d62dfe9 | ||
|
|
3f8659499b | ||
|
|
93965b563d | ||
|
|
764ecd318c | ||
|
|
1fd72ada36 | ||
|
|
03e7d38482 | ||
|
|
e8d9a8b3a3 | ||
|
|
47547c23dd | ||
|
|
babb8a6808 | ||
|
|
e4ba189594 | ||
|
|
e556a16199 | ||
|
|
6c59bea97c | ||
|
|
9ebc63db56 | ||
|
|
bfc4f17e05 | ||
|
|
930aa35bfe | ||
|
|
48aa8cacfd | ||
|
|
e41795f9c2 | ||
|
|
f6af6d66be | ||
|
|
6fdc9228df | ||
|
|
9c84e19960 | ||
|
|
4f5bb57085 | ||
|
|
de75f0acf7 | ||
|
|
b1e9d8cbba | ||
|
|
088af37960 | ||
|
|
90efc0cb55 | ||
|
|
d54564b75a | ||
|
|
17a4815bb9 | ||
|
|
53e59ac97f | ||
|
|
88aad6be32 | ||
|
|
f660a1cd06 | ||
|
|
56fa393cf0 | ||
|
|
ad74871623 | ||
|
|
2092e120c3 | ||
|
|
a85644fb6b | ||
|
|
2a539da24c | ||
|
|
436b92e59c | ||
|
|
7017f26fcf | ||
|
|
7a6ef173e2 | ||
|
|
295f352ab5 | ||
|
|
1181dacf1d | ||
|
|
601acf5a84 | ||
|
|
a9cde5eaf7 | ||
|
|
381d0b3a67 | ||
|
|
d145dca0e7 | ||
|
|
0ff6d4a6fc | ||
|
|
bc41c57b7b | ||
|
|
f44ba1cc12 | ||
|
|
e77d1c0d43 | ||
|
|
dd44f55747 | ||
|
|
625c56b265 | ||
|
|
51e169ed82 | ||
|
|
e215305700 | ||
|
|
70cb687ef6 | ||
|
|
e6f6090fcf | ||
|
|
c0adf74d39 | ||
|
|
8598ca4927 | ||
|
|
3dcf7750f7 | ||
|
|
e21144bf6b | ||
|
|
7901055c66 | ||
|
|
51406ddbd2 | ||
|
|
4c31556465 | ||
|
|
ee70f26916 | ||
|
|
3933513908 | ||
|
|
1d56f5a0a1 | ||
|
|
95a186e252 | ||
|
|
378737fd9d | ||
|
|
4e1c66331f | ||
|
|
0c70e80420 | ||
|
|
231e6b3143 | ||
|
|
5f86cf4bb0 | ||
|
|
3765ad929a | ||
|
|
d85208513f | ||
|
|
c37596889a | ||
|
|
138794ffed | ||
|
|
5dfcccba7a | ||
|
|
fcacd67d71 | ||
|
|
5ed8ef5a3c | ||
|
|
afc581301f | ||
|
|
11729892cd | ||
|
|
d7734964e7 | ||
|
|
fd7eff93ca | ||
|
|
17d5bd5876 | ||
|
|
b838fd53e5 | ||
|
|
6d4d2d4a7f | ||
|
|
b55f0b9894 | ||
|
|
bd4cbeb393 | ||
|
|
8c16b87ff0 | ||
|
|
2ddea89273 |
1755 changed files with 291351 additions and 46291 deletions
|
|
@ -8,10 +8,4 @@
|
||||||
# You can see what browsers were selected by your queries by running:
|
# You can see what browsers were selected by your queries by running:
|
||||||
# npx browserslist
|
# npx browserslist
|
||||||
|
|
||||||
last 1 Chrome version
|
defaults
|
||||||
last 1 Firefox version
|
|
||||||
last 2 Edge major versions
|
|
||||||
last 2 Safari major versions
|
|
||||||
last 2 iOS major versions
|
|
||||||
Firefox ESR
|
|
||||||
not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
# Editor configuration, see https://editorconfig.org
|
# Editor configuration, see https://editorconfig.org
|
||||||
root = true
|
root = true
|
||||||
|
|
||||||
|
|
||||||
[*]
|
[*]
|
||||||
charset = utf-8
|
charset = utf-8
|
||||||
indent_style = space
|
indent_style = space
|
||||||
|
|
@ -22,3 +23,7 @@ indent_size = 2
|
||||||
|
|
||||||
[*.csproj]
|
[*.csproj]
|
||||||
indent_size = 2
|
indent_size = 2
|
||||||
|
|
||||||
|
[*.cs]
|
||||||
|
# Disable SonarLint warning S1075 (Don't use hardcoded url)
|
||||||
|
dotnet_diagnostic.S1075.severity = none
|
||||||
|
|
|
||||||
49
.github/DISCUSSION_TEMPLATE/ideas.yml
vendored
Normal file
49
.github/DISCUSSION_TEMPLATE/ideas.yml
vendored
Normal file
|
|
@ -0,0 +1,49 @@
|
||||||
|
title: "[Kavita] Idea / Feature Submission"
|
||||||
|
labels:
|
||||||
|
- "Idea Submission"
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
## Idea Submission for Kavita 💡
|
||||||
|
|
||||||
|
Please fill out the details below, and let's make Kavita even better together!
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
id: idea-description
|
||||||
|
attributes:
|
||||||
|
label: Idea Description
|
||||||
|
value: |
|
||||||
|
Go into as much detail as possible to explain why your idea should be added to Kavita. Try to present some use cases and examples of how it would help other users. The more detail you have the better.
|
||||||
|
|
||||||
|
- type: dropdown
|
||||||
|
id: idea-category
|
||||||
|
attributes:
|
||||||
|
label: Idea Category
|
||||||
|
options:
|
||||||
|
- API
|
||||||
|
- Feature Enhancement
|
||||||
|
- User Experience
|
||||||
|
- Performance Improvement
|
||||||
|
- Web UI
|
||||||
|
description: "What area would your idea help with?"
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: input
|
||||||
|
id: duration-of-use
|
||||||
|
attributes:
|
||||||
|
label: Duration of Using Kavita
|
||||||
|
description: "How long have you been using Kavita?"
|
||||||
|
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Before submitting
|
||||||
|
options:
|
||||||
|
- label: "I've already searched for existing ideas before posting."
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
### Thank you for contributing to Kavita's future! 🚀
|
||||||
42
.github/ISSUE_TEMPLATE/bug_report.md
vendored
42
.github/ISSUE_TEMPLATE/bug_report.md
vendored
|
|
@ -1,42 +0,0 @@
|
||||||
---
|
|
||||||
name: Bug report
|
|
||||||
about: Create a report to help us improve
|
|
||||||
title: ''
|
|
||||||
labels: needs-triage
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**If this is a feature request, request [here](https://feats.kavitareader.com/) instead. Feature requests will be deleted from Github.**
|
|
||||||
|
|
||||||
Please put as much information as possible to help me understand your issue. OS, browser, version are very important!
|
|
||||||
|
|
||||||
**Describe the bug**
|
|
||||||
A clear and concise description of what the bug is.
|
|
||||||
|
|
||||||
**To Reproduce**
|
|
||||||
Steps to reproduce the behavior:
|
|
||||||
1. Go to '...'
|
|
||||||
2. Click on '....'
|
|
||||||
3. Scroll down to '....'
|
|
||||||
4. See error
|
|
||||||
|
|
||||||
**Expected behavior**
|
|
||||||
A clear and concise description of what you expected to happen.
|
|
||||||
|
|
||||||
**Screenshots**
|
|
||||||
If applicable, add screenshots to help explain your problem.
|
|
||||||
|
|
||||||
**Desktop (please complete the following information):**
|
|
||||||
- OS: [e.g. iOS, Docker]
|
|
||||||
- Browser [e.g. chrome, safari]
|
|
||||||
- Version [e.g. 22] (can be found on Server Settings -> System tab)
|
|
||||||
|
|
||||||
**Smartphone (please complete the following information):**
|
|
||||||
- Device: [e.g. iPhone6]
|
|
||||||
- OS: [e.g. iOS8.1]
|
|
||||||
- Browser [e.g. stock browser, safari]
|
|
||||||
- Version [e.g. 22]
|
|
||||||
|
|
||||||
**Additional context**
|
|
||||||
Add any other context about the problem here.
|
|
||||||
51
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
51
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
|
|
@ -1,24 +1,17 @@
|
||||||
name: Bug Report
|
name: Bug Report
|
||||||
description: Create a report to help us improve
|
description: Help us make Kavita better for everyone by submitting issues you run into while using the program.
|
||||||
title: ""
|
title: "Put a short summary of what went wrong here"
|
||||||
labels: ["needs-triage"]
|
labels: ["needs-triage"]
|
||||||
assignees:
|
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: "Thanks for taking the time to fill out this bug report!"
|
||||||
Thanks for taking the time to fill out this bug report!
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
If you have a feature request, please go to our [Feature Requests](https://feats.kavitareader.com) page.
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: what-happened
|
id: what-happened
|
||||||
attributes:
|
attributes:
|
||||||
label: What happened?
|
label: What happened?
|
||||||
description: Also tell us, what steps you took so we can try to reproduce.
|
description: Don't forget to tell us what steps you took so we can try to reproduce.
|
||||||
placeholder: Tell us what you see!
|
placeholder: Tell us what you see!
|
||||||
value: ""
|
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
|
|
@ -26,33 +19,35 @@ body:
|
||||||
attributes:
|
attributes:
|
||||||
label: What did you expect?
|
label: What did you expect?
|
||||||
description: What did you expect to happen?
|
description: What did you expect to happen?
|
||||||
placeholder: Tell us what you expected to see!
|
placeholder: Tell us what you expected to see! Go in as much detail as possible so we can confirm if the behavior is something that is broken.
|
||||||
value: ""
|
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: dropdown
|
||||||
id: version
|
id: version
|
||||||
attributes:
|
attributes:
|
||||||
label: Version
|
label: Kavita Version Number - If you don't see your version number listed, please update Kavita and see if your issue still persists.
|
||||||
description: What version of our software are you running?
|
multiple: false
|
||||||
placeholder: Can be found by going to Server Settings > System
|
options:
|
||||||
value: ""
|
- 0.8.7 - Stable
|
||||||
|
- Nightly Testing Branch
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: dropdown
|
- type: dropdown
|
||||||
id: OS
|
id: OS
|
||||||
attributes:
|
attributes:
|
||||||
label: What OS is Kavita being run on?
|
label: What operating system is Kavita being hosted from?
|
||||||
multiple: false
|
multiple: false
|
||||||
options:
|
options:
|
||||||
- Docker
|
- Docker (LSIO Container)
|
||||||
|
- Docker (Dockerhub Container)
|
||||||
|
- Docker (Other)
|
||||||
- Windows
|
- Windows
|
||||||
- Linux
|
- Linux
|
||||||
- Mac
|
- Mac
|
||||||
- type: dropdown
|
- type: dropdown
|
||||||
id: desktop-OS
|
id: desktop-OS
|
||||||
attributes:
|
attributes:
|
||||||
label: If issue being seen on Desktop, what OS are you running where you see the issue?
|
label: If the issue is being seen on Desktop, what OS are you running where you see the issue?
|
||||||
multiple: false
|
multiple: false
|
||||||
options:
|
options:
|
||||||
- Windows
|
- Windows
|
||||||
|
|
@ -61,17 +56,18 @@ body:
|
||||||
- type: dropdown
|
- type: dropdown
|
||||||
id: desktop-browsers
|
id: desktop-browsers
|
||||||
attributes:
|
attributes:
|
||||||
label: If issue being seen on Desktop, what browsers are you seeing the problem on?
|
label: If the issue is being seen in the UI, what browsers are you seeing the problem on?
|
||||||
multiple: true
|
multiple: true
|
||||||
options:
|
options:
|
||||||
- Firefox
|
- Firefox
|
||||||
- Chrome
|
- Chrome
|
||||||
- Safari
|
- Safari
|
||||||
- Microsoft Edge
|
- Microsoft Edge
|
||||||
|
- Other (List in "Additional Notes" box)
|
||||||
- type: dropdown
|
- type: dropdown
|
||||||
id: mobile-OS
|
id: mobile-OS
|
||||||
attributes:
|
attributes:
|
||||||
label: If issue being seen on Mobile, what OS are you running where you see the issue?
|
label: If the issue is being seen on Mobile, what OS are you running where you see the issue?
|
||||||
multiple: false
|
multiple: false
|
||||||
options:
|
options:
|
||||||
- Android
|
- Android
|
||||||
|
|
@ -79,13 +75,13 @@ body:
|
||||||
- type: dropdown
|
- type: dropdown
|
||||||
id: mobile-browsers
|
id: mobile-browsers
|
||||||
attributes:
|
attributes:
|
||||||
label: If issue being seen on Mobile, what browsers are you seeing the problem on?
|
label: If the issue is being seen on the Mobile UI, what browsers are you seeing the problem on?
|
||||||
multiple: true
|
multiple: true
|
||||||
options:
|
options:
|
||||||
- Firefox
|
- Firefox
|
||||||
- Chrome
|
- Chrome
|
||||||
- Safari
|
- Safari
|
||||||
- Microsoft Edge
|
- Other iOS Browser
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: logs
|
id: logs
|
||||||
attributes:
|
attributes:
|
||||||
|
|
@ -97,7 +93,4 @@ body:
|
||||||
attributes:
|
attributes:
|
||||||
label: Additional Notes
|
label: Additional Notes
|
||||||
description: Any other information about the issue not covered in this form?
|
description: Any other information about the issue not covered in this form?
|
||||||
placeholder: e.g. Running Kavita on a raspberry pi
|
placeholder: e.g. Running Kavita on a Raspberry Pi, updating from X version, using LSIO container, etc
|
||||||
value: ""
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
|
||||||
6
.github/ISSUE_TEMPLATE/config.yml
vendored
6
.github/ISSUE_TEMPLATE/config.yml
vendored
|
|
@ -1 +1,5 @@
|
||||||
blank_issues_enabled: false
|
blank_issues_enabled: false
|
||||||
|
contact_links:
|
||||||
|
- name: Feature Requests
|
||||||
|
url: https://github.com/Kareadita/Kavita/discussions
|
||||||
|
about: Suggest an idea for the Kavita project
|
||||||
|
|
|
||||||
48
.github/workflows/build-and-test.yml
vendored
48
.github/workflows/build-and-test.yml
vendored
|
|
@ -10,64 +10,26 @@ jobs:
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Repo
|
- name: Checkout Repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup .NET Core
|
- name: Setup .NET Core
|
||||||
uses: actions/setup-dotnet@v3
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 7.0.x
|
dotnet-version: 9.0.x
|
||||||
|
|
||||||
- name: Install Swashbuckle CLI
|
- name: Install Swashbuckle CLI
|
||||||
shell: powershell
|
shell: powershell
|
||||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: dotnet restore
|
run: dotnet restore
|
||||||
|
|
||||||
- name: Set up JDK 17
|
- uses: actions/upload-artifact@v4
|
||||||
uses: actions/setup-java@v3
|
|
||||||
with:
|
|
||||||
distribution: 'zulu'
|
|
||||||
java-version: '17'
|
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
|
||||||
with:
|
with:
|
||||||
name: csproj
|
name: csproj
|
||||||
path: Kavita.Common/Kavita.Common.csproj
|
path: Kavita.Common/Kavita.Common.csproj
|
||||||
|
|
||||||
- name: Cache SonarCloud packages
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: ~\sonar\cache
|
|
||||||
key: ${{ runner.os }}-sonar
|
|
||||||
restore-keys: ${{ runner.os }}-sonar
|
|
||||||
|
|
||||||
- name: Cache SonarCloud scanner
|
|
||||||
id: cache-sonar-scanner
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: .\.sonar\scanner
|
|
||||||
key: ${{ runner.os }}-sonar-scanner
|
|
||||||
restore-keys: ${{ runner.os }}-sonar-scanner
|
|
||||||
|
|
||||||
- name: Install SonarCloud scanner
|
|
||||||
if: steps.cache-sonar-scanner.outputs.cache-hit != 'true'
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
New-Item -Path .\.sonar\scanner -ItemType Directory
|
|
||||||
dotnet tool update dotnet-sonarscanner --tool-path .\.sonar\scanner
|
|
||||||
|
|
||||||
- name: Sonar Scan
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
|
|
||||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
.\.sonar\scanner\dotnet-sonarscanner begin /k:"Kareadita_Kavita" /o:"kareadita" /d:sonar.token="${{ secrets.SONAR_TOKEN }}" /d:sonar.host.url="https://sonarcloud.io"
|
|
||||||
dotnet build --configuration Release
|
|
||||||
.\.sonar\scanner\dotnet-sonarscanner end /d:sonar.token="${{ secrets.SONAR_TOKEN }}"
|
|
||||||
|
|
||||||
- name: Test
|
- name: Test
|
||||||
run: dotnet test --no-restore --verbosity normal
|
run: dotnet test --no-restore --verbosity normal
|
||||||
|
|
|
||||||
40
.github/workflows/canary-workflow.yml
vendored
40
.github/workflows/canary-workflow.yml
vendored
|
|
@ -9,14 +9,14 @@ on:
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
name: Upload Kavita.Common for Version Bump
|
name: Upload Kavita.Common for Version Bump
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Repo
|
- name: Checkout Repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: csproj
|
name: csproj
|
||||||
path: Kavita.Common/Kavita.Common.csproj
|
path: Kavita.Common/Kavita.Common.csproj
|
||||||
|
|
@ -24,16 +24,16 @@ jobs:
|
||||||
version:
|
version:
|
||||||
name: Bump version
|
name: Bump version
|
||||||
needs: [ build ]
|
needs: [ build ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup .NET Core
|
- name: Setup .NET Core
|
||||||
uses: actions/setup-dotnet@v3
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 7.0.x
|
dotnet-version: 9.0.x
|
||||||
|
|
||||||
- name: Bump versions
|
- name: Bump versions
|
||||||
uses: SiqiLu/dotnet-bump-version@2.0.0
|
uses: SiqiLu/dotnet-bump-version@2.0.0
|
||||||
|
|
@ -45,7 +45,7 @@ jobs:
|
||||||
canary:
|
canary:
|
||||||
name: Build Canary Docker
|
name: Build Canary Docker
|
||||||
needs: [ build, version ]
|
needs: [ build, version ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
permissions:
|
||||||
packages: write
|
packages: write
|
||||||
contents: read
|
contents: read
|
||||||
|
|
@ -59,14 +59,14 @@ jobs:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Check Out Repo
|
- name: Check Out Repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: canary
|
ref: canary
|
||||||
|
|
||||||
- name: NodeJS to Compile WebUI
|
- name: NodeJS to Compile WebUI
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '18.13.x'
|
node-version: 20
|
||||||
- run: |
|
- run: |
|
||||||
cd UI/Web || exit
|
cd UI/Web || exit
|
||||||
echo 'Installing web dependencies'
|
echo 'Installing web dependencies'
|
||||||
|
|
@ -81,7 +81,7 @@ jobs:
|
||||||
cd ../ || exit
|
cd ../ || exit
|
||||||
|
|
||||||
- name: Get csproj Version
|
- name: Get csproj Version
|
||||||
uses: kzrnm/get-net-sdk-project-versions-action@v1
|
uses: kzrnm/get-net-sdk-project-versions-action@v2
|
||||||
id: get-version
|
id: get-version
|
||||||
with:
|
with:
|
||||||
proj-path: Kavita.Common/Kavita.Common.csproj
|
proj-path: Kavita.Common/Kavita.Common.csproj
|
||||||
|
|
@ -96,38 +96,38 @@ jobs:
|
||||||
run: echo "${{steps.get-version.outputs.assembly-version}}"
|
run: echo "${{steps.get-version.outputs.assembly-version}}"
|
||||||
|
|
||||||
- name: Compile dotnet app
|
- name: Compile dotnet app
|
||||||
uses: actions/setup-dotnet@v3
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 7.0.x
|
dotnet-version: 9.0.x
|
||||||
|
|
||||||
- name: Install Swashbuckle CLI
|
- name: Install Swashbuckle CLI
|
||||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
|
||||||
|
|
||||||
- run: ./monorepo-build.sh
|
- run: ./monorepo-build.sh
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
id: buildx
|
id: buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
id: docker_build
|
id: docker_build
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v5
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||||
|
|
|
||||||
19
.github/workflows/codeql.yml
vendored
19
.github/workflows/codeql.yml
vendored
|
|
@ -13,7 +13,7 @@ name: "CodeQL"
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [ "develop", "main" ]
|
branches: [ "develop"]
|
||||||
pull_request:
|
pull_request:
|
||||||
# The branches below must be a subset of the branches above
|
# The branches below must be a subset of the branches above
|
||||||
branches: [ "develop" ]
|
branches: [ "develop" ]
|
||||||
|
|
@ -38,7 +38,7 @@ jobs:
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
language: [ 'csharp', 'javascript-typescript', 'python' ]
|
language: [ 'csharp', 'javascript-typescript' ]
|
||||||
# CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
|
# CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
|
||||||
# Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
|
# Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
|
||||||
# Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
|
# Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
|
||||||
|
|
@ -46,15 +46,16 @@ jobs:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Swashbuckle CLI
|
- name: Setup .NET
|
||||||
shell: bash
|
uses: actions/setup-dotnet@v4
|
||||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
with:
|
||||||
|
dotnet-version: 9.0.x
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v2
|
uses: github/codeql-action/init@v3
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
|
|
@ -68,7 +69,7 @@ jobs:
|
||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v2
|
uses: github/codeql-action/autobuild@v3
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||||
|
|
@ -81,6 +82,6 @@ jobs:
|
||||||
dotnet build Kavita.sln
|
dotnet build Kavita.sln
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v2
|
uses: github/codeql-action/analyze@v3
|
||||||
with:
|
with:
|
||||||
category: "/language:${{matrix.language}}"
|
category: "/language:${{matrix.language}}"
|
||||||
|
|
|
||||||
69
.github/workflows/develop-workflow.yml
vendored
69
.github/workflows/develop-workflow.yml
vendored
|
|
@ -2,15 +2,12 @@ name: Nightly Workflow
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: ['!release/**']
|
|
||||||
pull_request:
|
|
||||||
branches: [ 'develop', '!release/**' ]
|
branches: [ 'develop', '!release/**' ]
|
||||||
types: [ closed ]
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
debug:
|
debug:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- name: Debug Info
|
- name: Debug Info
|
||||||
run: |
|
run: |
|
||||||
|
|
@ -20,15 +17,15 @@ jobs:
|
||||||
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
|
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
|
||||||
build:
|
build:
|
||||||
name: Upload Kavita.Common for Version Bump
|
name: Upload Kavita.Common for Version Bump
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
|
if: github.ref == 'refs/heads/develop'
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Repo
|
- name: Checkout Repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: csproj
|
name: csproj
|
||||||
path: Kavita.Common/Kavita.Common.csproj
|
path: Kavita.Common/Kavita.Common.csproj
|
||||||
|
|
@ -36,17 +33,17 @@ jobs:
|
||||||
version:
|
version:
|
||||||
name: Bump version
|
name: Bump version
|
||||||
needs: [ build ]
|
needs: [ build ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
|
if: github.ref == 'refs/heads/develop'
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Setup .NET Core
|
- name: Setup .NET Core
|
||||||
uses: actions/setup-dotnet@v3
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 7.0.x
|
dotnet-version: 9.0.x
|
||||||
|
|
||||||
- name: Bump versions
|
- name: Bump versions
|
||||||
uses: majora2007/dotnet-bump-version@v0.0.10
|
uses: majora2007/dotnet-bump-version@v0.0.10
|
||||||
|
|
@ -58,8 +55,8 @@ jobs:
|
||||||
develop:
|
develop:
|
||||||
name: Build Nightly Docker
|
name: Build Nightly Docker
|
||||||
needs: [ build, version ]
|
needs: [ build, version ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
if: github.event.pull_request.merged == true && !contains(github.head_ref, 'release')
|
if: github.ref == 'refs/heads/develop'
|
||||||
permissions:
|
permissions:
|
||||||
packages: write
|
packages: write
|
||||||
contents: read
|
contents: read
|
||||||
|
|
@ -92,18 +89,18 @@ jobs:
|
||||||
echo "BODY=$body" >> $GITHUB_OUTPUT
|
echo "BODY=$body" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Check Out Repo
|
- name: Check Out Repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: develop
|
ref: develop
|
||||||
|
|
||||||
- name: NodeJS to Compile WebUI
|
- name: NodeJS to Compile WebUI
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '18.13.x'
|
node-version: 20
|
||||||
- run: |
|
- run: |
|
||||||
cd UI/Web || exit
|
cd UI/Web || exit
|
||||||
echo 'Installing web dependencies'
|
echo 'Installing web dependencies'
|
||||||
npm install --legacy-peer-deps
|
npm ci
|
||||||
|
|
||||||
echo 'Building UI'
|
echo 'Building UI'
|
||||||
npm run prod
|
npm run prod
|
||||||
|
|
@ -114,7 +111,7 @@ jobs:
|
||||||
cd ../ || exit
|
cd ../ || exit
|
||||||
|
|
||||||
- name: Get csproj Version
|
- name: Get csproj Version
|
||||||
uses: kzrnm/get-net-sdk-project-versions-action@v1
|
uses: kzrnm/get-net-sdk-project-versions-action@v2
|
||||||
id: get-version
|
id: get-version
|
||||||
with:
|
with:
|
||||||
proj-path: Kavita.Common/Kavita.Common.csproj
|
proj-path: Kavita.Common/Kavita.Common.csproj
|
||||||
|
|
@ -129,49 +126,63 @@ jobs:
|
||||||
run: echo "${{steps.get-version.outputs.assembly-version}}"
|
run: echo "${{steps.get-version.outputs.assembly-version}}"
|
||||||
|
|
||||||
- name: Compile dotnet app
|
- name: Compile dotnet app
|
||||||
uses: actions/setup-dotnet@v3
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 7.0.x
|
dotnet-version: 9.0.x
|
||||||
|
|
||||||
- name: Install Swashbuckle CLI
|
- name: Install Swashbuckle CLI
|
||||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
|
||||||
|
|
||||||
- run: ./monorepo-build.sh
|
- run: ./monorepo-build.sh
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
|
if: ${{ github.repository_owner == 'Kareadita' }}
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
id: buildx
|
id: buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels) for Docker
|
||||||
|
id: docker_meta_nightly
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
tags: |
|
||||||
|
type=raw,value=nightly
|
||||||
|
type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
|
||||||
|
images: |
|
||||||
|
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
|
||||||
|
name=ghcr.io/${{ github.repository }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
id: docker_build
|
id: docker_build
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||||
push: true
|
push: true
|
||||||
tags: jvmilazz0/kavita:nightly, jvmilazz0/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:nightly, ghcr.io/kareadita/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
|
tags: ${{ steps.docker_meta_nightly.outputs.tags }}
|
||||||
|
labels: ${{ steps.docker_meta_nightly.outputs.labels }}
|
||||||
|
|
||||||
- name: Image digest
|
- name: Image digest
|
||||||
run: echo ${{ steps.docker_build.outputs.digest }}
|
run: echo ${{ steps.docker_build.outputs.digest }}
|
||||||
|
|
||||||
- name: Notify Discord
|
- name: Notify Discord
|
||||||
uses: rjstone/discord-webhook-notify@v1
|
uses: rjstone/discord-webhook-notify@v1
|
||||||
|
if: ${{ github.repository_owner == 'Kareadita' }}
|
||||||
with:
|
with:
|
||||||
severity: info
|
severity: info
|
||||||
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
|
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
|
||||||
|
|
|
||||||
68
.github/workflows/openapi-gen.yml
vendored
Normal file
68
.github/workflows/openapi-gen.yml
vendored
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|
name: Generate OpenAPI Documentation
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ 'develop', '!release/**' ]
|
||||||
|
paths:
|
||||||
|
- '**/*.cs'
|
||||||
|
- '**/*.csproj'
|
||||||
|
pull_request:
|
||||||
|
branches: [ 'develop', '!release/**' ]
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
generate-openapi:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# Only run on direct pushes to develop, not PRs
|
||||||
|
if: (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && github.repository_owner == 'Kareadita'
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup .NET
|
||||||
|
uses: actions/setup-dotnet@v4
|
||||||
|
with:
|
||||||
|
dotnet-version: 9.0.x
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: dotnet restore
|
||||||
|
|
||||||
|
- name: Build project
|
||||||
|
run: dotnet build API/API.csproj --configuration Debug
|
||||||
|
|
||||||
|
- name: Get Swashbuckle version
|
||||||
|
id: swashbuckle-version
|
||||||
|
run: |
|
||||||
|
VERSION=$(grep -o '<PackageReference Include="Swashbuckle.AspNetCore" Version="[^"]*"' API/API.csproj | grep -o 'Version="[^"]*"' | cut -d'"' -f2)
|
||||||
|
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
|
||||||
|
echo "Found Swashbuckle.AspNetCore version: $VERSION"
|
||||||
|
|
||||||
|
- name: Install matching Swashbuckle CLI tool
|
||||||
|
run: |
|
||||||
|
dotnet new tool-manifest --force
|
||||||
|
dotnet tool install Swashbuckle.AspNetCore.Cli --version ${{ steps.swashbuckle-version.outputs.VERSION }}
|
||||||
|
|
||||||
|
- name: Generate OpenAPI file
|
||||||
|
run: dotnet swagger tofile --output openapi.json API/bin/Debug/net9.0/API.dll v1
|
||||||
|
|
||||||
|
- name: Check for changes
|
||||||
|
id: git-check
|
||||||
|
run: |
|
||||||
|
git add openapi.json
|
||||||
|
git diff --staged --quiet openapi.json || echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Commit and push if changed
|
||||||
|
if: steps.git-check.outputs.has_changes == 'true'
|
||||||
|
run: |
|
||||||
|
git config --local user.email "action@github.com"
|
||||||
|
git config --local user.name "GitHub Action"
|
||||||
|
|
||||||
|
git commit -m "Update OpenAPI documentation" openapi.json
|
||||||
|
|
||||||
|
# Pull latest changes with rebase to avoid merge commits
|
||||||
|
git pull --rebase origin develop
|
||||||
|
|
||||||
|
git push
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.REPO_GHA_PAT }}
|
||||||
4
.github/workflows/pr-check.yml
vendored
4
.github/workflows/pr-check.yml
vendored
|
|
@ -1,15 +1,13 @@
|
||||||
name: Validate PR Body
|
name: Validate PR Body
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
|
||||||
branches: '**'
|
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [ main, develop, canary ]
|
branches: [ main, develop, canary ]
|
||||||
types: [synchronize]
|
types: [synchronize]
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check_pr:
|
check_pr:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- name: Extract branch name
|
- name: Extract branch name
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
|
||||||
102
.github/workflows/release-workflow.yml
vendored
102
.github/workflows/release-workflow.yml
vendored
|
|
@ -10,7 +10,7 @@ on:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
debug:
|
debug:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- name: Debug Info
|
- name: Debug Info
|
||||||
run: |
|
run: |
|
||||||
|
|
@ -20,21 +20,21 @@ jobs:
|
||||||
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
|
echo "Matches Develop: ${{ github.ref == 'refs/heads/develop' }}"
|
||||||
if_merged:
|
if_merged:
|
||||||
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- run: |
|
- run: |
|
||||||
echo The PR was merged
|
echo The PR was merged
|
||||||
build:
|
build:
|
||||||
name: Upload Kavita.Common for Version Bump
|
name: Upload Kavita.Common for Version Bump
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout Repo
|
- name: Checkout Repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: csproj
|
name: csproj
|
||||||
path: Kavita.Common/Kavita.Common.csproj
|
path: Kavita.Common/Kavita.Common.csproj
|
||||||
|
|
@ -43,7 +43,7 @@ jobs:
|
||||||
name: Build Stable and Nightly Docker if Release
|
name: Build Stable and Nightly Docker if Release
|
||||||
needs: [ build ]
|
needs: [ build ]
|
||||||
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
if: github.event.pull_request.merged == true && contains(github.head_ref, 'release')
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
permissions:
|
permissions:
|
||||||
packages: write
|
packages: write
|
||||||
contents: read
|
contents: read
|
||||||
|
|
@ -58,38 +58,25 @@ jobs:
|
||||||
- name: Parse PR body
|
- name: Parse PR body
|
||||||
id: parse-body
|
id: parse-body
|
||||||
run: |
|
run: |
|
||||||
body="${{ steps.findPr.outputs.body }}"
|
body="Read full changelog: https://github.com/Kareadita/Kavita/releases/latest"
|
||||||
if [[ ${#body} -gt 1870 ]] ; then
|
|
||||||
body=${body:0:1870}
|
|
||||||
body="${body}...and much more.
|
|
||||||
|
|
||||||
Read full changelog: https://github.com/Kareadita/Kavita/releases/latest"
|
|
||||||
fi
|
|
||||||
|
|
||||||
body=${body//\'/}
|
|
||||||
body=${body//'%'/'%25'}
|
|
||||||
body=${body//$'\n'/'%0A'}
|
|
||||||
body=${body//$'\r'/'%0D'}
|
|
||||||
body=${body//$'`'/'%60'}
|
|
||||||
body=${body//$'>'/'%3E'}
|
|
||||||
echo $body
|
echo $body
|
||||||
echo "BODY=$body" >> $GITHUB_OUTPUT
|
echo "BODY=$body" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
|
||||||
- name: Check Out Repo
|
- name: Check Out Repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: develop
|
ref: develop
|
||||||
|
|
||||||
- name: NodeJS to Compile WebUI
|
- name: NodeJS to Compile WebUI
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
node-version: '18.13.x'
|
node-version: 20
|
||||||
- run: |
|
- run: |
|
||||||
|
|
||||||
cd UI/Web || exit
|
cd UI/Web || exit
|
||||||
echo 'Installing web dependencies'
|
echo 'Installing web dependencies'
|
||||||
npm install --legacy-peer-deps
|
npm ci
|
||||||
|
|
||||||
echo 'Building UI'
|
echo 'Building UI'
|
||||||
npm run prod
|
npm run prod
|
||||||
|
|
@ -100,7 +87,7 @@ jobs:
|
||||||
cd ../ || exit
|
cd ../ || exit
|
||||||
|
|
||||||
- name: Get csproj Version
|
- name: Get csproj Version
|
||||||
uses: kzrnm/get-net-sdk-project-versions-action@v1
|
uses: kzrnm/get-net-sdk-project-versions-action@v2
|
||||||
id: get-version
|
id: get-version
|
||||||
with:
|
with:
|
||||||
proj-path: Kavita.Common/Kavita.Common.csproj
|
proj-path: Kavita.Common/Kavita.Common.csproj
|
||||||
|
|
@ -117,72 +104,79 @@ jobs:
|
||||||
id: parse-version
|
id: parse-version
|
||||||
|
|
||||||
- name: Compile dotnet app
|
- name: Compile dotnet app
|
||||||
uses: actions/setup-dotnet@v3
|
uses: actions/setup-dotnet@v4
|
||||||
with:
|
with:
|
||||||
dotnet-version: 7.0.x
|
dotnet-version: 9.0.x
|
||||||
- name: Install Swashbuckle CLI
|
- name: Install Swashbuckle CLI
|
||||||
run: dotnet tool install -g --version 6.5.0 Swashbuckle.AspNetCore.Cli
|
run: dotnet tool install -g Swashbuckle.AspNetCore.Cli
|
||||||
|
|
||||||
- run: ./monorepo-build.sh
|
- run: ./monorepo-build.sh
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
|
if: ${{ github.repository_owner == 'Kareadita' }}
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
id: buildx
|
id: buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels) for Docker
|
||||||
|
id: docker_meta_stable
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
tags: |
|
||||||
|
type=raw,value=latest
|
||||||
|
type=raw,value=${{ steps.parse-version.outputs.VERSION }}
|
||||||
|
images: |
|
||||||
|
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
|
||||||
|
name=ghcr.io/${{ github.repository }}
|
||||||
|
|
||||||
- name: Build and push stable
|
- name: Build and push stable
|
||||||
id: docker_build_stable
|
id: docker_build_stable
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||||
push: true
|
push: true
|
||||||
tags: jvmilazz0/kavita:latest, jvmilazz0/kavita:${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:latest, ghcr.io/kareadita/kavita:${{ steps.parse-version.outputs.VERSION }}
|
tags: ${{ steps.docker_meta_stable.outputs.tags }}
|
||||||
|
labels: ${{ steps.docker_meta_stable.outputs.labels }}
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels) for Docker
|
||||||
|
id: docker_meta_nightly
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
tags: |
|
||||||
|
type=raw,value=nightly
|
||||||
|
type=raw,value=nightly-${{ steps.parse-version.outputs.VERSION }}
|
||||||
|
images: |
|
||||||
|
name=jvmilazz0/kavita,enable=${{ github.repository_owner == 'Kareadita' }}
|
||||||
|
name=ghcr.io/${{ github.repository }}
|
||||||
|
|
||||||
- name: Build and push nightly
|
- name: Build and push nightly
|
||||||
id: docker_build_nightly
|
id: docker_build_nightly
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||||
push: true
|
push: true
|
||||||
tags: jvmilazz0/kavita:nightly, jvmilazz0/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}, ghcr.io/kareadita/kavita:nightly, ghcr.io/kareadita/kavita:nightly-${{ steps.parse-version.outputs.VERSION }}
|
tags: ${{ steps.docker_meta_nightly.outputs.tags }}
|
||||||
|
labels: ${{ steps.docker_meta_nightly.outputs.labels }}
|
||||||
|
|
||||||
- name: Image digest
|
- name: Image digest
|
||||||
run: echo ${{ steps.docker_build_stable.outputs.digest }}
|
run: echo ${{ steps.docker_build_stable.outputs.digest }}
|
||||||
|
|
||||||
- name: Image digest
|
- name: Image digest
|
||||||
run: echo ${{ steps.docker_build_nightly.outputs.digest }}
|
run: echo ${{ steps.docker_build_nightly.outputs.digest }}
|
||||||
|
|
||||||
- name: Notify Discord
|
|
||||||
uses: rjstone/discord-webhook-notify@v1
|
|
||||||
with:
|
|
||||||
severity: info
|
|
||||||
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
|
|
||||||
details: '${{ steps.findPr.outputs.body }}'
|
|
||||||
text: <@&939225192553644133> A new stable build has been released.
|
|
||||||
webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
|
|
||||||
|
|
||||||
- name: Notify Discord
|
|
||||||
uses: rjstone/discord-webhook-notify@v1
|
|
||||||
with:
|
|
||||||
severity: info
|
|
||||||
description: v${{steps.get-version.outputs.assembly-version}} - ${{ steps.findPr.outputs.title }}
|
|
||||||
details: '${{ steps.findPr.outputs.body }}'
|
|
||||||
text: <@&939225459156217917> <@&939225350775406643> A new nightly build has been released for docker.
|
|
||||||
webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}
|
|
||||||
|
|
|
||||||
14
.gitignore
vendored
14
.gitignore
vendored
|
|
@ -513,6 +513,7 @@ UI/Web/dist/
|
||||||
/API/config/stats/
|
/API/config/stats/
|
||||||
/API/config/bookmarks/
|
/API/config/bookmarks/
|
||||||
/API/config/favicons/
|
/API/config/favicons/
|
||||||
|
/API/config/cache-long/
|
||||||
/API/config/kavita.db
|
/API/config/kavita.db
|
||||||
/API/config/kavita.db-shm
|
/API/config/kavita.db-shm
|
||||||
/API/config/kavita.db-wal
|
/API/config/kavita.db-wal
|
||||||
|
|
@ -520,17 +521,26 @@ UI/Web/dist/
|
||||||
/API/config/*.db
|
/API/config/*.db
|
||||||
/API/config/*.bak
|
/API/config/*.bak
|
||||||
/API/config/*.backup
|
/API/config/*.backup
|
||||||
|
/API/config/*.csv
|
||||||
/API/config/Hangfire.db
|
/API/config/Hangfire.db
|
||||||
/API/config/Hangfire-log.db
|
/API/config/Hangfire-log.db
|
||||||
API/config/covers/
|
API/config/covers/
|
||||||
|
API/config/images/*
|
||||||
API/config/stats/*
|
API/config/stats/*
|
||||||
API/config/stats/app_stats.json
|
API/config/stats/app_stats.json
|
||||||
API/config/pre-metadata/
|
API/config/pre-metadata/
|
||||||
API/config/post-metadata/
|
API/config/post-metadata/
|
||||||
API/config/relations-imported.csv
|
API/config/*.csv
|
||||||
API/config/relations.csv
|
|
||||||
API.Tests/TestResults/
|
API.Tests/TestResults/
|
||||||
UI/Web/.vscode/settings.json
|
UI/Web/.vscode/settings.json
|
||||||
/API.Tests/Services/Test Data/ArchiveService/CoverImages/output/*
|
/API.Tests/Services/Test Data/ArchiveService/CoverImages/output/*
|
||||||
UI/Web/.angular/
|
UI/Web/.angular/
|
||||||
BenchmarkDotNet.Artifacts
|
BenchmarkDotNet.Artifacts
|
||||||
|
|
||||||
|
|
||||||
|
API.Tests/Services/Test Data/ImageService/**/*_output*
|
||||||
|
API.Tests/Services/Test Data/ImageService/**/*_baseline*
|
||||||
|
API.Tests/Services/Test Data/ImageService/**/*.html
|
||||||
|
|
||||||
|
|
||||||
|
API.Tests/Services/Test Data/ScannerService/ScanTests/**/*
|
||||||
|
|
|
||||||
15
.sonarcloud.properties
Normal file
15
.sonarcloud.properties
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
# Path to sources
|
||||||
|
sonar.sources=.
|
||||||
|
sonar.exclusions=API.Benchmark
|
||||||
|
#sonar.inclusions=
|
||||||
|
|
||||||
|
# Path to tests
|
||||||
|
sonar.tests=API.Tests
|
||||||
|
#sonar.test.exclusions=
|
||||||
|
#sonar.test.inclusions=
|
||||||
|
|
||||||
|
# Source encoding
|
||||||
|
sonar.sourceEncoding=UTF-8
|
||||||
|
|
||||||
|
# Exclusions for copy-paste detection
|
||||||
|
#sonar.cpd.exclusions=
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
<Project Sdk="Microsoft.NET.Sdk">
|
<Project Sdk="Microsoft.NET.Sdk">
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<TargetFramework>net7.0</TargetFramework>
|
<TargetFramework>net9.0</TargetFramework>
|
||||||
<OutputType>Exe</OutputType>
|
<OutputType>Exe</OutputType>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
|
|
@ -10,9 +10,9 @@
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<PackageReference Include="BenchmarkDotNet" Version="0.13.10" />
|
<PackageReference Include="BenchmarkDotNet" Version="0.15.1" />
|
||||||
<PackageReference Include="BenchmarkDotNet.Annotations" Version="0.13.10" />
|
<PackageReference Include="BenchmarkDotNet.Annotations" Version="0.15.1" />
|
||||||
<PackageReference Include="NSubstitute" Version="5.1.0" />
|
<PackageReference Include="NSubstitute" Version="5.3.0" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
|
|
@ -26,5 +26,10 @@
|
||||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||||
</Content>
|
</Content>
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
<ItemGroup>
|
||||||
|
<None Update="Data\AesopsFables.epub">
|
||||||
|
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||||
|
</None>
|
||||||
|
</ItemGroup>
|
||||||
|
|
||||||
</Project>
|
</Project>
|
||||||
|
|
|
||||||
|
|
@ -32,7 +32,7 @@ public class ArchiveServiceBenchmark
|
||||||
public ArchiveServiceBenchmark()
|
public ArchiveServiceBenchmark()
|
||||||
{
|
{
|
||||||
_directoryService = new DirectoryService(null, new FileSystem());
|
_directoryService = new DirectoryService(null, new FileSystem());
|
||||||
_imageService = new ImageService(null, _directoryService, Substitute.For<IEasyCachingProviderFactory>());
|
_imageService = new ImageService(null, _directoryService);
|
||||||
_archiveService = new ArchiveService(new NullLogger<ArchiveService>(), _directoryService, _imageService, Substitute.For<IMediaErrorService>());
|
_archiveService = new ArchiveService(new NullLogger<ArchiveService>(), _directoryService, _imageService, Substitute.For<IMediaErrorService>());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
BIN
API.Benchmark/Data/AesopsFables.epub
Normal file
BIN
API.Benchmark/Data/AesopsFables.epub
Normal file
Binary file not shown.
41
API.Benchmark/KoreaderHashBenchmark.cs
Normal file
41
API.Benchmark/KoreaderHashBenchmark.cs
Normal file
|
|
@ -0,0 +1,41 @@
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using BenchmarkDotNet.Attributes;
|
||||||
|
using BenchmarkDotNet.Order;
|
||||||
|
using System;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
|
||||||
|
namespace API.Benchmark
|
||||||
|
{
|
||||||
|
[StopOnFirstError]
|
||||||
|
[MemoryDiagnoser]
|
||||||
|
[RankColumn]
|
||||||
|
[Orderer(SummaryOrderPolicy.FastestToSlowest)]
|
||||||
|
[SimpleJob(launchCount: 1, warmupCount: 5, invocationCount: 20)]
|
||||||
|
public class KoreaderHashBenchmark
|
||||||
|
{
|
||||||
|
private const string sourceEpub = "./Data/AesopsFables.epub";
|
||||||
|
|
||||||
|
[Benchmark(Baseline = true)]
|
||||||
|
public void TestBuildManga_baseline()
|
||||||
|
{
|
||||||
|
var file = new MangaFileBuilder(sourceEpub, MangaFormat.Epub)
|
||||||
|
.Build();
|
||||||
|
if (file == null)
|
||||||
|
{
|
||||||
|
throw new Exception("Failed to build manga file");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
[Benchmark]
|
||||||
|
public void TestBuildManga_withHash()
|
||||||
|
{
|
||||||
|
var file = new MangaFileBuilder(sourceEpub, MangaFormat.Epub)
|
||||||
|
.WithHash()
|
||||||
|
.Build();
|
||||||
|
if (file == null)
|
||||||
|
{
|
||||||
|
throw new Exception("Failed to build manga file");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -25,7 +25,7 @@ public class TestBenchmark
|
||||||
{
|
{
|
||||||
list.Add(new VolumeDto()
|
list.Add(new VolumeDto()
|
||||||
{
|
{
|
||||||
Number = random.Next(10) > 5 ? 1 : 0,
|
MinNumber = random.Next(10) > 5 ? 1 : 0,
|
||||||
Chapters = GenerateChapters()
|
Chapters = GenerateChapters()
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
@ -49,7 +49,7 @@ public class TestBenchmark
|
||||||
|
|
||||||
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
|
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
|
||||||
{
|
{
|
||||||
foreach (var v in volumes.Where(vDto => vDto.Number == 0))
|
foreach (var v in volumes.WhereNotLooseLeaf())
|
||||||
{
|
{
|
||||||
v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList();
|
v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList();
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,22 +1,22 @@
|
||||||
<Project Sdk="Microsoft.NET.Sdk">
|
<Project Sdk="Microsoft.NET.Sdk">
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<TargetFramework>net7.0</TargetFramework>
|
<TargetFramework>net9.0</TargetFramework>
|
||||||
<IsPackable>false</IsPackable>
|
<IsPackable>false</IsPackable>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="7.0.13" />
|
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.6" />
|
||||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0" />
|
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
|
||||||
<PackageReference Include="NSubstitute" Version="5.1.0" />
|
<PackageReference Include="NSubstitute" Version="5.3.0" />
|
||||||
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="19.2.69" />
|
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="22.0.14" />
|
||||||
<PackageReference Include="TestableIO.System.IO.Abstractions.Wrappers" Version="19.2.69" />
|
<PackageReference Include="TestableIO.System.IO.Abstractions.Wrappers" Version="22.0.14" />
|
||||||
<PackageReference Include="xunit" Version="2.6.1" />
|
<PackageReference Include="xunit" Version="2.9.3" />
|
||||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.3">
|
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.1">
|
||||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||||
<PrivateAssets>all</PrivateAssets>
|
<PrivateAssets>all</PrivateAssets>
|
||||||
</PackageReference>
|
</PackageReference>
|
||||||
<PackageReference Include="coverlet.collector" Version="6.0.0">
|
<PackageReference Include="coverlet.collector" Version="6.0.4">
|
||||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||||
<PrivateAssets>all</PrivateAssets>
|
<PrivateAssets>all</PrivateAssets>
|
||||||
</PackageReference>
|
</PackageReference>
|
||||||
|
|
@ -28,11 +28,18 @@
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<Folder Include="Services\Test Data\ArchiveService\ComicInfos" />
|
<Folder Include="Services\Test Data\ArchiveService\ComicInfos" />
|
||||||
<Folder Include="Services\Test Data\ScannerService\Manga" />
|
<Folder Include="Services\Test Data\CoverDbService\" />
|
||||||
|
<Folder Include="Services\Test Data\ImageService\Covers\" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<None Remove="Extensions\Test Data\modified on run.txt" />
|
<None Remove="Extensions\Test Data\modified on run.txt" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
|
<ItemGroup>
|
||||||
|
<None Update="Data\AesopsFables.epub">
|
||||||
|
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||||
|
</None>
|
||||||
|
</ItemGroup>
|
||||||
|
|
||||||
</Project>
|
</Project>
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,5 @@
|
||||||
using System.Collections.Generic;
|
using System;
|
||||||
using System.Data.Common;
|
using System.Data.Common;
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using API.Data;
|
using API.Data;
|
||||||
|
|
@ -10,6 +9,7 @@ using API.Helpers;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using AutoMapper;
|
using AutoMapper;
|
||||||
|
using Hangfire;
|
||||||
using Microsoft.Data.Sqlite;
|
using Microsoft.Data.Sqlite;
|
||||||
using Microsoft.EntityFrameworkCore;
|
using Microsoft.EntityFrameworkCore;
|
||||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||||
|
|
@ -18,36 +18,34 @@ using NSubstitute;
|
||||||
|
|
||||||
namespace API.Tests;
|
namespace API.Tests;
|
||||||
|
|
||||||
public abstract class AbstractDbTest
|
public abstract class AbstractDbTest : AbstractFsTest , IDisposable
|
||||||
{
|
{
|
||||||
protected readonly DbConnection _connection;
|
protected readonly DataContext Context;
|
||||||
protected readonly DataContext _context;
|
protected readonly IUnitOfWork UnitOfWork;
|
||||||
protected readonly IUnitOfWork _unitOfWork;
|
protected readonly IMapper Mapper;
|
||||||
|
private readonly DbConnection _connection;
|
||||||
|
private bool _disposed;
|
||||||
protected const string CacheDirectory = "C:/kavita/config/cache/";
|
|
||||||
protected const string CoverImageDirectory = "C:/kavita/config/covers/";
|
|
||||||
protected const string BackupDirectory = "C:/kavita/config/backups/";
|
|
||||||
protected const string LogDirectory = "C:/kavita/config/logs/";
|
|
||||||
protected const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
|
|
||||||
protected const string SiteThemeDirectory = "C:/kavita/config/themes/";
|
|
||||||
protected const string TempDirectory = "C:/kavita/config/temp/";
|
|
||||||
protected const string DataDirectory = "C:/data/";
|
|
||||||
|
|
||||||
protected AbstractDbTest()
|
protected AbstractDbTest()
|
||||||
{
|
{
|
||||||
var contextOptions = new DbContextOptionsBuilder()
|
var contextOptions = new DbContextOptionsBuilder<DataContext>()
|
||||||
.UseSqlite(CreateInMemoryDatabase())
|
.UseSqlite(CreateInMemoryDatabase())
|
||||||
|
.EnableSensitiveDataLogging()
|
||||||
.Options;
|
.Options;
|
||||||
|
|
||||||
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
|
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
|
||||||
|
|
||||||
_context = new DataContext(contextOptions);
|
Context = new DataContext(contextOptions);
|
||||||
|
|
||||||
|
Context.Database.EnsureCreated(); // Ensure DB schema is created
|
||||||
|
|
||||||
Task.Run(SeedDb).GetAwaiter().GetResult();
|
Task.Run(SeedDb).GetAwaiter().GetResult();
|
||||||
|
|
||||||
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
|
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
|
||||||
var mapper = config.CreateMapper();
|
Mapper = config.CreateMapper();
|
||||||
|
|
||||||
_unitOfWork = new UnitOfWork(_context, mapper, null);
|
GlobalConfiguration.Configuration.UseInMemoryStorage();
|
||||||
|
UnitOfWork = new UnitOfWork(Context, Mapper, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static DbConnection CreateInMemoryDatabase()
|
private static DbConnection CreateInMemoryDatabase()
|
||||||
|
|
@ -60,47 +58,79 @@ public abstract class AbstractDbTest
|
||||||
|
|
||||||
private async Task<bool> SeedDb()
|
private async Task<bool> SeedDb()
|
||||||
{
|
{
|
||||||
await _context.Database.MigrateAsync();
|
try
|
||||||
var filesystem = CreateFileSystem();
|
{
|
||||||
|
await Context.Database.EnsureCreatedAsync();
|
||||||
|
var filesystem = CreateFileSystem();
|
||||||
|
|
||||||
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
|
await Seed.SeedSettings(Context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
|
||||||
|
|
||||||
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
|
var setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
|
||||||
setting.Value = CacheDirectory;
|
setting.Value = CacheDirectory;
|
||||||
|
|
||||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
|
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
|
||||||
setting.Value = BackupDirectory;
|
setting.Value = BackupDirectory;
|
||||||
|
|
||||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
|
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
|
||||||
setting.Value = BookmarkDirectory;
|
setting.Value = BookmarkDirectory;
|
||||||
|
|
||||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
|
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
|
||||||
setting.Value = "10";
|
setting.Value = "10";
|
||||||
|
|
||||||
_context.ServerSetting.Update(setting);
|
Context.ServerSetting.Update(setting);
|
||||||
|
|
||||||
_context.Library.Add(new LibraryBuilder("Manga")
|
|
||||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
Context.Library.Add(new LibraryBuilder("Manga")
|
||||||
.Build());
|
.WithAllowMetadataMatching(true)
|
||||||
return await _context.SaveChangesAsync() > 0;
|
.WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
|
||||||
|
.Build());
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
await Seed.SeedMetadataSettings(Context);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Console.WriteLine($"[SeedDb] Error: {ex.Message}");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
protected abstract Task ResetDb();
|
protected abstract Task ResetDb();
|
||||||
|
|
||||||
protected static MockFileSystem CreateFileSystem()
|
public void Dispose()
|
||||||
{
|
{
|
||||||
var fileSystem = new MockFileSystem();
|
Dispose(true);
|
||||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
GC.SuppressFinalize(this);
|
||||||
fileSystem.AddDirectory("C:/kavita/config/");
|
}
|
||||||
fileSystem.AddDirectory(CacheDirectory);
|
|
||||||
fileSystem.AddDirectory(CoverImageDirectory);
|
|
||||||
fileSystem.AddDirectory(BackupDirectory);
|
|
||||||
fileSystem.AddDirectory(BookmarkDirectory);
|
|
||||||
fileSystem.AddDirectory(SiteThemeDirectory);
|
|
||||||
fileSystem.AddDirectory(LogDirectory);
|
|
||||||
fileSystem.AddDirectory(TempDirectory);
|
|
||||||
fileSystem.AddDirectory(DataDirectory);
|
|
||||||
|
|
||||||
return fileSystem;
|
protected virtual void Dispose(bool disposing)
|
||||||
|
{
|
||||||
|
if (_disposed) return;
|
||||||
|
|
||||||
|
if (disposing)
|
||||||
|
{
|
||||||
|
Context?.Dispose();
|
||||||
|
_connection?.Dispose();
|
||||||
|
}
|
||||||
|
|
||||||
|
_disposed = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Add a role to an existing User. Commits.
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="userId"></param>
|
||||||
|
/// <param name="roleName"></param>
|
||||||
|
protected async Task AddUserWithRole(int userId, string roleName)
|
||||||
|
{
|
||||||
|
var role = new AppRole { Id = userId, Name = roleName, NormalizedName = roleName.ToUpper() };
|
||||||
|
|
||||||
|
await Context.Roles.AddAsync(role);
|
||||||
|
await Context.UserRoles.AddAsync(new AppUserRole { UserId = userId, RoleId = userId });
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
44
API.Tests/AbstractFsTest.cs
Normal file
44
API.Tests/AbstractFsTest.cs
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
|
||||||
|
|
||||||
|
using System.IO;
|
||||||
|
using System.IO.Abstractions;
|
||||||
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
|
|
||||||
|
namespace API.Tests;
|
||||||
|
|
||||||
|
public abstract class AbstractFsTest
|
||||||
|
{
|
||||||
|
|
||||||
|
protected static readonly string Root = Parser.NormalizePath(Path.GetPathRoot(Directory.GetCurrentDirectory()));
|
||||||
|
protected static readonly string ConfigDirectory = Root + "kavita/config/";
|
||||||
|
protected static readonly string CacheDirectory = ConfigDirectory + "cache/";
|
||||||
|
protected static readonly string CacheLongDirectory = ConfigDirectory + "cache-long/";
|
||||||
|
protected static readonly string CoverImageDirectory = ConfigDirectory + "covers/";
|
||||||
|
protected static readonly string BackupDirectory = ConfigDirectory + "backups/";
|
||||||
|
protected static readonly string LogDirectory = ConfigDirectory + "logs/";
|
||||||
|
protected static readonly string BookmarkDirectory = ConfigDirectory + "bookmarks/";
|
||||||
|
protected static readonly string SiteThemeDirectory = ConfigDirectory + "themes/";
|
||||||
|
protected static readonly string TempDirectory = ConfigDirectory + "temp/";
|
||||||
|
protected static readonly string ThemesDirectory = ConfigDirectory + "theme";
|
||||||
|
protected static readonly string DataDirectory = Root + "data/";
|
||||||
|
|
||||||
|
protected static MockFileSystem CreateFileSystem()
|
||||||
|
{
|
||||||
|
var fileSystem = new MockFileSystem();
|
||||||
|
fileSystem.Directory.SetCurrentDirectory(Root + "kavita/");
|
||||||
|
fileSystem.AddDirectory(Root + "kavita/config/");
|
||||||
|
fileSystem.AddDirectory(CacheDirectory);
|
||||||
|
fileSystem.AddDirectory(CacheLongDirectory);
|
||||||
|
fileSystem.AddDirectory(CoverImageDirectory);
|
||||||
|
fileSystem.AddDirectory(BackupDirectory);
|
||||||
|
fileSystem.AddDirectory(BookmarkDirectory);
|
||||||
|
fileSystem.AddDirectory(SiteThemeDirectory);
|
||||||
|
fileSystem.AddDirectory(LogDirectory);
|
||||||
|
fileSystem.AddDirectory(TempDirectory);
|
||||||
|
fileSystem.AddDirectory(DataDirectory);
|
||||||
|
fileSystem.AddDirectory(ThemesDirectory);
|
||||||
|
|
||||||
|
return fileSystem;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -4,15 +4,16 @@ using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Comparers;
|
namespace API.Tests.Comparers;
|
||||||
|
|
||||||
public class ChapterSortComparerTest
|
public class ChapterSortComparerDefaultLastTest
|
||||||
{
|
{
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(new[] {1, 2, 0}, new[] {1, 2, 0})]
|
[InlineData(new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber}, new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||||
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
|
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
|
||||||
[InlineData(new[] {1, 0, 0}, new[] {1, 0, 0})]
|
[InlineData(new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||||
|
[InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||||
public void ChapterSortTest(int[] input, int[] expected)
|
public void ChapterSortTest(int[] input, int[] expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparer()).ToArray());
|
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultLast()).ToArray());
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,7 @@ using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Comparers;
|
namespace API.Tests.Comparers;
|
||||||
|
|
||||||
public class ChapterSortComparerZeroFirstTests
|
public class ChapterSortComparerDefaultFirstTests
|
||||||
{
|
{
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(new[] {1, 2, 0}, new[] {0, 1, 2,})]
|
[InlineData(new[] {1, 2, 0}, new[] {0, 1, 2,})]
|
||||||
|
|
@ -12,13 +12,13 @@ public class ChapterSortComparerZeroFirstTests
|
||||||
[InlineData(new[] {1, 0, 0}, new[] {0, 0, 1})]
|
[InlineData(new[] {1, 0, 0}, new[] {0, 0, 1})]
|
||||||
public void ChapterSortComparerZeroFirstTest(int[] input, int[] expected)
|
public void ChapterSortComparerZeroFirstTest(int[] input, int[] expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerZeroFirst()).ToArray());
|
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultFirst()).ToArray());
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(new[] {1.0, 0.5, 0.3}, new[] {0.3, 0.5, 1.0})]
|
[InlineData(new [] {1.0f, 0.5f, 0.3f}, new [] {0.3f, 0.5f, 1.0f})]
|
||||||
public void ChapterSortComparerZeroFirstTest_Doubles(double[] input, double[] expected)
|
public void ChapterSortComparerZeroFirstTest_Doubles(float[] input, float[] expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerZeroFirst()).ToArray());
|
Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparerDefaultFirst()).ToArray());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -7,11 +7,11 @@ namespace API.Tests.Comparers;
|
||||||
public class SortComparerZeroLastTests
|
public class SortComparerZeroLastTests
|
||||||
{
|
{
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(new[] {0, 1, 2,}, new[] {1, 2, 0})]
|
[InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1, 2,}, new[] {1, 2, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||||
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
|
[InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})]
|
||||||
[InlineData(new[] {0, 0, 1}, new[] {1, 0, 0})]
|
[InlineData(new[] {API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, 1}, new[] {1, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapterNumber})]
|
||||||
public void SortComparerZeroLastTest(int[] input, int[] expected)
|
public void SortComparerZeroLastTest(int[] input, int[] expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, input.OrderBy(f => f, SortComparerZeroLast.Default).ToArray());
|
Assert.Equal(expected, input.OrderBy(f => f, ChapterSortComparerDefaultLast.Default).ToArray());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -2,16 +2,18 @@
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Converters;
|
namespace API.Tests.Converters;
|
||||||
|
#nullable enable
|
||||||
public class CronConverterTests
|
public class CronConverterTests
|
||||||
{
|
{
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("daily", "0 0 * * *")]
|
[InlineData("daily", "0 0 * * *")]
|
||||||
[InlineData("disabled", "0 0 31 2 *")]
|
[InlineData("disabled", "0 0 31 2 *")]
|
||||||
[InlineData("weekly", "0 0 * * 1")]
|
[InlineData("weekly", "0 0 * * 1")]
|
||||||
[InlineData("", "0 0 31 2 *")]
|
[InlineData("0 0 31 2 *", "0 0 31 2 *")]
|
||||||
[InlineData("sdfgdf", "")]
|
[InlineData("sdfgdf", "sdfgdf")]
|
||||||
public void ConvertTest(string input, string expected)
|
[InlineData("* * * * *", "* * * * *")]
|
||||||
|
[InlineData(null, "0 0 * * *")] // daily
|
||||||
|
public void ConvertTest(string? input, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, CronConverter.ConvertToCronNotation(input));
|
Assert.Equal(expected, CronConverter.ConvertToCronNotation(input));
|
||||||
}
|
}
|
||||||
|
|
|
||||||
BIN
API.Tests/Data/AesopsFables.epub
Normal file
BIN
API.Tests/Data/AesopsFables.epub
Normal file
Binary file not shown.
|
|
@ -30,7 +30,7 @@ public class ChapterListExtensionsTests
|
||||||
{
|
{
|
||||||
var info = new ParserInfo()
|
var info = new ParserInfo()
|
||||||
{
|
{
|
||||||
Chapters = "0",
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
|
||||||
Edition = "",
|
Edition = "",
|
||||||
Format = MangaFormat.Archive,
|
Format = MangaFormat.Archive,
|
||||||
FullFilePath = "/manga/darker than black.cbz",
|
FullFilePath = "/manga/darker than black.cbz",
|
||||||
|
|
@ -38,12 +38,12 @@ public class ChapterListExtensionsTests
|
||||||
IsSpecial = false,
|
IsSpecial = false,
|
||||||
Series = "darker than black",
|
Series = "darker than black",
|
||||||
Title = "darker than black",
|
Title = "darker than black",
|
||||||
Volumes = "0"
|
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||||
};
|
};
|
||||||
|
|
||||||
var chapterList = new List<Chapter>()
|
var chapterList = new List<Chapter>()
|
||||||
{
|
{
|
||||||
CreateChapter("darker than black - Some special", "0", CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
|
CreateChapter("darker than black - Some special", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
|
||||||
};
|
};
|
||||||
|
|
||||||
var actualChapter = chapterList.GetChapterByRange(info);
|
var actualChapter = chapterList.GetChapterByRange(info);
|
||||||
|
|
@ -57,7 +57,7 @@ public class ChapterListExtensionsTests
|
||||||
{
|
{
|
||||||
var info = new ParserInfo()
|
var info = new ParserInfo()
|
||||||
{
|
{
|
||||||
Chapters = "0",
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
|
||||||
Edition = "",
|
Edition = "",
|
||||||
Format = MangaFormat.Archive,
|
Format = MangaFormat.Archive,
|
||||||
FullFilePath = "/manga/darker than black.cbz",
|
FullFilePath = "/manga/darker than black.cbz",
|
||||||
|
|
@ -65,12 +65,12 @@ public class ChapterListExtensionsTests
|
||||||
IsSpecial = true,
|
IsSpecial = true,
|
||||||
Series = "darker than black",
|
Series = "darker than black",
|
||||||
Title = "darker than black",
|
Title = "darker than black",
|
||||||
Volumes = "0"
|
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||||
};
|
};
|
||||||
|
|
||||||
var chapterList = new List<Chapter>()
|
var chapterList = new List<Chapter>()
|
||||||
{
|
{
|
||||||
CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
|
CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
|
||||||
};
|
};
|
||||||
|
|
||||||
var actualChapter = chapterList.GetChapterByRange(info);
|
var actualChapter = chapterList.GetChapterByRange(info);
|
||||||
|
|
@ -83,7 +83,7 @@ public class ChapterListExtensionsTests
|
||||||
{
|
{
|
||||||
var info = new ParserInfo()
|
var info = new ParserInfo()
|
||||||
{
|
{
|
||||||
Chapters = "0",
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
|
||||||
Edition = "",
|
Edition = "",
|
||||||
Format = MangaFormat.Archive,
|
Format = MangaFormat.Archive,
|
||||||
FullFilePath = "/manga/detective comics #001.cbz",
|
FullFilePath = "/manga/detective comics #001.cbz",
|
||||||
|
|
@ -91,13 +91,39 @@ public class ChapterListExtensionsTests
|
||||||
IsSpecial = true,
|
IsSpecial = true,
|
||||||
Series = "detective comics",
|
Series = "detective comics",
|
||||||
Title = "detective comics",
|
Title = "detective comics",
|
||||||
Volumes = "0"
|
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||||
};
|
};
|
||||||
|
|
||||||
var chapterList = new List<Chapter>()
|
var chapterList = new List<Chapter>()
|
||||||
{
|
{
|
||||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||||
|
};
|
||||||
|
|
||||||
|
var actualChapter = chapterList.GetChapterByRange(info);
|
||||||
|
|
||||||
|
Assert.Equal(chapterList[0], actualChapter);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetChapterByRange_On_FilenameChange_ShouldGetChapter()
|
||||||
|
{
|
||||||
|
var info = new ParserInfo()
|
||||||
|
{
|
||||||
|
Chapters = "1",
|
||||||
|
Edition = "",
|
||||||
|
Format = MangaFormat.Archive,
|
||||||
|
FullFilePath = "/manga/detective comics #001.cbz",
|
||||||
|
Filename = "detective comics #001.cbz",
|
||||||
|
IsSpecial = false,
|
||||||
|
Series = "detective comics",
|
||||||
|
Title = "detective comics",
|
||||||
|
Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume
|
||||||
|
};
|
||||||
|
|
||||||
|
var chapterList = new List<Chapter>()
|
||||||
|
{
|
||||||
|
CreateChapter("1", "1", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), false),
|
||||||
};
|
};
|
||||||
|
|
||||||
var actualChapter = chapterList.GetChapterByRange(info);
|
var actualChapter = chapterList.GetChapterByRange(info);
|
||||||
|
|
@ -112,11 +138,11 @@ public class ChapterListExtensionsTests
|
||||||
{
|
{
|
||||||
var chapterList = new List<Chapter>()
|
var chapterList = new List<Chapter>()
|
||||||
{
|
{
|
||||||
CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
|
CreateChapter("darker than black", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
|
||||||
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
|
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
|
||||||
};
|
};
|
||||||
|
|
||||||
Assert.Equal(chapterList.First(), chapterList.GetFirstChapterWithFiles());
|
Assert.Equal(chapterList[0], chapterList.GetFirstChapterWithFiles());
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
|
|
@ -124,13 +150,13 @@ public class ChapterListExtensionsTests
|
||||||
{
|
{
|
||||||
var chapterList = new List<Chapter>()
|
var chapterList = new List<Chapter>()
|
||||||
{
|
{
|
||||||
CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
|
CreateChapter("darker than black", Parser.DefaultChapter, CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true),
|
||||||
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
|
CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false),
|
||||||
};
|
};
|
||||||
|
|
||||||
chapterList.First().Files = new List<MangaFile>();
|
chapterList[0].Files = new List<MangaFile>();
|
||||||
|
|
||||||
Assert.Equal(chapterList.Last(), chapterList.GetFirstChapterWithFiles());
|
Assert.Equal(chapterList[^1], chapterList.GetFirstChapterWithFiles());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -151,11 +177,11 @@ public class ChapterListExtensionsTests
|
||||||
{
|
{
|
||||||
var chapterList = new List<Chapter>()
|
var chapterList = new List<Chapter>()
|
||||||
{
|
{
|
||||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||||
};
|
};
|
||||||
|
|
||||||
chapterList[0].ReleaseDate = new DateTime(10, 1, 1);
|
chapterList[0].ReleaseDate = new DateTime(10, 1, 1, 0, 0, 0, DateTimeKind.Utc);
|
||||||
chapterList[1].ReleaseDate = DateTime.MinValue;
|
chapterList[1].ReleaseDate = DateTime.MinValue;
|
||||||
|
|
||||||
Assert.Equal(0, chapterList.MinimumReleaseYear());
|
Assert.Equal(0, chapterList.MinimumReleaseYear());
|
||||||
|
|
@ -166,12 +192,12 @@ public class ChapterListExtensionsTests
|
||||||
{
|
{
|
||||||
var chapterList = new List<Chapter>()
|
var chapterList = new List<Chapter>()
|
||||||
{
|
{
|
||||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
CreateChapter("detective comics", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||||
};
|
};
|
||||||
|
|
||||||
chapterList[0].ReleaseDate = new DateTime(2002, 1, 1);
|
chapterList[0].ReleaseDate = new DateTime(2002, 1, 1, 0, 0, 0, DateTimeKind.Utc);
|
||||||
chapterList[1].ReleaseDate = new DateTime(2012, 2, 1);
|
chapterList[1].ReleaseDate = new DateTime(2012, 2, 1, 0, 0, 0, DateTimeKind.Utc);
|
||||||
|
|
||||||
Assert.Equal(2002, chapterList.MinimumReleaseYear());
|
Assert.Equal(2002, chapterList.MinimumReleaseYear());
|
||||||
}
|
}
|
||||||
|
|
|
||||||
31
API.Tests/Extensions/EncodeFormatExtensionsTests.cs
Normal file
31
API.Tests/Extensions/EncodeFormatExtensionsTests.cs
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Extensions;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Extensions;
|
||||||
|
|
||||||
|
public class EncodeFormatExtensionsTests
|
||||||
|
{
|
||||||
|
[Fact]
|
||||||
|
public void GetExtension_ShouldReturnCorrectExtensionForAllValues()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var expectedExtensions = new Dictionary<EncodeFormat, string>
|
||||||
|
{
|
||||||
|
{ EncodeFormat.PNG, ".png" },
|
||||||
|
{ EncodeFormat.WEBP, ".webp" },
|
||||||
|
{ EncodeFormat.AVIF, ".avif" }
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act & Assert
|
||||||
|
foreach (var format in Enum.GetValues(typeof(EncodeFormat)).Cast<EncodeFormat>())
|
||||||
|
{
|
||||||
|
var extension = format.GetExtension();
|
||||||
|
Assert.Equal(expectedExtensions[format], extension);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
@ -74,10 +74,10 @@ public class EnumerableExtensionsTests
|
||||||
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"},
|
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"},
|
||||||
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"}
|
new[] {@"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\001.jpg", @"F:\/Anime_Series_Pelis/MANGA/Mangahere (EN)\Kirara Fantasia\_Ch.001\002.jpg"}
|
||||||
)]
|
)]
|
||||||
[InlineData(
|
[InlineData(
|
||||||
new[] {"01/001.jpg", "001.jpg"},
|
new[] {"01/001.jpg", "001.jpg"},
|
||||||
new[] {"001.jpg", "01/001.jpg"}
|
new[] {"001.jpg", "01/001.jpg"}
|
||||||
)]
|
)]
|
||||||
public void TestNaturalSort(string[] input, string[] expected)
|
public void TestNaturalSort(string[] input, string[] expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, input.OrderByNatural(x => x).ToArray());
|
Assert.Equal(expected, input.OrderByNatural(x => x).ToArray());
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
|
using System.IO;
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
|
|
@ -6,7 +7,6 @@ using API.Extensions;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using API.Services.Tasks.Scanner.Parser;
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
using API.Tests.Helpers;
|
|
||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using NSubstitute;
|
using NSubstitute;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
@ -18,9 +18,8 @@ public class ParserInfoListExtensions
|
||||||
private readonly IDefaultParser _defaultParser;
|
private readonly IDefaultParser _defaultParser;
|
||||||
public ParserInfoListExtensions()
|
public ParserInfoListExtensions()
|
||||||
{
|
{
|
||||||
_defaultParser =
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||||
new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
|
_defaultParser = new BasicParser(ds, new ImageParser(ds));
|
||||||
new MockFileSystem()));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
|
|
@ -33,7 +32,7 @@ public class ParserInfoListExtensions
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
|
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
|
||||||
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
|
[InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
|
||||||
[InlineData(new[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
|
[InlineData(new[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
|
||||||
public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo)
|
public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo)
|
||||||
{
|
{
|
||||||
|
|
@ -41,8 +40,8 @@ public class ParserInfoListExtensions
|
||||||
foreach (var filename in inputInfos)
|
foreach (var filename in inputInfos)
|
||||||
{
|
{
|
||||||
infos.Add(_defaultParser.Parse(
|
infos.Add(_defaultParser.Parse(
|
||||||
filename,
|
Path.Join("E:/Manga/Cynthia the Mission/", filename),
|
||||||
string.Empty));
|
"E:/Manga/", "E:/Manga/", LibraryType.Manga));
|
||||||
}
|
}
|
||||||
|
|
||||||
var files = inputChapters.Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build()).ToList();
|
var files = inputChapters.Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build()).ToList();
|
||||||
|
|
@ -52,4 +51,26 @@ public class ParserInfoListExtensions
|
||||||
|
|
||||||
Assert.Equal(expectedHasInfo, infos.HasInfo(chapter));
|
Assert.Equal(expectedHasInfo, infos.HasInfo(chapter));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void HasInfoTest_SuccessWhenSpecial()
|
||||||
|
{
|
||||||
|
var infos = new[]
|
||||||
|
{
|
||||||
|
_defaultParser.Parse(
|
||||||
|
"E:/Manga/Cynthia the Mission/Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip",
|
||||||
|
"E:/Manga/", "E:/Manga/", LibraryType.Manga)
|
||||||
|
};
|
||||||
|
|
||||||
|
var files = new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip"}
|
||||||
|
.Select(s => new MangaFileBuilder(s, MangaFormat.Archive, 199).Build())
|
||||||
|
.ToList();
|
||||||
|
var chapter = new ChapterBuilder("Cynthia The Mission The Special SP01 [Desudesu&Brolen].zip")
|
||||||
|
.WithRange("Cynthia The Mission The Special SP01 [Desudesu&Brolen]")
|
||||||
|
.WithFiles(files)
|
||||||
|
.WithIsSpecial(true)
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
Assert.True(infos.HasInfo(chapter));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,9 @@
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using API.Data;
|
|
||||||
using API.Data.Misc;
|
using API.Data.Misc;
|
||||||
using API.Entities;
|
using API.Entities;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Entities.Metadata;
|
using API.Entities.Person;
|
||||||
using API.Extensions;
|
|
||||||
using API.Extensions.QueryExtensions;
|
using API.Extensions.QueryExtensions;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
@ -45,17 +43,17 @@ public class QueryableExtensionsTests
|
||||||
[InlineData(false, 1)]
|
[InlineData(false, 1)]
|
||||||
public void RestrictAgainstAgeRestriction_CollectionTag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
public void RestrictAgainstAgeRestriction_CollectionTag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
||||||
{
|
{
|
||||||
var items = new List<CollectionTag>()
|
var items = new List<AppUserCollection>()
|
||||||
{
|
{
|
||||||
new CollectionTagBuilder("Test")
|
new AppUserCollectionBuilder("Test")
|
||||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
|
.WithItem(new SeriesBuilder("S1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build()).Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
new CollectionTagBuilder("Test 2")
|
new AppUserCollectionBuilder("Test 2")
|
||||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
|
.WithItem(new SeriesBuilder("S2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build()).Build())
|
||||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
|
.WithItem(new SeriesBuilder("S1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build()).Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
new CollectionTagBuilder("Test 3")
|
new AppUserCollectionBuilder("Test 3")
|
||||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
|
.WithItem(new SeriesBuilder("S3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build()).Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -69,7 +67,7 @@ public class QueryableExtensionsTests
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(true, 2)]
|
[InlineData(true, 2)]
|
||||||
[InlineData(false, 1)]
|
[InlineData(false, 2)]
|
||||||
public void RestrictAgainstAgeRestriction_Genre_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
public void RestrictAgainstAgeRestriction_Genre_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
||||||
{
|
{
|
||||||
var items = new List<Genre>()
|
var items = new List<Genre>()
|
||||||
|
|
@ -96,7 +94,7 @@ public class QueryableExtensionsTests
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(true, 2)]
|
[InlineData(true, 2)]
|
||||||
[InlineData(false, 1)]
|
[InlineData(false, 2)]
|
||||||
public void RestrictAgainstAgeRestriction_Tag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
public void RestrictAgainstAgeRestriction_Tag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
||||||
{
|
{
|
||||||
var items = new List<Tag>()
|
var items = new List<Tag>()
|
||||||
|
|
@ -123,29 +121,46 @@ public class QueryableExtensionsTests
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(true, 2)]
|
[InlineData(true, 2)]
|
||||||
[InlineData(false, 1)]
|
[InlineData(false, 2)]
|
||||||
public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
|
public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedPeopleCount)
|
||||||
{
|
{
|
||||||
var items = new List<Person>()
|
// Arrange
|
||||||
|
var items = new List<Person>
|
||||||
{
|
{
|
||||||
new PersonBuilder("Test", PersonRole.Character)
|
CreatePersonWithSeriesMetadata("Test1", AgeRating.Teen),
|
||||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
|
CreatePersonWithSeriesMetadata("Test2", AgeRating.Unknown, AgeRating.Teen), // 2 series on this person, restrict will still allow access
|
||||||
.Build(),
|
CreatePersonWithSeriesMetadata("Test3", AgeRating.X18Plus)
|
||||||
new PersonBuilder("Test", PersonRole.Character)
|
|
||||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
|
|
||||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
|
|
||||||
.Build(),
|
|
||||||
new PersonBuilder("Test", PersonRole.Character)
|
|
||||||
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
|
|
||||||
.Build(),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
|
var ageRestriction = new AgeRestriction
|
||||||
{
|
{
|
||||||
AgeRating = AgeRating.Teen,
|
AgeRating = AgeRating.Teen,
|
||||||
IncludeUnknowns = includeUnknowns
|
IncludeUnknowns = includeUnknowns
|
||||||
});
|
};
|
||||||
Assert.Equal(expectedCount, filtered.Count());
|
|
||||||
|
// Act
|
||||||
|
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(ageRestriction);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(expectedPeopleCount, filtered.Count());
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Person CreatePersonWithSeriesMetadata(string name, params AgeRating[] ageRatings)
|
||||||
|
{
|
||||||
|
var person = new PersonBuilder(name).Build();
|
||||||
|
|
||||||
|
foreach (var ageRating in ageRatings)
|
||||||
|
{
|
||||||
|
var seriesMetadata = new SeriesMetadataBuilder().WithAgeRating(ageRating).Build();
|
||||||
|
person.SeriesMetadataPeople.Add(new SeriesMetadataPeople
|
||||||
|
{
|
||||||
|
SeriesMetadata = seriesMetadata,
|
||||||
|
Person = person,
|
||||||
|
Role = PersonRole.Character // Role is now part of the relationship
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return person;
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,9 @@
|
||||||
using System.Collections.Generic;
|
using System.Linq;
|
||||||
using System.Globalization;
|
|
||||||
using System.Linq;
|
|
||||||
using API.Comparators;
|
using API.Comparators;
|
||||||
using API.Entities;
|
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Extensions;
|
using API.Extensions;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Extensions;
|
namespace API.Tests.Extensions;
|
||||||
|
|
@ -17,22 +15,23 @@ public class SeriesExtensionsTests
|
||||||
{
|
{
|
||||||
var series = new SeriesBuilder("Test 1")
|
var series = new SeriesBuilder("Test 1")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
|
||||||
.WithCoverImage("Special 1")
|
.WithCoverImage("Special 1")
|
||||||
.WithIsSpecial(true)
|
.WithIsSpecial(true)
|
||||||
|
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||||
.Build())
|
.Build())
|
||||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithCoverImage("Special 2")
|
.WithCoverImage("Special 2")
|
||||||
.WithIsSpecial(true)
|
.WithIsSpecial(true)
|
||||||
|
.WithSortOrder(Parser.SpecialVolumeNumber + 2)
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Special 1", series.GetCoverImage());
|
Assert.Equal("Special 1", series.GetCoverImage());
|
||||||
|
|
@ -43,8 +42,8 @@ public class SeriesExtensionsTests
|
||||||
{
|
{
|
||||||
var series = new SeriesBuilder("Test 1")
|
var series = new SeriesBuilder("Test 1")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithName(Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("13")
|
.WithChapter(new ChapterBuilder("13")
|
||||||
.WithCoverImage("Chapter 13")
|
.WithCoverImage("Chapter 13")
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -59,7 +58,7 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithName("Volume 2")
|
.WithName("Volume 2")
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithCoverImage("Volume 2")
|
.WithCoverImage("Volume 2")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -67,12 +66,83 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
|
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCoverImage_LooseChapters_WithSub1_Chapter()
|
||||||
|
{
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithFormat(MangaFormat.Archive)
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
|
.WithName(Parser.LooseLeafVolume)
|
||||||
|
.WithChapter(new ChapterBuilder("-1")
|
||||||
|
.WithCoverImage("Chapter -1")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("0.5")
|
||||||
|
.WithCoverImage("Chapter 0.5")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithCoverImage("Chapter 2")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithCoverImage("Chapter 1")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("3")
|
||||||
|
.WithCoverImage("Chapter 3")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("4AU")
|
||||||
|
.WithCoverImage("Chapter 4AU")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
|
||||||
|
Assert.Equal("Chapter 1", series.GetCoverImage());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Checks the case where there are specials and loose leafs, loose leaf chapters should be preferred
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void GetCoverImage_LooseChapters_WithSub1_Chapter_WithSpecials()
|
||||||
|
{
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithFormat(MangaFormat.Archive)
|
||||||
|
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||||
|
.WithName(Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder("I am a Special")
|
||||||
|
.WithCoverImage("I am a Special")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("I am a Special 2")
|
||||||
|
.WithCoverImage("I am a Special 2")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
|
.WithName(Parser.LooseLeafVolume)
|
||||||
|
.WithChapter(new ChapterBuilder("0.5")
|
||||||
|
.WithCoverImage("Chapter 0.5")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithCoverImage("Chapter 2")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithCoverImage("Chapter 1")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
|
||||||
|
Assert.Equal("Chapter 1", series.GetCoverImage());
|
||||||
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public void GetCoverImage_JustVolumes()
|
public void GetCoverImage_JustVolumes()
|
||||||
{
|
{
|
||||||
|
|
@ -81,14 +151,14 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithName("Volume 1")
|
.WithName("Volume 1")
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithCoverImage("Volume 1 Chapter 1")
|
.WithCoverImage("Volume 1 Chapter 1")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithName("Volume 2")
|
.WithName("Volume 2")
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithCoverImage("Volume 2")
|
.WithCoverImage("Volume 2")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -109,19 +179,48 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
|
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCoverImage_JustVolumes_ButVolume0()
|
||||||
|
{
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithFormat(MangaFormat.Archive)
|
||||||
|
|
||||||
|
.WithVolume(new VolumeBuilder("0")
|
||||||
|
.WithName("Volume 0")
|
||||||
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
|
.WithCoverImage("Volume 0")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithName("Volume 1")
|
||||||
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
|
.WithCoverImage("Volume 1")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
foreach (var vol in series.Volumes)
|
||||||
|
{
|
||||||
|
vol.CoverImage = vol.Chapters.MinBy(x => x.SortOrder, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
|
}
|
||||||
|
|
||||||
|
Assert.Equal("Volume 1", series.GetCoverImage());
|
||||||
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public void GetCoverImage_JustSpecials_WithDecimal()
|
public void GetCoverImage_JustSpecials_WithDecimal()
|
||||||
{
|
{
|
||||||
var series = new SeriesBuilder("Test 1")
|
var series = new SeriesBuilder("Test 1")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithName(Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("2.5")
|
.WithChapter(new ChapterBuilder("2.5")
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Special 1")
|
.WithCoverImage("Special 1")
|
||||||
|
|
@ -135,7 +234,7 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Special 2", series.GetCoverImage());
|
Assert.Equal("Special 2", series.GetCoverImage());
|
||||||
|
|
@ -146,8 +245,8 @@ public class SeriesExtensionsTests
|
||||||
{
|
{
|
||||||
var series = new SeriesBuilder("Test 1")
|
var series = new SeriesBuilder("Test 1")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithName(Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("2.5")
|
.WithChapter(new ChapterBuilder("2.5")
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 2.5")
|
.WithCoverImage("Chapter 2.5")
|
||||||
|
|
@ -156,16 +255,19 @@ public class SeriesExtensionsTests
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 2")
|
.WithCoverImage("Chapter 2")
|
||||||
.Build())
|
.Build())
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(true)
|
.WithIsSpecial(true)
|
||||||
.WithCoverImage("Special 1")
|
.WithCoverImage("Special 1")
|
||||||
|
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Chapter 2", series.GetCoverImage());
|
Assert.Equal("Chapter 2", series.GetCoverImage());
|
||||||
|
|
@ -176,8 +278,8 @@ public class SeriesExtensionsTests
|
||||||
{
|
{
|
||||||
var series = new SeriesBuilder("Test 1")
|
var series = new SeriesBuilder("Test 1")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithName(Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("2.5")
|
.WithChapter(new ChapterBuilder("2.5")
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 2.5")
|
.WithCoverImage("Chapter 2.5")
|
||||||
|
|
@ -186,14 +288,17 @@ public class SeriesExtensionsTests
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 2")
|
.WithCoverImage("Chapter 2")
|
||||||
.Build())
|
.Build())
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(true)
|
.WithIsSpecial(true)
|
||||||
.WithCoverImage("Special 3")
|
.WithCoverImage("Special 3")
|
||||||
|
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Volume 1")
|
.WithCoverImage("Volume 1")
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -202,7 +307,7 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Volume 1", series.GetCoverImage());
|
Assert.Equal("Volume 1", series.GetCoverImage());
|
||||||
|
|
@ -213,8 +318,8 @@ public class SeriesExtensionsTests
|
||||||
{
|
{
|
||||||
var series = new SeriesBuilder("Test 1")
|
var series = new SeriesBuilder("Test 1")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithName(Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("2.5")
|
.WithChapter(new ChapterBuilder("2.5")
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 2.5")
|
.WithCoverImage("Chapter 2.5")
|
||||||
|
|
@ -223,14 +328,17 @@ public class SeriesExtensionsTests
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 2")
|
.WithCoverImage("Chapter 2")
|
||||||
.Build())
|
.Build())
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(true)
|
.WithIsSpecial(true)
|
||||||
.WithCoverImage("Special 1")
|
.WithCoverImage("Special 1")
|
||||||
|
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Volume 1")
|
.WithCoverImage("Volume 1")
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -239,7 +347,7 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Volume 1", series.GetCoverImage());
|
Assert.Equal("Volume 1", series.GetCoverImage());
|
||||||
|
|
@ -250,8 +358,8 @@ public class SeriesExtensionsTests
|
||||||
{
|
{
|
||||||
var series = new SeriesBuilder("Ippo")
|
var series = new SeriesBuilder("Ippo")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithName(Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1426")
|
.WithChapter(new ChapterBuilder("1426")
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 1426")
|
.WithCoverImage("Chapter 1426")
|
||||||
|
|
@ -260,21 +368,24 @@ public class SeriesExtensionsTests
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 1425")
|
.WithCoverImage("Chapter 1425")
|
||||||
.Build())
|
.Build())
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(true)
|
.WithIsSpecial(true)
|
||||||
.WithCoverImage("Special 1")
|
.WithCoverImage("Special 3")
|
||||||
|
.WithSortOrder(Parser.SpecialVolumeNumber + 1)
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Volume 1")
|
.WithCoverImage("Volume 1")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("137")
|
.WithVolume(new VolumeBuilder("137")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Volume 137")
|
.WithCoverImage("Volume 137")
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -283,7 +394,7 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Volume 1", series.GetCoverImage());
|
Assert.Equal("Volume 1", series.GetCoverImage());
|
||||||
|
|
@ -294,8 +405,8 @@ public class SeriesExtensionsTests
|
||||||
{
|
{
|
||||||
var series = new SeriesBuilder("Test 1")
|
var series = new SeriesBuilder("Test 1")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
.WithName(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithName(Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("2.5")
|
.WithChapter(new ChapterBuilder("2.5")
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Chapter 2.5")
|
.WithCoverImage("Chapter 2.5")
|
||||||
|
|
@ -306,8 +417,8 @@ public class SeriesExtensionsTests
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("4")
|
.WithVolume(new VolumeBuilder("4")
|
||||||
.WithNumber(4)
|
.WithMinNumber(4)
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
.WithIsSpecial(false)
|
.WithIsSpecial(false)
|
||||||
.WithCoverImage("Volume 4")
|
.WithCoverImage("Volume 4")
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -316,11 +427,77 @@ public class SeriesExtensionsTests
|
||||||
|
|
||||||
foreach (var vol in series.Volumes)
|
foreach (var vol in series.Volumes)
|
||||||
{
|
{
|
||||||
vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number, CultureInfo.InvariantCulture), ChapterSortComparerZeroFirst.Default)?.CoverImage;
|
vol.CoverImage = vol.Chapters.MinBy(x => x.MinNumber, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
|
||||||
}
|
}
|
||||||
|
|
||||||
Assert.Equal("Chapter 2", series.GetCoverImage());
|
Assert.Equal("Chapter 2", series.GetCoverImage());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Ensure that Series cover is issue 1, when there are less than 1 entities and specials
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void GetCoverImage_LessThanIssue1()
|
||||||
|
{
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithFormat(MangaFormat.Archive)
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
|
.WithName(Parser.LooseLeafVolume)
|
||||||
|
.WithChapter(new ChapterBuilder("0")
|
||||||
|
.WithIsSpecial(false)
|
||||||
|
.WithCoverImage("Chapter 0")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithIsSpecial(false)
|
||||||
|
.WithCoverImage("Chapter 1")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||||
|
.WithMinNumber(4)
|
||||||
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
|
.WithIsSpecial(false)
|
||||||
|
.WithCoverImage("Volume 4")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
Assert.Equal("Chapter 1", series.GetCoverImage());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Ensure that Series cover is issue 1, when there are less than 1 entities and specials
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void GetCoverImage_LessThanIssue1_WithNegative()
|
||||||
|
{
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithFormat(MangaFormat.Archive)
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.LooseLeafVolume)
|
||||||
|
.WithName(Parser.LooseLeafVolume)
|
||||||
|
.WithChapter(new ChapterBuilder("-1")
|
||||||
|
.WithIsSpecial(false)
|
||||||
|
.WithCoverImage("Chapter -1")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("0")
|
||||||
|
.WithIsSpecial(false)
|
||||||
|
.WithCoverImage("Chapter 0")
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithIsSpecial(false)
|
||||||
|
.WithCoverImage("Chapter 1")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder(Parser.SpecialVolume)
|
||||||
|
.WithMinNumber(4)
|
||||||
|
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
|
||||||
|
.WithIsSpecial(false)
|
||||||
|
.WithCoverImage("Volume 4")
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
Assert.Equal("Chapter 1", series.GetCoverImage());
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load diff
81
API.Tests/Extensions/VersionExtensionTests.cs
Normal file
81
API.Tests/Extensions/VersionExtensionTests.cs
Normal file
|
|
@ -0,0 +1,81 @@
|
||||||
|
using System;
|
||||||
|
using API.Extensions;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Extensions;
|
||||||
|
|
||||||
|
public class VersionHelperTests
|
||||||
|
{
|
||||||
|
[Fact]
|
||||||
|
public void CompareWithoutRevision_ShouldReturnTrue_WhenMajorMinorBuildMatch()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var v1 = new Version(1, 2, 3, 4);
|
||||||
|
var v2 = new Version(1, 2, 3, 5);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = v1.CompareWithoutRevision(v2);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void CompareWithoutRevision_ShouldHandleBuildlessVersions()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var v1 = new Version(1, 2);
|
||||||
|
var v2 = new Version(1, 2);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = v1.CompareWithoutRevision(v2);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData(1, 2, 3, 1, 2, 4)]
|
||||||
|
[InlineData(1, 2, 3, 1, 2, 0)]
|
||||||
|
public void CompareWithoutRevision_ShouldReturnFalse_WhenBuildDiffers(
|
||||||
|
int major1, int minor1, int build1,
|
||||||
|
int major2, int minor2, int build2)
|
||||||
|
{
|
||||||
|
var v1 = new Version(major1, minor1, build1);
|
||||||
|
var v2 = new Version(major2, minor2, build2);
|
||||||
|
|
||||||
|
var result = v1.CompareWithoutRevision(v2);
|
||||||
|
|
||||||
|
Assert.False(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData(1, 2, 3, 1, 3, 3)]
|
||||||
|
[InlineData(1, 2, 3, 1, 0, 3)]
|
||||||
|
public void CompareWithoutRevision_ShouldReturnFalse_WhenMinorDiffers(
|
||||||
|
int major1, int minor1, int build1,
|
||||||
|
int major2, int minor2, int build2)
|
||||||
|
{
|
||||||
|
var v1 = new Version(major1, minor1, build1);
|
||||||
|
var v2 = new Version(major2, minor2, build2);
|
||||||
|
|
||||||
|
var result = v1.CompareWithoutRevision(v2);
|
||||||
|
|
||||||
|
Assert.False(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData(1, 2, 3, 2, 2, 3)]
|
||||||
|
[InlineData(1, 2, 3, 0, 2, 3)]
|
||||||
|
public void CompareWithoutRevision_ShouldReturnFalse_WhenMajorDiffers(
|
||||||
|
int major1, int minor1, int build1,
|
||||||
|
int major2, int minor2, int build2)
|
||||||
|
{
|
||||||
|
var v1 = new Version(major1, minor1, build1);
|
||||||
|
var v2 = new Version(major2, minor2, build2);
|
||||||
|
|
||||||
|
var result = v1.CompareWithoutRevision(v2);
|
||||||
|
|
||||||
|
Assert.False(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -3,7 +3,6 @@ using API.Entities;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Extensions;
|
using API.Extensions;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Tests.Helpers;
|
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Extensions;
|
namespace API.Tests.Extensions;
|
||||||
|
|
@ -21,13 +20,44 @@ public class VolumeListExtensionsTests
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
.WithChapter(new ChapterBuilder("4").Build())
|
.WithChapter(new ChapterBuilder("4").Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
|
.Build(),
|
||||||
|
|
||||||
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithIsSpecial(true)
|
||||||
|
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||||
|
.Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
};
|
};
|
||||||
|
|
||||||
Assert.Equal(volumes[0].Number, volumes.GetCoverImage(MangaFormat.Archive).Number);
|
var v = volumes.GetCoverImage(MangaFormat.Archive);
|
||||||
|
Assert.Equal(volumes[0].MinNumber, volumes.GetCoverImage(MangaFormat.Archive).MinNumber);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCoverImage_ChoosesVolume1_WhenHalf()
|
||||||
|
{
|
||||||
|
var volumes = new List<Volume>()
|
||||||
|
{
|
||||||
|
new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
|
||||||
|
.Build(),
|
||||||
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
|
.WithChapter(new ChapterBuilder("0.5").Build())
|
||||||
|
.Build(),
|
||||||
|
|
||||||
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithIsSpecial(true)
|
||||||
|
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||||
|
.Build())
|
||||||
|
.Build(),
|
||||||
|
};
|
||||||
|
|
||||||
|
var v = volumes.GetCoverImage(MangaFormat.Archive);
|
||||||
|
Assert.Equal(volumes[0].MinNumber, volumes.GetCoverImage(MangaFormat.Archive).MinNumber);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
|
|
@ -39,9 +69,14 @@ public class VolumeListExtensionsTests
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
.WithChapter(new ChapterBuilder("4").Build())
|
.WithChapter(new ChapterBuilder("4").Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
|
.Build(),
|
||||||
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithIsSpecial(true)
|
||||||
|
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||||
|
.Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -57,9 +92,14 @@ public class VolumeListExtensionsTests
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
.WithChapter(new ChapterBuilder("4").Build())
|
.WithChapter(new ChapterBuilder("4").Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
|
.Build(),
|
||||||
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithIsSpecial(true)
|
||||||
|
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||||
|
.Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -75,9 +115,14 @@ public class VolumeListExtensionsTests
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
.WithChapter(new ChapterBuilder("4").Build())
|
.WithChapter(new ChapterBuilder("4").Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithIsSpecial(true).Build())
|
.Build(),
|
||||||
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithIsSpecial(true)
|
||||||
|
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||||
|
.Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -95,7 +140,12 @@ public class VolumeListExtensionsTests
|
||||||
.Build(),
|
.Build(),
|
||||||
new VolumeBuilder("1")
|
new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).Build())
|
.Build(),
|
||||||
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume)
|
||||||
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithIsSpecial(true)
|
||||||
|
.WithSortOrder(API.Services.Tasks.Scanner.Parser.Parser.SpecialVolumeNumber + 1)
|
||||||
|
.Build())
|
||||||
.Build(),
|
.Build(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
||||||
178
API.Tests/Helpers/BookSortTitlePrefixHelperTests.cs
Normal file
178
API.Tests/Helpers/BookSortTitlePrefixHelperTests.cs
Normal file
|
|
@ -0,0 +1,178 @@
|
||||||
|
using API.Helpers;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Helpers;
|
||||||
|
|
||||||
|
public class BookSortTitlePrefixHelperTests
|
||||||
|
{
|
||||||
|
[Theory]
|
||||||
|
[InlineData("The Avengers", "Avengers")]
|
||||||
|
[InlineData("A Game of Thrones", "Game of Thrones")]
|
||||||
|
[InlineData("An American Tragedy", "American Tragedy")]
|
||||||
|
public void TestEnglishPrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("El Quijote", "Quijote")]
|
||||||
|
[InlineData("La Casa de Papel", "Casa de Papel")]
|
||||||
|
[InlineData("Los Miserables", "Miserables")]
|
||||||
|
[InlineData("Las Vegas", "Vegas")]
|
||||||
|
[InlineData("Un Mundo Feliz", "Mundo Feliz")]
|
||||||
|
[InlineData("Una Historia", "Historia")]
|
||||||
|
public void TestSpanishPrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("Le Petit Prince", "Petit Prince")]
|
||||||
|
[InlineData("La Belle et la Bête", "Belle et la Bête")]
|
||||||
|
[InlineData("Les Misérables", "Misérables")]
|
||||||
|
[InlineData("Un Amour de Swann", "Amour de Swann")]
|
||||||
|
[InlineData("Une Vie", "Vie")]
|
||||||
|
[InlineData("Des Souris et des Hommes", "Souris et des Hommes")]
|
||||||
|
public void TestFrenchPrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("Der Herr der Ringe", "Herr der Ringe")]
|
||||||
|
[InlineData("Die Verwandlung", "Verwandlung")]
|
||||||
|
[InlineData("Das Kapital", "Kapital")]
|
||||||
|
[InlineData("Ein Sommernachtstraum", "Sommernachtstraum")]
|
||||||
|
[InlineData("Eine Geschichte", "Geschichte")]
|
||||||
|
public void TestGermanPrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("Il Nome della Rosa", "Nome della Rosa")]
|
||||||
|
[InlineData("La Divina Commedia", "Divina Commedia")]
|
||||||
|
[InlineData("Lo Hobbit", "Hobbit")]
|
||||||
|
[InlineData("Gli Ultimi", "Ultimi")]
|
||||||
|
[InlineData("Le Città Invisibili", "Città Invisibili")]
|
||||||
|
[InlineData("Un Giorno", "Giorno")]
|
||||||
|
[InlineData("Una Notte", "Notte")]
|
||||||
|
public void TestItalianPrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("O Alquimista", "Alquimista")]
|
||||||
|
[InlineData("A Moreninha", "Moreninha")]
|
||||||
|
[InlineData("Os Lusíadas", "Lusíadas")]
|
||||||
|
[InlineData("As Meninas", "Meninas")]
|
||||||
|
[InlineData("Um Defeito de Cor", "Defeito de Cor")]
|
||||||
|
[InlineData("Uma História", "História")]
|
||||||
|
public void TestPortuguesePrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("", "")] // Empty string returns empty
|
||||||
|
[InlineData("Book", "Book")] // Single word, no change
|
||||||
|
[InlineData("Avengers", "Avengers")] // No prefix, no change
|
||||||
|
public void TestNoPrefixCases(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("The", "The")] // Just a prefix word alone
|
||||||
|
[InlineData("A", "A")] // Just single letter prefix alone
|
||||||
|
[InlineData("Le", "Le")] // French prefix alone
|
||||||
|
public void TestPrefixWordAlone(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("THE AVENGERS", "AVENGERS")] // All caps
|
||||||
|
[InlineData("the avengers", "avengers")] // All lowercase
|
||||||
|
[InlineData("The AVENGERS", "AVENGERS")] // Mixed case
|
||||||
|
[InlineData("tHe AvEnGeRs", "AvEnGeRs")] // Random case
|
||||||
|
public void TestCaseInsensitivity(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("Then Came You", "Then Came You")] // "The" + "n" = not a prefix
|
||||||
|
[InlineData("And Then There Were None", "And Then There Were None")] // "An" + "d" = not a prefix
|
||||||
|
[InlineData("Elsewhere", "Elsewhere")] // "El" + "sewhere" = not a prefix (no space)
|
||||||
|
[InlineData("Lesson Plans", "Lesson Plans")] // "Les" + "son" = not a prefix (no space)
|
||||||
|
[InlineData("Theory of Everything", "Theory of Everything")] // "The" + "ory" = not a prefix
|
||||||
|
public void TestFalsePositivePrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("The ", "The ")] // Prefix with only space after - returns original
|
||||||
|
[InlineData("La ", "La ")] // Same for other languages
|
||||||
|
[InlineData("El ", "El ")] // Same for Spanish
|
||||||
|
public void TestPrefixWithOnlySpaceAfter(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("The Multiple Spaces", " Multiple Spaces")] // Doesn't trim extra spaces from remainder
|
||||||
|
[InlineData("Le Petit Prince", " Petit Prince")] // Leading space preserved in remainder
|
||||||
|
public void TestSpaceHandling(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("The The Matrix", "The Matrix")] // Removes first "The", leaves second
|
||||||
|
[InlineData("A A Clockwork Orange", "A Clockwork Orange")] // Removes first "A", leaves second
|
||||||
|
[InlineData("El El Cid", "El Cid")] // Spanish version
|
||||||
|
public void TestRepeatedPrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("L'Étranger", "L'Étranger")] // French contraction - no space, no change
|
||||||
|
[InlineData("D'Artagnan", "D'Artagnan")] // Contraction - no space, no change
|
||||||
|
[InlineData("The-Matrix", "The-Matrix")] // Hyphen instead of space - no change
|
||||||
|
[InlineData("The.Avengers", "The.Avengers")] // Period instead of space - no change
|
||||||
|
public void TestNonSpaceSeparators(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("三国演义", "三国演义")] // Chinese - no processing due to CJK detection
|
||||||
|
[InlineData("한국어", "한국어")] // Korean - not in CJK range, would be processed normally
|
||||||
|
public void TestCjkLanguages(string inputString, string expected)
|
||||||
|
{
|
||||||
|
// NOTE: These don't do anything, I am waiting for user input on if these are needed
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("नमस्ते दुनिया", "नमस्ते दुनिया")] // Hindi - not CJK, processed normally
|
||||||
|
[InlineData("مرحبا بالعالم", "مرحبا بالعالم")] // Arabic - not CJK, processed normally
|
||||||
|
[InlineData("שלום עולם", "שלום עולם")] // Hebrew - not CJK, processed normally
|
||||||
|
public void TestNonLatinNonCjkScripts(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("в мире", "мире")] // Russian "в" (in) - should be removed
|
||||||
|
[InlineData("на столе", "столе")] // Russian "на" (on) - should be removed
|
||||||
|
[InlineData("с друзьями", "друзьями")] // Russian "с" (with) - should be removed
|
||||||
|
public void TestRussianPrefixes(string inputString, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -2,7 +2,6 @@
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
using API.Entities;
|
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
|
|
@ -11,9 +10,9 @@ using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Helpers;
|
namespace API.Tests.Helpers;
|
||||||
|
|
||||||
public class CacheHelperTests
|
public class CacheHelperTests: AbstractFsTest
|
||||||
{
|
{
|
||||||
private const string TestCoverImageDirectory = @"c:\";
|
private static readonly string TestCoverImageDirectory = Root;
|
||||||
private const string TestCoverImageFile = "thumbnail.jpg";
|
private const string TestCoverImageFile = "thumbnail.jpg";
|
||||||
private readonly string _testCoverPath = Path.Join(TestCoverImageDirectory, TestCoverImageFile);
|
private readonly string _testCoverPath = Path.Join(TestCoverImageDirectory, TestCoverImageFile);
|
||||||
private const string TestCoverArchive = @"file in folder.zip";
|
private const string TestCoverArchive = @"file in folder.zip";
|
||||||
|
|
@ -37,24 +36,29 @@ public class CacheHelperTests
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("", false)]
|
[InlineData("", false)]
|
||||||
[InlineData("C:/", false)]
|
|
||||||
[InlineData(null, false)]
|
[InlineData(null, false)]
|
||||||
public void CoverImageExists_DoesFileExist(string coverImage, bool exists)
|
public void CoverImageExists_DoesFileExist(string coverImage, bool exists)
|
||||||
{
|
{
|
||||||
Assert.Equal(exists, _cacheHelper.CoverImageExists(coverImage));
|
Assert.Equal(exists, _cacheHelper.CoverImageExists(coverImage));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void CoverImageExists_DoesFileExistRoot()
|
||||||
|
{
|
||||||
|
Assert.False(_cacheHelper.CoverImageExists(Root));
|
||||||
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public void CoverImageExists_FileExists()
|
public void CoverImageExists_FileExists()
|
||||||
{
|
{
|
||||||
Assert.True(_cacheHelper.CoverImageExists(TestCoverArchive));
|
Assert.True(_cacheHelper.CoverImageExists(Path.Join(TestCoverImageDirectory, TestCoverArchive)));
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public void ShouldUpdateCoverImage_OnFirstRun()
|
public void ShouldUpdateCoverImage_OnFirstRun()
|
||||||
{
|
{
|
||||||
|
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(DateTime.Now)
|
.WithLastModified(DateTime.Now)
|
||||||
.Build();
|
.Build();
|
||||||
Assert.True(_cacheHelper.ShouldUpdateCoverImage(null, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
Assert.True(_cacheHelper.ShouldUpdateCoverImage(null, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
||||||
|
|
@ -65,7 +69,7 @@ public class CacheHelperTests
|
||||||
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked()
|
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked()
|
||||||
{
|
{
|
||||||
// Represents first run
|
// Represents first run
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(DateTime.Now)
|
.WithLastModified(DateTime.Now)
|
||||||
.Build();
|
.Build();
|
||||||
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
||||||
|
|
@ -76,7 +80,7 @@ public class CacheHelperTests
|
||||||
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked_2()
|
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetNotLocked_2()
|
||||||
{
|
{
|
||||||
// Represents first run
|
// Represents first run
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(DateTime.Now)
|
.WithLastModified(DateTime.Now)
|
||||||
.Build();
|
.Build();
|
||||||
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now,
|
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now,
|
||||||
|
|
@ -87,7 +91,7 @@ public class CacheHelperTests
|
||||||
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked()
|
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked()
|
||||||
{
|
{
|
||||||
// Represents first run
|
// Represents first run
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(DateTime.Now)
|
.WithLastModified(DateTime.Now)
|
||||||
.Build();
|
.Build();
|
||||||
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
||||||
|
|
@ -98,7 +102,7 @@ public class CacheHelperTests
|
||||||
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked_Modified()
|
public void ShouldUpdateCoverImage_ShouldNotUpdateOnSecondRunWithCoverImageSetLocked_Modified()
|
||||||
{
|
{
|
||||||
// Represents first run
|
// Represents first run
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(DateTime.Now)
|
.WithLastModified(DateTime.Now)
|
||||||
.Build();
|
.Build();
|
||||||
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
Assert.False(_cacheHelper.ShouldUpdateCoverImage(_testCoverPath, file, DateTime.Now.Subtract(TimeSpan.FromMinutes(1)),
|
||||||
|
|
@ -122,7 +126,7 @@ public class CacheHelperTests
|
||||||
var cacheHelper = new CacheHelper(fileService);
|
var cacheHelper = new CacheHelper(fileService);
|
||||||
|
|
||||||
var created = DateTime.Now.Subtract(TimeSpan.FromHours(1));
|
var created = DateTime.Now.Subtract(TimeSpan.FromHours(1));
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(DateTime.Now.Subtract(TimeSpan.FromMinutes(1)))
|
.WithLastModified(DateTime.Now.Subtract(TimeSpan.FromMinutes(1)))
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
|
|
@ -133,9 +137,10 @@ public class CacheHelperTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceCreated()
|
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceCreated()
|
||||||
{
|
{
|
||||||
|
var now = DateTimeOffset.Now;
|
||||||
var filesystemFile = new MockFileData("")
|
var filesystemFile = new MockFileData("")
|
||||||
{
|
{
|
||||||
LastWriteTime = DateTimeOffset.Now
|
LastWriteTime =now,
|
||||||
};
|
};
|
||||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||||
{
|
{
|
||||||
|
|
@ -147,12 +152,12 @@ public class CacheHelperTests
|
||||||
var cacheHelper = new CacheHelper(fileService);
|
var cacheHelper = new CacheHelper(fileService);
|
||||||
|
|
||||||
var chapter = new ChapterBuilder("1")
|
var chapter = new ChapterBuilder("1")
|
||||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
.WithLastModified(now.DateTime)
|
||||||
.WithCreated(filesystemFile.LastWriteTime.DateTime)
|
.WithCreated(now.DateTime)
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
.WithLastModified(now.DateTime)
|
||||||
.Build();
|
.Build();
|
||||||
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||||
}
|
}
|
||||||
|
|
@ -160,9 +165,10 @@ public class CacheHelperTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified()
|
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified()
|
||||||
{
|
{
|
||||||
|
var now = DateTimeOffset.Now;
|
||||||
var filesystemFile = new MockFileData("")
|
var filesystemFile = new MockFileData("")
|
||||||
{
|
{
|
||||||
LastWriteTime = DateTimeOffset.Now
|
LastWriteTime = now,
|
||||||
};
|
};
|
||||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||||
{
|
{
|
||||||
|
|
@ -174,12 +180,12 @@ public class CacheHelperTests
|
||||||
var cacheHelper = new CacheHelper(fileService);
|
var cacheHelper = new CacheHelper(fileService);
|
||||||
|
|
||||||
var chapter = new ChapterBuilder("1")
|
var chapter = new ChapterBuilder("1")
|
||||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
.WithLastModified(now.DateTime)
|
||||||
.WithCreated(filesystemFile.LastWriteTime.DateTime)
|
.WithCreated(now.DateTime)
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
.WithLastModified(now.DateTime)
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||||
|
|
@ -188,9 +194,10 @@ public class CacheHelperTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified_ForceUpdate()
|
public void HasFileNotChangedSinceCreationOrLastScan_NotChangedSinceLastModified_ForceUpdate()
|
||||||
{
|
{
|
||||||
|
var now = DateTimeOffset.Now;
|
||||||
var filesystemFile = new MockFileData("")
|
var filesystemFile = new MockFileData("")
|
||||||
{
|
{
|
||||||
LastWriteTime = DateTimeOffset.Now
|
LastWriteTime = now.DateTime,
|
||||||
};
|
};
|
||||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||||
{
|
{
|
||||||
|
|
@ -202,12 +209,12 @@ public class CacheHelperTests
|
||||||
var cacheHelper = new CacheHelper(fileService);
|
var cacheHelper = new CacheHelper(fileService);
|
||||||
|
|
||||||
var chapter = new ChapterBuilder("1")
|
var chapter = new ChapterBuilder("1")
|
||||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
.WithLastModified(now.DateTime)
|
||||||
.WithCreated(filesystemFile.LastWriteTime.DateTime)
|
.WithCreated(now.DateTime)
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
.WithLastModified(now.DateTime)
|
||||||
.Build();
|
.Build();
|
||||||
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, true, file));
|
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, true, file));
|
||||||
}
|
}
|
||||||
|
|
@ -215,10 +222,11 @@ public class CacheHelperTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void IsFileUnmodifiedSinceCreationOrLastScan_ModifiedSinceLastScan()
|
public void IsFileUnmodifiedSinceCreationOrLastScan_ModifiedSinceLastScan()
|
||||||
{
|
{
|
||||||
|
var now = DateTimeOffset.Now;
|
||||||
var filesystemFile = new MockFileData("")
|
var filesystemFile = new MockFileData("")
|
||||||
{
|
{
|
||||||
LastWriteTime = DateTimeOffset.Now,
|
LastWriteTime = now.DateTime,
|
||||||
CreationTime = DateTimeOffset.Now
|
CreationTime = now.DateTime
|
||||||
};
|
};
|
||||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||||
{
|
{
|
||||||
|
|
@ -234,8 +242,8 @@ public class CacheHelperTests
|
||||||
.WithCreated(DateTime.Now.Subtract(TimeSpan.FromMinutes(10)))
|
.WithCreated(DateTime.Now.Subtract(TimeSpan.FromMinutes(10)))
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
var file = new MangaFileBuilder(TestCoverArchive, MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
.WithLastModified(now.DateTime)
|
||||||
.Build();
|
.Build();
|
||||||
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||||
}
|
}
|
||||||
|
|
@ -243,9 +251,10 @@ public class CacheHelperTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void HasFileNotChangedSinceCreationOrLastScan_ModifiedSinceLastScan_ButLastModifiedSame()
|
public void HasFileNotChangedSinceCreationOrLastScan_ModifiedSinceLastScan_ButLastModifiedSame()
|
||||||
{
|
{
|
||||||
|
var now = DateTimeOffset.Now;
|
||||||
var filesystemFile = new MockFileData("")
|
var filesystemFile = new MockFileData("")
|
||||||
{
|
{
|
||||||
LastWriteTime = DateTimeOffset.Now
|
LastWriteTime =now.DateTime
|
||||||
};
|
};
|
||||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||||
{
|
{
|
||||||
|
|
@ -262,7 +271,7 @@ public class CacheHelperTests
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
var file = new MangaFileBuilder(Path.Join(TestCoverImageDirectory, TestCoverArchive), MangaFormat.Archive)
|
||||||
.WithLastModified(filesystemFile.LastWriteTime.DateTime)
|
.WithLastModified(now.DateTime)
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file));
|
||||||
|
|
|
||||||
|
|
@ -1,118 +0,0 @@
|
||||||
using System.Collections.Generic;
|
|
||||||
using API.Data;
|
|
||||||
using API.Entities;
|
|
||||||
using API.Helpers;
|
|
||||||
using API.Helpers.Builders;
|
|
||||||
using Xunit;
|
|
||||||
|
|
||||||
namespace API.Tests.Helpers;
|
|
||||||
|
|
||||||
public class GenreHelperTests
|
|
||||||
{
|
|
||||||
[Fact]
|
|
||||||
public void UpdateGenre_ShouldAddNewGenre()
|
|
||||||
{
|
|
||||||
var allGenres = new List<Genre>
|
|
||||||
{
|
|
||||||
new GenreBuilder("Action").Build(),
|
|
||||||
new GenreBuilder("action").Build(),
|
|
||||||
new GenreBuilder("Sci-fi").Build(),
|
|
||||||
};
|
|
||||||
var genreAdded = new List<Genre>();
|
|
||||||
|
|
||||||
GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Adventure"}, genre =>
|
|
||||||
{
|
|
||||||
genreAdded.Add(genre);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Equal(2, genreAdded.Count);
|
|
||||||
Assert.Equal(4, allGenres.Count);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void UpdateGenre_ShouldNotAddDuplicateGenre()
|
|
||||||
{
|
|
||||||
var allGenres = new List<Genre>
|
|
||||||
{
|
|
||||||
new GenreBuilder("Action").Build(),
|
|
||||||
new GenreBuilder("action").Build(),
|
|
||||||
new GenreBuilder("Sci-fi").Build(),
|
|
||||||
|
|
||||||
};
|
|
||||||
var genreAdded = new List<Genre>();
|
|
||||||
|
|
||||||
GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Scifi"}, genre =>
|
|
||||||
{
|
|
||||||
genreAdded.Add(genre);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Equal(3, allGenres.Count);
|
|
||||||
Assert.Equal(2, genreAdded.Count);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void AddGenre_ShouldAddOnlyNonExistingGenre()
|
|
||||||
{
|
|
||||||
var existingGenres = new List<Genre>
|
|
||||||
{
|
|
||||||
new GenreBuilder("Action").Build(),
|
|
||||||
new GenreBuilder("action").Build(),
|
|
||||||
new GenreBuilder("Sci-fi").Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Action").Build());
|
|
||||||
Assert.Equal(3, existingGenres.Count);
|
|
||||||
|
|
||||||
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("action").Build());
|
|
||||||
Assert.Equal(3, existingGenres.Count);
|
|
||||||
|
|
||||||
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Shonen").Build());
|
|
||||||
Assert.Equal(4, existingGenres.Count);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void KeepOnlySamePeopleBetweenLists()
|
|
||||||
{
|
|
||||||
var existingGenres = new List<Genre>
|
|
||||||
{
|
|
||||||
new GenreBuilder("Action").Build(),
|
|
||||||
new GenreBuilder("Sci-fi").Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
var peopleFromChapters = new List<Genre>
|
|
||||||
{
|
|
||||||
new GenreBuilder("Action").Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
var genreRemoved = new List<Genre>();
|
|
||||||
GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
|
|
||||||
peopleFromChapters, genre =>
|
|
||||||
{
|
|
||||||
genreRemoved.Add(genre);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Single(genreRemoved);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void RemoveEveryoneIfNothingInRemoveAllExcept()
|
|
||||||
{
|
|
||||||
var existingGenres = new List<Genre>
|
|
||||||
{
|
|
||||||
new GenreBuilder("Action").Build(),
|
|
||||||
new GenreBuilder("Sci-fi").Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
var peopleFromChapters = new List<Genre>();
|
|
||||||
|
|
||||||
var genreRemoved = new List<Genre>();
|
|
||||||
GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
|
|
||||||
peopleFromChapters, genre =>
|
|
||||||
{
|
|
||||||
genreRemoved.Add(genre);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Equal(2, genreRemoved.Count);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
60
API.Tests/Helpers/KoreaderHelperTests.cs
Normal file
60
API.Tests/Helpers/KoreaderHelperTests.cs
Normal file
|
|
@ -0,0 +1,60 @@
|
||||||
|
using API.DTOs.Koreader;
|
||||||
|
using API.DTOs.Progress;
|
||||||
|
using API.Helpers;
|
||||||
|
using System.Runtime.CompilerServices;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Helpers;
|
||||||
|
|
||||||
|
|
||||||
|
public class KoreaderHelperTests
|
||||||
|
{
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("/body/DocFragment[11]/body/div/a", 10, null)]
|
||||||
|
[InlineData("/body/DocFragment[1]/body/div/p[40]", 0, 40)]
|
||||||
|
[InlineData("/body/DocFragment[8]/body/div/p[28]/text().264", 7, 28)]
|
||||||
|
public void GetEpubPositionDto(string koreaderPosition, int page, int? pNumber)
|
||||||
|
{
|
||||||
|
var expected = EmptyProgressDto();
|
||||||
|
expected.BookScrollId = pNumber.HasValue ? $"//html[1]/BODY/APP-ROOT[1]/DIV[1]/DIV[1]/DIV[1]/APP-BOOK-READER[1]/DIV[1]/DIV[2]/DIV[1]/DIV[1]/DIV[1]/P[{pNumber}]" : null;
|
||||||
|
expected.PageNum = page;
|
||||||
|
var actual = EmptyProgressDto();
|
||||||
|
|
||||||
|
KoreaderHelper.UpdateProgressDto(actual, koreaderPosition);
|
||||||
|
Assert.Equal(expected.BookScrollId, actual.BookScrollId);
|
||||||
|
Assert.Equal(expected.PageNum, actual.PageNum);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("//html[1]/BODY/APP-ROOT[1]/DIV[1]/DIV[1]/DIV[1]/APP-BOOK-READER[1]/DIV[1]/DIV[2]/DIV[1]/DIV[1]/DIV[1]/P[20]", 5, "/body/DocFragment[6]/body/div/p[20]")]
|
||||||
|
[InlineData(null, 10, "/body/DocFragment[11]/body/div/a")]
|
||||||
|
public void GetKoreaderPosition(string scrollId, int page, string koreaderPosition)
|
||||||
|
{
|
||||||
|
var given = EmptyProgressDto();
|
||||||
|
given.BookScrollId = scrollId;
|
||||||
|
given.PageNum = page;
|
||||||
|
|
||||||
|
Assert.Equal(koreaderPosition, KoreaderHelper.GetKoreaderPosition(given));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("./Data/AesopsFables.epub", "8795ACA4BF264B57C1EEDF06A0CEE688")]
|
||||||
|
public void GetKoreaderHash(string filePath, string hash)
|
||||||
|
{
|
||||||
|
Assert.Equal(KoreaderHelper.HashContents(filePath), hash);
|
||||||
|
}
|
||||||
|
|
||||||
|
private ProgressDto EmptyProgressDto()
|
||||||
|
{
|
||||||
|
return new ProgressDto
|
||||||
|
{
|
||||||
|
ChapterId = 0,
|
||||||
|
PageNum = 0,
|
||||||
|
VolumeId = 0,
|
||||||
|
SeriesId = 0,
|
||||||
|
LibraryId = 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
using System.Collections.Generic;
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using API.Entities;
|
using API.Entities;
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
|
|
@ -49,17 +50,14 @@ public class OrderableHelperTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void ReorderItems_InvalidPosition_NoChange()
|
public void ReorderItems_InvalidPosition_NoChange()
|
||||||
{
|
{
|
||||||
// Arrange
|
|
||||||
var items = new List<AppUserSideNavStream>
|
var items = new List<AppUserSideNavStream>
|
||||||
{
|
{
|
||||||
new AppUserSideNavStream { Id = 1, Order = 0, Name = "A" },
|
new AppUserSideNavStream { Id = 1, Order = 0, Name = "A" },
|
||||||
new AppUserSideNavStream { Id = 2, Order = 1, Name = "A" },
|
new AppUserSideNavStream { Id = 2, Order = 1, Name = "A" },
|
||||||
};
|
};
|
||||||
|
|
||||||
// Act
|
|
||||||
OrderableHelper.ReorderItems(items, 2, 3); // Position 3 is out of range
|
OrderableHelper.ReorderItems(items, 2, 3); // Position 3 is out of range
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.Equal(1, items[0].Id); // Item 1 should remain at position 0
|
Assert.Equal(1, items[0].Id); // Item 1 should remain at position 0
|
||||||
Assert.Equal(2, items[1].Id); // Item 2 should remain at position 1
|
Assert.Equal(2, items[1].Id); // Item 2 should remain at position 1
|
||||||
}
|
}
|
||||||
|
|
@ -80,7 +78,6 @@ public class OrderableHelperTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void ReorderItems_DoubleMove()
|
public void ReorderItems_DoubleMove()
|
||||||
{
|
{
|
||||||
// Arrange
|
|
||||||
var items = new List<AppUserSideNavStream>
|
var items = new List<AppUserSideNavStream>
|
||||||
{
|
{
|
||||||
new AppUserSideNavStream { Id = 1, Order = 0, Name = "0" },
|
new AppUserSideNavStream { Id = 1, Order = 0, Name = "0" },
|
||||||
|
|
@ -94,7 +91,6 @@ public class OrderableHelperTests
|
||||||
// Move 4 -> 1
|
// Move 4 -> 1
|
||||||
OrderableHelper.ReorderItems(items, 5, 1);
|
OrderableHelper.ReorderItems(items, 5, 1);
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.Equal(1, items[0].Id);
|
Assert.Equal(1, items[0].Id);
|
||||||
Assert.Equal(0, items[0].Order);
|
Assert.Equal(0, items[0].Order);
|
||||||
Assert.Equal(5, items[1].Id);
|
Assert.Equal(5, items[1].Id);
|
||||||
|
|
@ -109,4 +105,98 @@ public class OrderableHelperTests
|
||||||
|
|
||||||
Assert.Equal("034125", string.Join("", items.Select(s => s.Name)));
|
Assert.Equal("034125", string.Join("", items.Select(s => s.Name)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static List<ReadingListItem> CreateTestReadingListItems(int count = 4)
|
||||||
|
{
|
||||||
|
var items = new List<ReadingListItem>();
|
||||||
|
|
||||||
|
for (var i = 0; i < count; i++)
|
||||||
|
{
|
||||||
|
items.Add(new ReadingListItem() { Id = i + 1, Order = count, ReadingListId = i + 1});
|
||||||
|
}
|
||||||
|
|
||||||
|
return items;
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void ReorderItems_MoveItemToBeginning_CorrectOrder()
|
||||||
|
{
|
||||||
|
var items = CreateTestReadingListItems();
|
||||||
|
|
||||||
|
OrderableHelper.ReorderItems(items, 3, 0);
|
||||||
|
|
||||||
|
Assert.Equal(3, items[0].Id);
|
||||||
|
Assert.Equal(1, items[1].Id);
|
||||||
|
Assert.Equal(2, items[2].Id);
|
||||||
|
Assert.Equal(4, items[3].Id);
|
||||||
|
|
||||||
|
for (var i = 0; i < items.Count; i++)
|
||||||
|
{
|
||||||
|
Assert.Equal(i, items[i].Order);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void ReorderItems_MoveItemToEnd_CorrectOrder()
|
||||||
|
{
|
||||||
|
var items = CreateTestReadingListItems();
|
||||||
|
|
||||||
|
OrderableHelper.ReorderItems(items, 1, 3);
|
||||||
|
|
||||||
|
Assert.Equal(2, items[0].Id);
|
||||||
|
Assert.Equal(3, items[1].Id);
|
||||||
|
Assert.Equal(4, items[2].Id);
|
||||||
|
Assert.Equal(1, items[3].Id);
|
||||||
|
|
||||||
|
for (var i = 0; i < items.Count; i++)
|
||||||
|
{
|
||||||
|
Assert.Equal(i, items[i].Order);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void ReorderItems_MoveItemToMiddle_CorrectOrder()
|
||||||
|
{
|
||||||
|
var items = CreateTestReadingListItems();
|
||||||
|
|
||||||
|
OrderableHelper.ReorderItems(items, 4, 2);
|
||||||
|
|
||||||
|
Assert.Equal(1, items[0].Id);
|
||||||
|
Assert.Equal(2, items[1].Id);
|
||||||
|
Assert.Equal(4, items[2].Id);
|
||||||
|
Assert.Equal(3, items[3].Id);
|
||||||
|
|
||||||
|
for (var i = 0; i < items.Count; i++)
|
||||||
|
{
|
||||||
|
Assert.Equal(i, items[i].Order);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void ReorderItems_MoveItemToOutOfBoundsPosition_MovesToEnd()
|
||||||
|
{
|
||||||
|
var items = CreateTestReadingListItems();
|
||||||
|
|
||||||
|
OrderableHelper.ReorderItems(items, 2, 10);
|
||||||
|
|
||||||
|
Assert.Equal(1, items[0].Id);
|
||||||
|
Assert.Equal(3, items[1].Id);
|
||||||
|
Assert.Equal(4, items[2].Id);
|
||||||
|
Assert.Equal(2, items[3].Id);
|
||||||
|
|
||||||
|
for (var i = 0; i < items.Count; i++)
|
||||||
|
{
|
||||||
|
Assert.Equal(i, items[i].Order);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void ReorderItems_NegativePosition_ThrowsArgumentException()
|
||||||
|
{
|
||||||
|
var items = CreateTestReadingListItems();
|
||||||
|
|
||||||
|
Assert.Throws<ArgumentException>(() =>
|
||||||
|
OrderableHelper.ReorderItems(items, 2, -1)
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,5 @@
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using API.Entities;
|
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Entities.Metadata;
|
|
||||||
using API.Extensions;
|
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services.Tasks.Scanner;
|
using API.Services.Tasks.Scanner;
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,6 @@
|
||||||
using System;
|
using System.Collections.Generic;
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using API.Data;
|
using System.Threading.Tasks;
|
||||||
using API.DTOs;
|
|
||||||
using API.Entities;
|
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
|
|
@ -11,405 +8,219 @@ using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Helpers;
|
namespace API.Tests.Helpers;
|
||||||
|
|
||||||
public class PersonHelperTests
|
public class PersonHelperTests : AbstractDbTest
|
||||||
{
|
{
|
||||||
#region UpdatePeople
|
protected override async Task ResetDb()
|
||||||
[Fact]
|
|
||||||
public void UpdatePeople_ShouldAddNewPeople()
|
|
||||||
{
|
{
|
||||||
var allPeople = new List<Person>
|
Context.Series.RemoveRange(Context.Series.ToList());
|
||||||
{
|
Context.Person.RemoveRange(Context.Person.ToList());
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
Context.Library.RemoveRange(Context.Library.ToList());
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
Context.Series.RemoveRange(Context.Series.ToList());
|
||||||
};
|
await Context.SaveChangesAsync();
|
||||||
var peopleAdded = new List<Person>();
|
}
|
||||||
|
|
||||||
PersonHelper.UpdatePeople(allPeople, new[] {"Joseph Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
// 1. Test adding new people and keeping existing ones
|
||||||
{
|
[Fact]
|
||||||
peopleAdded.Add(person);
|
public async Task UpdateChapterPeopleAsync_AddNewPeople_ExistingPersonRetained()
|
||||||
});
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
Assert.Equal(2, peopleAdded.Count);
|
var library = new LibraryBuilder("My Library")
|
||||||
Assert.Equal(4, allPeople.Count);
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Add(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var existingPerson = new PersonBuilder("Joe Shmo").Build();
|
||||||
|
var chapter = new ChapterBuilder("1").Build();
|
||||||
|
|
||||||
|
// Create an existing person and assign them to the series with a role
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithLibraryId(library.Id)
|
||||||
|
.WithFormat(MangaFormat.Archive)
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithPerson(existingPerson, PersonRole.Editor)
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1").WithChapter(chapter).Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.SeriesRepository.Add(series);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Call UpdateChapterPeopleAsync with one existing and one new person
|
||||||
|
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo", "New Person" }, PersonRole.Editor, UnitOfWork);
|
||||||
|
|
||||||
|
// Assert existing person retained and new person added
|
||||||
|
var people = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.Contains(people, p => p.Name == "Joe Shmo");
|
||||||
|
Assert.Contains(people, p => p.Name == "New Person");
|
||||||
|
|
||||||
|
var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||||
|
Assert.Contains("Joe Shmo", chapterPeople);
|
||||||
|
Assert.Contains("New Person", chapterPeople);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Test removing a person no longer in the list
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateChapterPeopleAsync_RemovePeople()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
var library = new LibraryBuilder("My Library")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Add(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var existingPerson1 = new PersonBuilder("Joe Shmo").Build();
|
||||||
|
var existingPerson2 = new PersonBuilder("Jane Doe").Build();
|
||||||
|
var chapter = new ChapterBuilder("1")
|
||||||
|
.WithPerson(existingPerson1, PersonRole.Editor)
|
||||||
|
.WithPerson(existingPerson2, PersonRole.Editor)
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithLibraryId(library.Id)
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(chapter)
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.SeriesRepository.Add(series);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Call UpdateChapterPeopleAsync with only one person
|
||||||
|
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
|
||||||
|
|
||||||
|
// PersonHelper does not remove the Person from the global DbSet itself
|
||||||
|
await UnitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated();
|
||||||
|
|
||||||
|
var people = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.DoesNotContain(people, p => p.Name == "Jane Doe");
|
||||||
|
|
||||||
|
var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||||
|
Assert.Contains("Joe Shmo", chapterPeople);
|
||||||
|
Assert.DoesNotContain("Jane Doe", chapterPeople);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Test no changes when the list of people is the same
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateChapterPeopleAsync_NoChanges()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
var library = new LibraryBuilder("My Library")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Add(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var existingPerson = new PersonBuilder("Joe Shmo").Build();
|
||||||
|
var chapter = new ChapterBuilder("1").WithPerson(existingPerson, PersonRole.Editor).Build();
|
||||||
|
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithLibraryId(library.Id)
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(chapter)
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.SeriesRepository.Add(series);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Call UpdateChapterPeopleAsync with the same list
|
||||||
|
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
|
||||||
|
|
||||||
|
var people = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.Contains(people, p => p.Name == "Joe Shmo");
|
||||||
|
|
||||||
|
var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
|
||||||
|
Assert.Contains("Joe Shmo", chapterPeople);
|
||||||
|
Assert.Single(chapter.People); // No duplicate entries
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Test multiple roles for a person
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateChapterPeopleAsync_MultipleRoles()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
var library = new LibraryBuilder("My Library")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Add(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var person = new PersonBuilder("Joe Shmo").Build();
|
||||||
|
var chapter = new ChapterBuilder("1").WithPerson(person, PersonRole.Writer).Build();
|
||||||
|
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithLibraryId(library.Id)
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(chapter)
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.SeriesRepository.Add(series);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Add same person as Editor
|
||||||
|
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
|
||||||
|
|
||||||
|
// Ensure that the same person is assigned with two roles
|
||||||
|
var chapterPeople = chapter
|
||||||
|
.People
|
||||||
|
.Where(cp =>
|
||||||
|
cp.Person.Name == "Joe Shmo")
|
||||||
|
.ToList();
|
||||||
|
Assert.Equal(2, chapterPeople.Count); // One for each role
|
||||||
|
Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Writer);
|
||||||
|
Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Editor);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public void UpdatePeople_ShouldNotAddDuplicatePeople()
|
public async Task UpdateChapterPeopleAsync_MatchOnAlias_NoChanges()
|
||||||
{
|
{
|
||||||
var allPeople = new List<Person>
|
await ResetDb();
|
||||||
{
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
|
||||||
new PersonBuilder("Sally Ann", PersonRole.CoverArtist).Build(),
|
|
||||||
|
|
||||||
};
|
var library = new LibraryBuilder("My Library")
|
||||||
var peopleAdded = new List<Person>();
|
.Build();
|
||||||
|
|
||||||
PersonHelper.UpdatePeople(allPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.CoverArtist, person =>
|
UnitOfWork.LibraryRepository.Add(library);
|
||||||
{
|
await UnitOfWork.CommitAsync();
|
||||||
peopleAdded.Add(person);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Equal(3, allPeople.Count);
|
var person = new PersonBuilder("Joe Doe")
|
||||||
}
|
.WithAlias("Jonny Doe")
|
||||||
#endregion
|
.Build();
|
||||||
|
|
||||||
#region UpdatePeopleList
|
var chapter = new ChapterBuilder("1")
|
||||||
|
.WithPerson(person, PersonRole.Editor)
|
||||||
|
.Build();
|
||||||
|
|
||||||
[Fact]
|
var series = new SeriesBuilder("Test 1")
|
||||||
public void UpdatePeopleList_NullTags_NoChanges()
|
.WithLibraryId(library.Id)
|
||||||
{
|
.WithVolume(new VolumeBuilder("1")
|
||||||
// Arrange
|
.WithChapter(chapter)
|
||||||
ICollection<PersonDto> tags = null;
|
.Build())
|
||||||
var series = new SeriesBuilder("Test Series").Build();
|
.Build();
|
||||||
var allTags = new List<Person>();
|
|
||||||
var handleAddCalled = false;
|
|
||||||
var onModifiedCalled = false;
|
|
||||||
|
|
||||||
// Act
|
UnitOfWork.SeriesRepository.Add(series);
|
||||||
PersonHelper.UpdatePeopleList(PersonRole.Writer, tags, series, allTags, p => handleAddCalled = true, () => onModifiedCalled = true);
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
// Assert
|
// Add on Name
|
||||||
Assert.False(handleAddCalled);
|
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Doe" }, PersonRole.Editor, UnitOfWork);
|
||||||
Assert.False(onModifiedCalled);
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.Single(allPeople);
|
||||||
|
|
||||||
|
// Add on alias
|
||||||
|
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Jonny Doe" }, PersonRole.Editor, UnitOfWork);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.Single(allPeople);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
// TODO: Unit tests for series
|
||||||
public void UpdatePeopleList_AddNewTag_TagAddedAndOnModifiedCalled()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
const PersonRole role = PersonRole.Writer;
|
|
||||||
var tags = new List<PersonDto>
|
|
||||||
{
|
|
||||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
|
||||||
};
|
|
||||||
var series = new SeriesBuilder("Test Series").Build();
|
|
||||||
var allTags = new List<Person>();
|
|
||||||
var handleAddCalled = false;
|
|
||||||
var onModifiedCalled = false;
|
|
||||||
|
|
||||||
// Act
|
|
||||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
|
||||||
{
|
|
||||||
handleAddCalled = true;
|
|
||||||
series.Metadata.People.Add(p);
|
|
||||||
}, () => onModifiedCalled = true);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.True(handleAddCalled);
|
|
||||||
Assert.True(onModifiedCalled);
|
|
||||||
Assert.Single(series.Metadata.People);
|
|
||||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void UpdatePeopleList_RemoveExistingTag_TagRemovedAndOnModifiedCalled()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
const PersonRole role = PersonRole.Writer;
|
|
||||||
var tags = new List<PersonDto>();
|
|
||||||
var series = new SeriesBuilder("Test Series").Build();
|
|
||||||
var person = new PersonBuilder("John Doe", role).Build();
|
|
||||||
person.Id = 1;
|
|
||||||
series.Metadata.People.Add(person);
|
|
||||||
var allTags = new List<Person>
|
|
||||||
{
|
|
||||||
person
|
|
||||||
};
|
|
||||||
var handleAddCalled = false;
|
|
||||||
var onModifiedCalled = false;
|
|
||||||
|
|
||||||
// Act
|
|
||||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
|
||||||
{
|
|
||||||
handleAddCalled = true;
|
|
||||||
series.Metadata.People.Add(p);
|
|
||||||
}, () => onModifiedCalled = true);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.False(handleAddCalled);
|
|
||||||
Assert.True(onModifiedCalled);
|
|
||||||
Assert.Empty(series.Metadata.People);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void UpdatePeopleList_UpdateExistingTag_OnModifiedCalled()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
const PersonRole role = PersonRole.Writer;
|
|
||||||
var tags = new List<PersonDto>
|
|
||||||
{
|
|
||||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
|
||||||
};
|
|
||||||
var series = new SeriesBuilder("Test Series").Build();
|
|
||||||
var person = new PersonBuilder("John Doe", role).Build();
|
|
||||||
person.Id = 1;
|
|
||||||
series.Metadata.People.Add(person);
|
|
||||||
var allTags = new List<Person>
|
|
||||||
{
|
|
||||||
person
|
|
||||||
};
|
|
||||||
var handleAddCalled = false;
|
|
||||||
var onModifiedCalled = false;
|
|
||||||
|
|
||||||
// Act
|
|
||||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
|
||||||
{
|
|
||||||
handleAddCalled = true;
|
|
||||||
series.Metadata.People.Add(p);
|
|
||||||
}, () => onModifiedCalled = true);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.False(handleAddCalled);
|
|
||||||
Assert.False(onModifiedCalled);
|
|
||||||
Assert.Single(series.Metadata.People);
|
|
||||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void UpdatePeopleList_NoChanges_HandleAddAndOnModifiedNotCalled()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
const PersonRole role = PersonRole.Writer;
|
|
||||||
var tags = new List<PersonDto>
|
|
||||||
{
|
|
||||||
new PersonDto { Id = 1, Name = "John Doe", Role = role }
|
|
||||||
};
|
|
||||||
var series = new SeriesBuilder("Test Series").Build();
|
|
||||||
var person = new PersonBuilder("John Doe", role).Build();
|
|
||||||
person.Id = 1;
|
|
||||||
series.Metadata.People.Add(person);
|
|
||||||
var allTags = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("John Doe", role).Build()
|
|
||||||
};
|
|
||||||
var handleAddCalled = false;
|
|
||||||
var onModifiedCalled = false;
|
|
||||||
|
|
||||||
// Act
|
|
||||||
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
|
|
||||||
{
|
|
||||||
handleAddCalled = true;
|
|
||||||
series.Metadata.People.Add(p);
|
|
||||||
}, () => onModifiedCalled = true);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.False(handleAddCalled);
|
|
||||||
Assert.False(onModifiedCalled);
|
|
||||||
Assert.Single(series.Metadata.People);
|
|
||||||
Assert.Equal("John Doe", series.Metadata.People.First().Name);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#endregion
|
|
||||||
|
|
||||||
#region RemovePeople
|
|
||||||
[Fact]
|
|
||||||
public void RemovePeople_ShouldRemovePeopleOfSameRole()
|
|
||||||
{
|
|
||||||
var existingPeople = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
|
||||||
};
|
|
||||||
var peopleRemoved = new List<Person>();
|
|
||||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
|
||||||
{
|
|
||||||
peopleRemoved.Add(person);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
|
||||||
Assert.Single(peopleRemoved);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void RemovePeople_ShouldRemovePeopleFromBothRoles()
|
|
||||||
{
|
|
||||||
var existingPeople = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
|
||||||
};
|
|
||||||
var peopleRemoved = new List<Person>();
|
|
||||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
|
|
||||||
{
|
|
||||||
peopleRemoved.Add(person);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
|
||||||
Assert.Single(peopleRemoved);
|
|
||||||
|
|
||||||
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo"}, PersonRole.CoverArtist, person =>
|
|
||||||
{
|
|
||||||
peopleRemoved.Add(person);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Empty(existingPeople);
|
|
||||||
Assert.Equal(2, peopleRemoved.Count);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void RemovePeople_ShouldRemovePeopleOfSameRole_WhenNothingPassed()
|
|
||||||
{
|
|
||||||
var existingPeople = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
|
||||||
};
|
|
||||||
var peopleRemoved = new List<Person>();
|
|
||||||
PersonHelper.RemovePeople(existingPeople, new List<string>(), PersonRole.Writer, person =>
|
|
||||||
{
|
|
||||||
peopleRemoved.Add(person);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.NotEqual(existingPeople, peopleRemoved);
|
|
||||||
Assert.Equal(2, peopleRemoved.Count);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#endregion
|
|
||||||
|
|
||||||
#region KeepOnlySamePeopleBetweenLists
|
|
||||||
[Fact]
|
|
||||||
public void KeepOnlySamePeopleBetweenLists()
|
|
||||||
{
|
|
||||||
var existingPeople = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
|
||||||
new PersonBuilder("Sally", PersonRole.Writer).Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
var peopleFromChapters = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
var peopleRemoved = new List<Person>();
|
|
||||||
PersonHelper.KeepOnlySamePeopleBetweenLists(existingPeople,
|
|
||||||
peopleFromChapters, person =>
|
|
||||||
{
|
|
||||||
peopleRemoved.Add(person);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Equal(2, peopleRemoved.Count);
|
|
||||||
}
|
|
||||||
#endregion
|
|
||||||
|
|
||||||
#region AddPeople
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonDoesNotExist()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
var metadataPeople = new List<Person>();
|
|
||||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.Single(metadataPeople);
|
|
||||||
Assert.Contains(person, metadataPeople);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonAlreadyExists()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
var metadataPeople = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("John Smith", PersonRole.Character)
|
|
||||||
.WithId(1)
|
|
||||||
.Build()
|
|
||||||
};
|
|
||||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
|
||||||
// Act
|
|
||||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.Single(metadataPeople);
|
|
||||||
Assert.NotNull(metadataPeople.SingleOrDefault(p =>
|
|
||||||
p.Name.Equals(person.Name) && p.Role == person.Role && p.NormalizedName == person.NormalizedName));
|
|
||||||
Assert.Equal(1, metadataPeople.First().Id);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonNameIsNullOrEmpty()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
var metadataPeople = new List<Person>();
|
|
||||||
var person2 = new PersonBuilder(string.Empty, PersonRole.Character).Build();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person2);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.Empty(metadataPeople);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsDifferentButRoleIsSame()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
var metadataPeople = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("John Smith", PersonRole.Character).Build()
|
|
||||||
};
|
|
||||||
var person = new PersonBuilder("John Doe", PersonRole.Character).Build();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.Equal(2, metadataPeople.Count);
|
|
||||||
Assert.Contains(person, metadataPeople);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsSameButRoleIsDifferent()
|
|
||||||
{
|
|
||||||
// Arrange
|
|
||||||
var metadataPeople = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("John Doe", PersonRole.Writer).Build()
|
|
||||||
};
|
|
||||||
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
Assert.Equal(2, metadataPeople.Count);
|
|
||||||
Assert.Contains(person, metadataPeople);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void AddPeople_ShouldAddOnlyNonExistingPeople()
|
|
||||||
{
|
|
||||||
var existingPeople = new List<Person>
|
|
||||||
{
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
|
|
||||||
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
|
|
||||||
new PersonBuilder("Sally", PersonRole.Writer).Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build());
|
|
||||||
Assert.Equal(3, existingPeople.Count);
|
|
||||||
|
|
||||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.Writer).Build());
|
|
||||||
Assert.Equal(3, existingPeople.Count);
|
|
||||||
|
|
||||||
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo Two", PersonRole.CoverArtist).Build());
|
|
||||||
Assert.Equal(4, existingPeople.Count);
|
|
||||||
}
|
|
||||||
|
|
||||||
#endregion
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
124
API.Tests/Helpers/RandfHelper.cs
Normal file
124
API.Tests/Helpers/RandfHelper.cs
Normal file
|
|
@ -0,0 +1,124 @@
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Reflection;
|
||||||
|
|
||||||
|
namespace API.Tests.Helpers;
|
||||||
|
|
||||||
|
public class RandfHelper
|
||||||
|
{
|
||||||
|
private static readonly Random Random = new ();
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Returns true if all simple fields are equal
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="obj1"></param>
|
||||||
|
/// <param name="obj2"></param>
|
||||||
|
/// <param name="ignoreFields">fields to ignore, note that the names are very weird sometimes</param>
|
||||||
|
/// <returns></returns>
|
||||||
|
/// <exception cref="ArgumentNullException"></exception>
|
||||||
|
/// <exception cref="ArgumentException"></exception>
|
||||||
|
public static bool AreSimpleFieldsEqual(object obj1, object obj2, IList<string> ignoreFields)
|
||||||
|
{
|
||||||
|
if (obj1 == null || obj2 == null)
|
||||||
|
throw new ArgumentNullException("Neither object can be null.");
|
||||||
|
|
||||||
|
Type type1 = obj1.GetType();
|
||||||
|
Type type2 = obj2.GetType();
|
||||||
|
|
||||||
|
if (type1 != type2)
|
||||||
|
throw new ArgumentException("Objects must be of the same type.");
|
||||||
|
|
||||||
|
FieldInfo[] fields = type1.GetFields(BindingFlags.Public | BindingFlags.Instance | BindingFlags.NonPublic);
|
||||||
|
|
||||||
|
foreach (var field in fields)
|
||||||
|
{
|
||||||
|
if (field.IsInitOnly) continue;
|
||||||
|
if (ignoreFields.Contains(field.Name)) continue;
|
||||||
|
|
||||||
|
Type fieldType = field.FieldType;
|
||||||
|
|
||||||
|
if (IsRelevantType(fieldType))
|
||||||
|
{
|
||||||
|
object value1 = field.GetValue(obj1);
|
||||||
|
object value2 = field.GetValue(obj2);
|
||||||
|
|
||||||
|
if (!Equals(value1, value2))
|
||||||
|
{
|
||||||
|
throw new ArgumentException("Fields must be of the same type: " + field.Name + " was " + value1 + " and " + value2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static bool IsRelevantType(Type type)
|
||||||
|
{
|
||||||
|
return type.IsPrimitive
|
||||||
|
|| type == typeof(string)
|
||||||
|
|| type.IsEnum;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Sets all simple fields of the given object to a random value
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="obj"></param>
|
||||||
|
/// <remarks>Simple is, primitive, string, or enum</remarks>
|
||||||
|
/// <exception cref="ArgumentNullException"></exception>
|
||||||
|
public static void SetRandomValues(object obj)
|
||||||
|
{
|
||||||
|
if (obj == null) throw new ArgumentNullException(nameof(obj));
|
||||||
|
|
||||||
|
Type type = obj.GetType();
|
||||||
|
FieldInfo[] fields = type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic);
|
||||||
|
|
||||||
|
foreach (var field in fields)
|
||||||
|
{
|
||||||
|
if (field.IsInitOnly) continue; // Skip readonly fields
|
||||||
|
|
||||||
|
object value = GenerateRandomValue(field.FieldType);
|
||||||
|
if (value != null)
|
||||||
|
{
|
||||||
|
field.SetValue(obj, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static object GenerateRandomValue(Type type)
|
||||||
|
{
|
||||||
|
if (type == typeof(int))
|
||||||
|
return Random.Next();
|
||||||
|
if (type == typeof(float))
|
||||||
|
return (float)Random.NextDouble() * 100;
|
||||||
|
if (type == typeof(double))
|
||||||
|
return Random.NextDouble() * 100;
|
||||||
|
if (type == typeof(bool))
|
||||||
|
return Random.Next(2) == 1;
|
||||||
|
if (type == typeof(char))
|
||||||
|
return (char)Random.Next('A', 'Z' + 1);
|
||||||
|
if (type == typeof(byte))
|
||||||
|
return (byte)Random.Next(0, 256);
|
||||||
|
if (type == typeof(short))
|
||||||
|
return (short)Random.Next(short.MinValue, short.MaxValue);
|
||||||
|
if (type == typeof(long))
|
||||||
|
return (long)(Random.NextDouble() * long.MaxValue);
|
||||||
|
if (type == typeof(string))
|
||||||
|
return GenerateRandomString(10);
|
||||||
|
if (type.IsEnum)
|
||||||
|
{
|
||||||
|
var values = Enum.GetValues(type);
|
||||||
|
return values.GetValue(Random.Next(values.Length));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unsupported type
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string GenerateRandomString(int length)
|
||||||
|
{
|
||||||
|
const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
||||||
|
return new string(Enumerable.Repeat(chars, length)
|
||||||
|
.Select(s => s[Random.Next(s.Length)]).ToArray());
|
||||||
|
}
|
||||||
|
}
|
||||||
80
API.Tests/Helpers/RateLimiterTests.cs
Normal file
80
API.Tests/Helpers/RateLimiterTests.cs
Normal file
|
|
@ -0,0 +1,80 @@
|
||||||
|
using System;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.Helpers;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Helpers;
|
||||||
|
|
||||||
|
public class RateLimiterTests
|
||||||
|
{
|
||||||
|
[Fact]
|
||||||
|
public void AcquireTokens_Successful()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var limiter = new RateLimiter(3, TimeSpan.FromSeconds(1));
|
||||||
|
|
||||||
|
// Act & Assert
|
||||||
|
Assert.True(limiter.TryAcquire("test_key"));
|
||||||
|
Assert.True(limiter.TryAcquire("test_key"));
|
||||||
|
Assert.True(limiter.TryAcquire("test_key"));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void AcquireTokens_ExceedLimit()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
limiter.TryAcquire("test_key");
|
||||||
|
limiter.TryAcquire("test_key");
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.False(limiter.TryAcquire("test_key"));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task AcquireTokens_Refill()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));
|
||||||
|
|
||||||
|
// Act
|
||||||
|
limiter.TryAcquire("test_key");
|
||||||
|
limiter.TryAcquire("test_key");
|
||||||
|
|
||||||
|
// Wait for refill
|
||||||
|
await Task.Delay(1100);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(limiter.TryAcquire("test_key"));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task AcquireTokens_Refill_WithOff()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
limiter.TryAcquire("test_key");
|
||||||
|
limiter.TryAcquire("test_key");
|
||||||
|
|
||||||
|
// Wait for refill
|
||||||
|
await Task.Delay(2100);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.False(limiter.TryAcquire("test_key"));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void AcquireTokens_MultipleKeys()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));
|
||||||
|
|
||||||
|
// Act & Assert
|
||||||
|
Assert.True(limiter.TryAcquire("key1"));
|
||||||
|
Assert.True(limiter.TryAcquire("key2"));
|
||||||
|
}
|
||||||
|
}
|
||||||
258
API.Tests/Helpers/ReviewHelperTests.cs
Normal file
258
API.Tests/Helpers/ReviewHelperTests.cs
Normal file
|
|
@ -0,0 +1,258 @@
|
||||||
|
using API.Helpers;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using Xunit;
|
||||||
|
using API.DTOs.SeriesDetail;
|
||||||
|
|
||||||
|
namespace API.Tests.Helpers;
|
||||||
|
|
||||||
|
public class ReviewHelperTests
|
||||||
|
{
|
||||||
|
#region SelectSpectrumOfReviews Tests
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void SelectSpectrumOfReviews_WhenLessThan10Reviews_ReturnsAllReviews()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var reviews = CreateReviewList(8);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(8, result.Count);
|
||||||
|
Assert.Equal(reviews, result.OrderByDescending(r => r.Score));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void SelectSpectrumOfReviews_WhenMoreThan10Reviews_Returns10Reviews()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var reviews = CreateReviewList(20);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(10, result.Count);
|
||||||
|
Assert.Equal(reviews[0], result.First());
|
||||||
|
Assert.Equal(reviews[19], result.Last());
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void SelectSpectrumOfReviews_WithExactly10Reviews_ReturnsAllReviews()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var reviews = CreateReviewList(10);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(10, result.Count);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void SelectSpectrumOfReviews_WithLargeNumberOfReviews_ReturnsCorrectSpectrum()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var reviews = CreateReviewList(100);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(10, result.Count);
|
||||||
|
Assert.Contains(reviews[0], result);
|
||||||
|
Assert.Contains(reviews[1], result);
|
||||||
|
Assert.Contains(reviews[98], result);
|
||||||
|
Assert.Contains(reviews[99], result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void SelectSpectrumOfReviews_WithEmptyList_ReturnsEmptyList()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var reviews = new List<UserReviewDto>();
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Empty(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void SelectSpectrumOfReviews_ResultsOrderedByScoreDescending()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var reviews = new List<UserReviewDto>
|
||||||
|
{
|
||||||
|
new UserReviewDto { Tagline = "1", Score = 3 },
|
||||||
|
new UserReviewDto { Tagline = "2", Score = 5 },
|
||||||
|
new UserReviewDto { Tagline = "3", Score = 1 },
|
||||||
|
new UserReviewDto { Tagline = "4", Score = 4 },
|
||||||
|
new UserReviewDto { Tagline = "5", Score = 2 }
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.SelectSpectrumOfReviews(reviews).ToList();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(5, result.Count);
|
||||||
|
Assert.Equal(5, result[0].Score);
|
||||||
|
Assert.Equal(4, result[1].Score);
|
||||||
|
Assert.Equal(3, result[2].Score);
|
||||||
|
Assert.Equal(2, result[3].Score);
|
||||||
|
Assert.Equal(1, result[4].Score);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region GetCharacters Tests
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCharacters_WithNullBody_ReturnsNull()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
string body = null;
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.GetCharacters(body);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Null(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCharacters_WithEmptyBody_ReturnsEmptyString()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var body = string.Empty;
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.GetCharacters(body);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(string.Empty, result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCharacters_WithNoTextNodes_ReturnsEmptyString()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
const string body = "<div></div>";
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.GetCharacters(body);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(string.Empty, result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCharacters_WithLessCharactersThanLimit_ReturnsFullText()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var body = "<p>This is a short review.</p>";
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.GetCharacters(body);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal("This is a short review.…", result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCharacters_WithMoreCharactersThanLimit_TruncatesText()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var body = "<p>" + new string('a', 200) + "</p>";
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.GetCharacters(body);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(new string('a', 175) + "…", result);
|
||||||
|
Assert.Equal(176, result.Length); // 175 characters + ellipsis
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCharacters_IgnoresScriptTags()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
const string body = "<p>Visible text</p><script>console.log('hidden');</script>";
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.GetCharacters(body);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal("Visible text…", result);
|
||||||
|
Assert.DoesNotContain("hidden", result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCharacters_RemovesMarkdownSymbols()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
const string body = "<p>This is **bold** and _italic_ text with [link](url).</p>";
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.GetCharacters(body);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal("This is bold and italic text with link.…", result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void GetCharacters_HandlesComplexMarkdownAndHtml()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
const string body = """
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<h1># Header</h1>
|
||||||
|
<p>This is ~~strikethrough~~ and __underlined__ text</p>
|
||||||
|
<p>~~~code block~~~</p>
|
||||||
|
<p>+++highlighted+++</p>
|
||||||
|
<p>img123(image.jpg)</p>
|
||||||
|
</div>
|
||||||
|
""";
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = ReviewHelper.GetCharacters(body);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.DoesNotContain("~~", result);
|
||||||
|
Assert.DoesNotContain("__", result);
|
||||||
|
Assert.DoesNotContain("~~~", result);
|
||||||
|
Assert.DoesNotContain("+++", result);
|
||||||
|
Assert.DoesNotContain("img123(", result);
|
||||||
|
Assert.Contains("Header", result);
|
||||||
|
Assert.Contains("strikethrough", result);
|
||||||
|
Assert.Contains("underlined", result);
|
||||||
|
Assert.Contains("code block", result);
|
||||||
|
Assert.Contains("highlighted", result);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region Helper Methods
|
||||||
|
|
||||||
|
private static List<UserReviewDto> CreateReviewList(int count)
|
||||||
|
{
|
||||||
|
var reviews = new List<UserReviewDto>();
|
||||||
|
for (var i = 0; i < count; i++)
|
||||||
|
{
|
||||||
|
reviews.Add(new UserReviewDto
|
||||||
|
{
|
||||||
|
Tagline = $"{i + 1}",
|
||||||
|
Score = count - i // This makes them ordered by score descending initially
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return reviews;
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
|
|
||||||
208
API.Tests/Helpers/ScannerHelper.cs
Normal file
208
API.Tests/Helpers/ScannerHelper.cs
Normal file
|
|
@ -0,0 +1,208 @@
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.IO;
|
||||||
|
using System.IO.Abstractions;
|
||||||
|
using System.IO.Compression;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Text;
|
||||||
|
using System.Text.Json;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using System.Xml;
|
||||||
|
using System.Xml.Serialization;
|
||||||
|
using API.Data;
|
||||||
|
using API.Data.Metadata;
|
||||||
|
using API.Entities;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Helpers;
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Plus;
|
||||||
|
using API.Services.Tasks;
|
||||||
|
using API.Services.Tasks.Metadata;
|
||||||
|
using API.Services.Tasks.Scanner;
|
||||||
|
using API.SignalR;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit.Abstractions;
|
||||||
|
|
||||||
|
namespace API.Tests.Helpers;
|
||||||
|
#nullable enable
|
||||||
|
|
||||||
|
public class ScannerHelper
|
||||||
|
{
|
||||||
|
private readonly IUnitOfWork _unitOfWork;
|
||||||
|
private readonly ITestOutputHelper _testOutputHelper;
|
||||||
|
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests");
|
||||||
|
private readonly string _testcasesDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/TestCases");
|
||||||
|
private readonly string _imagePath = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/1x1.png");
|
||||||
|
private static readonly string[] ComicInfoExtensions = new[] { ".cbz", ".cbr", ".zip", ".rar" };
|
||||||
|
|
||||||
|
public ScannerHelper(IUnitOfWork unitOfWork, ITestOutputHelper testOutputHelper)
|
||||||
|
{
|
||||||
|
_unitOfWork = unitOfWork;
|
||||||
|
_testOutputHelper = testOutputHelper;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async Task<Library> GenerateScannerData(string testcase, Dictionary<string, ComicInfo> comicInfos = null)
|
||||||
|
{
|
||||||
|
var testDirectoryPath = await GenerateTestDirectory(Path.Join(_testcasesDirectory, testcase), comicInfos);
|
||||||
|
|
||||||
|
var (publisher, type) = SplitPublisherAndLibraryType(Path.GetFileNameWithoutExtension(testcase));
|
||||||
|
|
||||||
|
var library = new LibraryBuilder(publisher, type)
|
||||||
|
.WithFolders([new FolderPath() {Path = testDirectoryPath}])
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var admin = new AppUserBuilder("admin", "admin@kavita.com", Seed.DefaultThemes[0])
|
||||||
|
.WithLibrary(library)
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
_unitOfWork.UserRepository.Add(admin); // Admin is needed for generating collections/reading lists
|
||||||
|
_unitOfWork.LibraryRepository.Add(library);
|
||||||
|
await _unitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
return library;
|
||||||
|
}
|
||||||
|
|
||||||
|
public ScannerService CreateServices(DirectoryService ds = null, IFileSystem fs = null)
|
||||||
|
{
|
||||||
|
fs ??= new FileSystem();
|
||||||
|
ds ??= new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||||
|
var archiveService = new ArchiveService(Substitute.For<ILogger<ArchiveService>>(), ds,
|
||||||
|
Substitute.For<IImageService>(), Substitute.For<IMediaErrorService>());
|
||||||
|
var readingItemService = new ReadingItemService(archiveService, Substitute.For<IBookService>(),
|
||||||
|
Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>());
|
||||||
|
|
||||||
|
|
||||||
|
var processSeries = new ProcessSeries(_unitOfWork, Substitute.For<ILogger<ProcessSeries>>(),
|
||||||
|
Substitute.For<IEventHub>(),
|
||||||
|
ds, Substitute.For<ICacheHelper>(), readingItemService, new FileService(fs),
|
||||||
|
Substitute.For<IMetadataService>(),
|
||||||
|
Substitute.For<IWordCountAnalyzerService>(),
|
||||||
|
Substitute.For<IReadingListService>(),
|
||||||
|
Substitute.For<IExternalMetadataService>());
|
||||||
|
|
||||||
|
var scanner = new ScannerService(_unitOfWork, Substitute.For<ILogger<ScannerService>>(),
|
||||||
|
Substitute.For<IMetadataService>(),
|
||||||
|
Substitute.For<ICacheService>(), Substitute.For<IEventHub>(), ds,
|
||||||
|
readingItemService, processSeries, Substitute.For<IWordCountAnalyzerService>());
|
||||||
|
return scanner;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static (string Publisher, LibraryType Type) SplitPublisherAndLibraryType(string input)
|
||||||
|
{
|
||||||
|
// Split the input string based on " - "
|
||||||
|
var parts = input.Split(" - ", StringSplitOptions.RemoveEmptyEntries);
|
||||||
|
|
||||||
|
if (parts.Length != 2)
|
||||||
|
{
|
||||||
|
throw new ArgumentException("Input must be in the format 'Publisher - LibraryType'");
|
||||||
|
}
|
||||||
|
|
||||||
|
var publisher = parts[0].Trim();
|
||||||
|
var libraryTypeString = parts[1].Trim();
|
||||||
|
|
||||||
|
// Try to parse the right-hand side as a LibraryType enum
|
||||||
|
if (!Enum.TryParse<LibraryType>(libraryTypeString, out var libraryType))
|
||||||
|
{
|
||||||
|
throw new ArgumentException($"'{libraryTypeString}' is not a valid LibraryType");
|
||||||
|
}
|
||||||
|
|
||||||
|
return (publisher, libraryType);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
private async Task<string> GenerateTestDirectory(string mapPath, Dictionary<string, ComicInfo> comicInfos = null)
|
||||||
|
{
|
||||||
|
// Read the map file
|
||||||
|
var mapContent = await File.ReadAllTextAsync(mapPath);
|
||||||
|
|
||||||
|
// Deserialize the JSON content into a list of strings using System.Text.Json
|
||||||
|
var filePaths = JsonSerializer.Deserialize<List<string>>(mapContent);
|
||||||
|
|
||||||
|
// Create a test directory
|
||||||
|
var testDirectory = Path.Combine(_testDirectory, Path.GetFileNameWithoutExtension(mapPath));
|
||||||
|
if (Directory.Exists(testDirectory))
|
||||||
|
{
|
||||||
|
Directory.Delete(testDirectory, true);
|
||||||
|
}
|
||||||
|
Directory.CreateDirectory(testDirectory);
|
||||||
|
|
||||||
|
// Generate the files and folders
|
||||||
|
await Scaffold(testDirectory, filePaths, comicInfos);
|
||||||
|
|
||||||
|
_testOutputHelper.WriteLine($"Test Directory Path: {testDirectory}");
|
||||||
|
|
||||||
|
return Path.GetFullPath(testDirectory);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public async Task Scaffold(string testDirectory, List<string> filePaths, Dictionary<string, ComicInfo> comicInfos = null)
|
||||||
|
{
|
||||||
|
foreach (var relativePath in filePaths)
|
||||||
|
{
|
||||||
|
var fullPath = Path.Combine(testDirectory, relativePath);
|
||||||
|
var fileDir = Path.GetDirectoryName(fullPath);
|
||||||
|
|
||||||
|
// Create the directory if it doesn't exist
|
||||||
|
if (!Directory.Exists(fileDir))
|
||||||
|
{
|
||||||
|
Directory.CreateDirectory(fileDir);
|
||||||
|
Console.WriteLine($"Created directory: {fileDir}");
|
||||||
|
}
|
||||||
|
|
||||||
|
var ext = Path.GetExtension(fullPath).ToLower();
|
||||||
|
if (ComicInfoExtensions.Contains(ext) && comicInfos != null && comicInfos.TryGetValue(Path.GetFileName(relativePath), out var info))
|
||||||
|
{
|
||||||
|
CreateMinimalCbz(fullPath, info);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Create an empty file
|
||||||
|
await File.Create(fullPath).DisposeAsync();
|
||||||
|
Console.WriteLine($"Created empty file: {fullPath}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void CreateMinimalCbz(string filePath, ComicInfo? comicInfo = null)
|
||||||
|
{
|
||||||
|
using (var archive = ZipFile.Open(filePath, ZipArchiveMode.Create))
|
||||||
|
{
|
||||||
|
// Add the 1x1 image to the archive
|
||||||
|
archive.CreateEntryFromFile(_imagePath, "1x1.png");
|
||||||
|
|
||||||
|
if (comicInfo != null)
|
||||||
|
{
|
||||||
|
// Serialize ComicInfo object to XML
|
||||||
|
var comicInfoXml = SerializeComicInfoToXml(comicInfo);
|
||||||
|
|
||||||
|
// Create an entry for ComicInfo.xml in the archive
|
||||||
|
var entry = archive.CreateEntry("ComicInfo.xml");
|
||||||
|
using var entryStream = entry.Open();
|
||||||
|
using var writer = new StreamWriter(entryStream, Encoding.UTF8);
|
||||||
|
|
||||||
|
// Write the XML to the archive
|
||||||
|
writer.Write(comicInfoXml);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
Console.WriteLine($"Created minimal CBZ archive: {filePath} with{(comicInfo != null ? "" : "out")} metadata.");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private static string SerializeComicInfoToXml(ComicInfo comicInfo)
|
||||||
|
{
|
||||||
|
var xmlSerializer = new XmlSerializer(typeof(ComicInfo));
|
||||||
|
using var stringWriter = new StringWriter();
|
||||||
|
using (var xmlWriter = XmlWriter.Create(stringWriter, new XmlWriterSettings { Indent = true, Encoding = new UTF8Encoding(false), OmitXmlDeclaration = false}))
|
||||||
|
{
|
||||||
|
xmlSerializer.Serialize(xmlWriter, comicInfo);
|
||||||
|
}
|
||||||
|
|
||||||
|
// For the love of god, I spent 2 hours trying to get utf-8 with no BOM
|
||||||
|
return stringWriter.ToString().Replace("""<?xml version="1.0" encoding="utf-16"?>""",
|
||||||
|
@"<?xml version='1.0' encoding='utf-8'?>");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,6 +1,5 @@
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using API.Data;
|
|
||||||
using API.Entities;
|
using API.Entities;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Extensions;
|
using API.Extensions;
|
||||||
|
|
|
||||||
|
|
@ -44,6 +44,17 @@ public class SmartFilterHelperTests
|
||||||
AssertStatementSame(list[0], FilterField.Genres, FilterComparison.Equal, "95");
|
AssertStatementSame(list[0], FilterField.Genres, FilterComparison.Equal, "95");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void Test_Decode2()
|
||||||
|
{
|
||||||
|
const string encoded = """
|
||||||
|
name=Test%202&stmts=comparison%253D10%25C2%25A6field%253D1%25C2%25A6value%253DA%EF%BF%BDcomparison%253D0%25C2%25A6field%253D19%25C2%25A6value%253D11&sortOptions=sortField%3D1%C2%A6isAscending%3DTrue&limitTo=0&combination=1
|
||||||
|
""";
|
||||||
|
|
||||||
|
var filter = SmartFilterHelper.Decode(encoded);
|
||||||
|
Assert.True(filter.SortOptions.IsAscending);
|
||||||
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public void Test_EncodeDecode()
|
public void Test_EncodeDecode()
|
||||||
{
|
{
|
||||||
|
|
|
||||||
46
API.Tests/Helpers/StringHelperTests.cs
Normal file
46
API.Tests/Helpers/StringHelperTests.cs
Normal file
|
|
@ -0,0 +1,46 @@
|
||||||
|
using API.Helpers;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Helpers;
|
||||||
|
|
||||||
|
public class StringHelperTests
|
||||||
|
{
|
||||||
|
[Theory]
|
||||||
|
[InlineData(
|
||||||
|
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> <br><br><br /> Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?</p>",
|
||||||
|
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?</p>"
|
||||||
|
)]
|
||||||
|
[InlineData(
|
||||||
|
"<p><a href=\"https://blog.goo.ne.jp/tamakiya_web\">Blog</a> | <a href=\"https://twitter.com/tamakinozomu\">Twitter</a> | <a href=\"https://www.pixiv.net/member.php?id=68961\">Pixiv</a> | <a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>",
|
||||||
|
"<p><a href=\"https://blog.goo.ne.jp/tamakiya_web\">Blog</a> | <a href=\"https://twitter.com/tamakinozomu\">Twitter</a> | <a href=\"https://www.pixiv.net/member.php?id=68961\">Pixiv</a> | <a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>"
|
||||||
|
)]
|
||||||
|
public void TestSquashBreaklines(string input, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, StringHelper.SquashBreaklines(input));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData(
|
||||||
|
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> (Source: Anime News Network)</p>",
|
||||||
|
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>"
|
||||||
|
)]
|
||||||
|
[InlineData(
|
||||||
|
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>(Source: Anime News Network)",
|
||||||
|
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>"
|
||||||
|
)]
|
||||||
|
public void TestRemoveSourceInDescription(string input, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, StringHelper.RemoveSourceInDescription(input));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData(
|
||||||
|
"""<a href=\"https://pawoo.net/&#64;tamakiya\">Pawoo</a></p>""",
|
||||||
|
"""<a href=\"https://pawoo.net/@tamakiya\">Pawoo</a></p>"""
|
||||||
|
)]
|
||||||
|
public void TestCorrectUrls(string input, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, StringHelper.CorrectUrls(input));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,126 +0,0 @@
|
||||||
using System.Collections.Generic;
|
|
||||||
using API.Data;
|
|
||||||
using API.Entities;
|
|
||||||
using API.Helpers;
|
|
||||||
using API.Helpers.Builders;
|
|
||||||
using Xunit;
|
|
||||||
|
|
||||||
namespace API.Tests.Helpers;
|
|
||||||
|
|
||||||
public class TagHelperTests
|
|
||||||
{
|
|
||||||
[Fact]
|
|
||||||
public void UpdateTag_ShouldAddNewTag()
|
|
||||||
{
|
|
||||||
var allTags = new List<Tag>
|
|
||||||
{
|
|
||||||
new TagBuilder("Action").Build(),
|
|
||||||
new TagBuilder("action").Build(),
|
|
||||||
new TagBuilder("Sci-fi").Build(),
|
|
||||||
};
|
|
||||||
var tagAdded = new List<Tag>();
|
|
||||||
|
|
||||||
TagHelper.UpdateTag(allTags, new[] {"Action", "Adventure"}, (tag, added) =>
|
|
||||||
{
|
|
||||||
if (added)
|
|
||||||
{
|
|
||||||
tagAdded.Add(tag);
|
|
||||||
}
|
|
||||||
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Single(tagAdded);
|
|
||||||
Assert.Equal(4, allTags.Count);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void UpdateTag_ShouldNotAddDuplicateTag()
|
|
||||||
{
|
|
||||||
var allTags = new List<Tag>
|
|
||||||
{
|
|
||||||
new TagBuilder("Action").Build(),
|
|
||||||
new TagBuilder("action").Build(),
|
|
||||||
new TagBuilder("Sci-fi").Build(),
|
|
||||||
|
|
||||||
};
|
|
||||||
var tagAdded = new List<Tag>();
|
|
||||||
|
|
||||||
TagHelper.UpdateTag(allTags, new[] {"Action", "Scifi"}, (tag, added) =>
|
|
||||||
{
|
|
||||||
if (added)
|
|
||||||
{
|
|
||||||
tagAdded.Add(tag);
|
|
||||||
}
|
|
||||||
TagHelper.AddTagIfNotExists(allTags, tag);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Equal(3, allTags.Count);
|
|
||||||
Assert.Empty(tagAdded);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void AddTag_ShouldAddOnlyNonExistingTag()
|
|
||||||
{
|
|
||||||
var existingTags = new List<Tag>
|
|
||||||
{
|
|
||||||
new TagBuilder("Action").Build(),
|
|
||||||
new TagBuilder("action").Build(),
|
|
||||||
new TagBuilder("Sci-fi").Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Action").Build());
|
|
||||||
Assert.Equal(3, existingTags.Count);
|
|
||||||
|
|
||||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("action").Build());
|
|
||||||
Assert.Equal(3, existingTags.Count);
|
|
||||||
|
|
||||||
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Shonen").Build());
|
|
||||||
Assert.Equal(4, existingTags.Count);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void KeepOnlySamePeopleBetweenLists()
|
|
||||||
{
|
|
||||||
var existingTags = new List<Tag>
|
|
||||||
{
|
|
||||||
new TagBuilder("Action").Build(),
|
|
||||||
new TagBuilder("Sci-fi").Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
var peopleFromChapters = new List<Tag>
|
|
||||||
{
|
|
||||||
new TagBuilder("Action").Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
var tagRemoved = new List<Tag>();
|
|
||||||
TagHelper.KeepOnlySameTagBetweenLists(existingTags,
|
|
||||||
peopleFromChapters, tag =>
|
|
||||||
{
|
|
||||||
tagRemoved.Add(tag);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Single(tagRemoved);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void RemoveEveryoneIfNothingInRemoveAllExcept()
|
|
||||||
{
|
|
||||||
var existingTags = new List<Tag>
|
|
||||||
{
|
|
||||||
new TagBuilder("Action").Build(),
|
|
||||||
new TagBuilder("Sci-fi").Build(),
|
|
||||||
};
|
|
||||||
|
|
||||||
var peopleFromChapters = new List<Tag>();
|
|
||||||
|
|
||||||
var tagRemoved = new List<Tag>();
|
|
||||||
TagHelper.KeepOnlySameTagBetweenLists(existingTags,
|
|
||||||
peopleFromChapters, tag =>
|
|
||||||
{
|
|
||||||
tagRemoved.Add(tag);
|
|
||||||
});
|
|
||||||
|
|
||||||
Assert.Equal(2, tagRemoved.Count);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,43 +0,0 @@
|
||||||
using Xunit;
|
|
||||||
|
|
||||||
namespace API.Tests.Parser;
|
|
||||||
|
|
||||||
public class BookParserTests
|
|
||||||
{
|
|
||||||
[Theory]
|
|
||||||
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")]
|
|
||||||
[InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")]
|
|
||||||
[InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")]
|
|
||||||
public void ParseSeriesTest(string filename, string expected)
|
|
||||||
{
|
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
[Theory]
|
|
||||||
[InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
|
|
||||||
[InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")]
|
|
||||||
public void ParseVolumeTest(string filename, string expected)
|
|
||||||
{
|
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
// [Theory]
|
|
||||||
// [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA", "@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA")]
|
|
||||||
// [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url('fonts/font.css')", "@font-face{font-family:'syyskuu_repaleinen';src:url('TEST/fonts/font.css')")]
|
|
||||||
// public void ReplaceFontSrcUrl(string input, string expected)
|
|
||||||
// {
|
|
||||||
// var apiBase = "TEST/";
|
|
||||||
// var actual = API.Parser.Parser.FontSrcUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
|
|
||||||
// Assert.Equal(expected, actual);
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// [Theory]
|
|
||||||
// [InlineData("@import url('font.css');", "@import url('TEST/font.css');")]
|
|
||||||
// public void ReplaceImportSrcUrl(string input, string expected)
|
|
||||||
// {
|
|
||||||
// var apiBase = "TEST/";
|
|
||||||
// var actual = API.Parser.Parser.CssImportUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
|
|
||||||
// Assert.Equal(expected, actual);
|
|
||||||
// }
|
|
||||||
|
|
||||||
}
|
|
||||||
249
API.Tests/Parsers/BasicParserTests.cs
Normal file
249
API.Tests/Parsers/BasicParserTests.cs
Normal file
|
|
@ -0,0 +1,249 @@
|
||||||
|
using System.IO;
|
||||||
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Parsers;
|
||||||
|
|
||||||
|
public class BasicParserTests : AbstractFsTest
|
||||||
|
{
|
||||||
|
private readonly BasicParser _parser;
|
||||||
|
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||||
|
private readonly string _rootDirectory;
|
||||||
|
|
||||||
|
public BasicParserTests()
|
||||||
|
{
|
||||||
|
var fileSystem = CreateFileSystem();
|
||||||
|
_rootDirectory = Path.Join(DataDirectory, "Books/");
|
||||||
|
fileSystem.AddDirectory(_rootDirectory);
|
||||||
|
fileSystem.AddFile($"{_rootDirectory}Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
|
||||||
|
|
||||||
|
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1.cbz", new MockFileData(""));
|
||||||
|
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Volume 1 Chapter 2.cbz", new MockFileData(""));
|
||||||
|
fileSystem.AddFile($"{_rootDirectory}Accel World/Accel World - Chapter 3.cbz", new MockFileData(""));
|
||||||
|
fileSystem.AddFile("$\"{RootDirectory}Accel World/Accel World Gaiden SP01.cbz", new MockFileData(""));
|
||||||
|
|
||||||
|
|
||||||
|
fileSystem.AddFile($"{_rootDirectory}Accel World/cover.png", new MockFileData(""));
|
||||||
|
|
||||||
|
fileSystem.AddFile($"{_rootDirectory}Batman/Batman #1.cbz", new MockFileData(""));
|
||||||
|
|
||||||
|
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||||
|
_parser = new BasicParser(ds, new ImageParser(ds));
|
||||||
|
}
|
||||||
|
|
||||||
|
#region Parse_Manga
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when there is a loose-leaf cover in the manga library, that it is ignored
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_JustCover_ShouldReturnNull()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Accel World/cover.png", $"{_rootDirectory}Accel World/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.Null(actual);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when there is a loose-leaf cover in the manga library, that it is ignored
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_OtherImage_ShouldReturnNull()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Accel World/page 01.png", $"{_rootDirectory}Accel World/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when there is a volume and chapter in filename, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_VolumeAndChapterInFilename()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz", $"{_rootDirectory}Mujaki no Rakuen/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("Mujaki no Rakuen", actual.Series);
|
||||||
|
Assert.Equal("12", actual.Volumes);
|
||||||
|
Assert.Equal("76", actual.Chapters);
|
||||||
|
Assert.False(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when there is a volume in filename, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_JustVolumeInFilename()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz",
|
||||||
|
$"{_rootDirectory}Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", actual.Series);
|
||||||
|
Assert.Equal("1", actual.Volumes);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
Assert.False(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when there is a chapter only in filename, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_JustChapterInFilename()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Beelzebub/Beelzebub_01_[Noodles].zip",
|
||||||
|
$"{_rootDirectory}Beelzebub/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("Beelzebub", actual.Series);
|
||||||
|
Assert.Equal(Parser.LooseLeafVolume, actual.Volumes);
|
||||||
|
Assert.Equal("1", actual.Chapters);
|
||||||
|
Assert.False(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when there is a SP Marker in filename, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_SpecialMarkerInFilename()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr",
|
||||||
|
$"{_rootDirectory}Summer Time Rendering/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("Summer Time Rendering", actual.Series);
|
||||||
|
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
Assert.True(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when the filename parses as a special, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_SpecialInFilename()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Summer Time Rendering/Volume SP01.cbr",
|
||||||
|
$"{_rootDirectory}Summer Time Rendering/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("Summer Time Rendering", actual.Series);
|
||||||
|
Assert.Equal("Volume", actual.Title);
|
||||||
|
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
Assert.True(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when the filename parses as a special, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_SpecialInFilename2()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("M:/Kimi wa Midara na Boku no Joou/Specials/[Renzokusei] Special 1 SP02.zip",
|
||||||
|
"M:/Kimi wa Midara na Boku no Joou/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("Kimi wa Midara na Boku no Joou", actual.Series);
|
||||||
|
Assert.Equal("[Renzokusei] Special 1", actual.Title);
|
||||||
|
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
Assert.True(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when the filename parses as a special, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_SpecialInFilename_StrangeNaming()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}My Dress-Up Darling/SP01 1. Special Name.cbz",
|
||||||
|
_rootDirectory,
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("My Dress-Up Darling", actual.Series);
|
||||||
|
Assert.Equal("1. Special Name", actual.Title);
|
||||||
|
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
Assert.True(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when there is an edition in filename, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaLibrary_EditionInFilename()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz",
|
||||||
|
$"{_rootDirectory}Air Gear/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("Air Gear", actual.Series);
|
||||||
|
Assert.Equal("1", actual.Volumes);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
Assert.False(actual.IsSpecial);
|
||||||
|
Assert.Equal("Omnibus", actual.Edition);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region Parse_Books
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when there is a volume in filename, it appropriately parses
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_MangaBooks_JustVolumeInFilename()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse($"{_rootDirectory}Epubs/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub",
|
||||||
|
$"{_rootDirectory}Epubs/",
|
||||||
|
_rootDirectory, LibraryType.Manga);
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
|
||||||
|
Assert.Equal("Harrison, Kim - The Good, The Bad, and the Undead - Hollows", actual.Series);
|
||||||
|
Assert.Equal("2.5", actual.Volumes);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region IsApplicable
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on images and Image library type
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
|
||||||
|
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.ComicVine));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on images and Image library type
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.True(_parser.IsApplicable("something.png", LibraryType.Manga));
|
||||||
|
Assert.True(_parser.IsApplicable("something.png", LibraryType.Comic));
|
||||||
|
Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Book));
|
||||||
|
Assert.True(_parser.IsApplicable("something.epub", LibraryType.LightNovel));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
73
API.Tests/Parsers/BookParserTests.cs
Normal file
73
API.Tests/Parsers/BookParserTests.cs
Normal file
|
|
@ -0,0 +1,73 @@
|
||||||
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Parsers;
|
||||||
|
|
||||||
|
public class BookParserTests
|
||||||
|
{
|
||||||
|
private readonly BookParser _parser;
|
||||||
|
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||||
|
private const string RootDirectory = "C:/Books/";
|
||||||
|
|
||||||
|
public BookParserTests()
|
||||||
|
{
|
||||||
|
var fileSystem = new MockFileSystem();
|
||||||
|
fileSystem.AddDirectory("C:/Books/");
|
||||||
|
fileSystem.AddFile("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", new MockFileData(""));
|
||||||
|
fileSystem.AddFile("C:/Books/Adam Freeman - Pro ASP.NET Core 6.epub", new MockFileData(""));
|
||||||
|
fileSystem.AddFile("C:/Books/My Fav Book SP01.epub", new MockFileData(""));
|
||||||
|
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||||
|
_parser = new BookParser(ds, Substitute.For<IBookService>(), new BasicParser(ds, new ImageParser(ds)));
|
||||||
|
}
|
||||||
|
|
||||||
|
#region Parse
|
||||||
|
|
||||||
|
// TODO: I'm not sure how to actually test this as it relies on an epub parser to actually do anything
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
|
||||||
|
/// </summary>
|
||||||
|
// [Fact]
|
||||||
|
// public void Parse_SeriesWithDirectoryName()
|
||||||
|
// {
|
||||||
|
// var actual = _parser.Parse("C:/Books/Harry Potter/Harry Potter - Vol 1.epub", "C:/Books/Birds of Prey/",
|
||||||
|
// RootDirectory, LibraryType.Book, new ComicInfo()
|
||||||
|
// {
|
||||||
|
// Series = "Harry Potter",
|
||||||
|
// Volume = "1"
|
||||||
|
// });
|
||||||
|
//
|
||||||
|
// Assert.NotNull(actual);
|
||||||
|
// Assert.Equal("Harry Potter", actual.Series);
|
||||||
|
// Assert.Equal("1", actual.Volumes);
|
||||||
|
// }
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region IsApplicable
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on images and Image library type
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
|
||||||
|
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Book));
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on images and Image library type
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.True(_parser.IsApplicable("something.epub", LibraryType.Image));
|
||||||
|
}
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
115
API.Tests/Parsers/ComicVineParserTests.cs
Normal file
115
API.Tests/Parsers/ComicVineParserTests.cs
Normal file
|
|
@ -0,0 +1,115 @@
|
||||||
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
|
using API.Data.Metadata;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Parsers;
|
||||||
|
|
||||||
|
public class ComicVineParserTests
|
||||||
|
{
|
||||||
|
private readonly ComicVineParser _parser;
|
||||||
|
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||||
|
private const string RootDirectory = "C:/Comics/";
|
||||||
|
|
||||||
|
public ComicVineParserTests()
|
||||||
|
{
|
||||||
|
var fileSystem = new MockFileSystem();
|
||||||
|
fileSystem.AddDirectory("C:/Comics/");
|
||||||
|
fileSystem.AddDirectory("C:/Comics/Birds of Prey (2002)");
|
||||||
|
fileSystem.AddFile("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", new MockFileData(""));
|
||||||
|
fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", new MockFileData(""));
|
||||||
|
fileSystem.AddFile("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", new MockFileData(""));
|
||||||
|
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||||
|
_parser = new ComicVineParser(ds);
|
||||||
|
}
|
||||||
|
|
||||||
|
#region Parse
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that when Series and Volume are filled out, Kavita uses that for the Series Name
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_SeriesWithComicInfo()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
|
||||||
|
RootDirectory, LibraryType.ComicVine, true, new ComicInfo()
|
||||||
|
{
|
||||||
|
Series = "Birds of Prey",
|
||||||
|
Volume = "2002"
|
||||||
|
});
|
||||||
|
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
Assert.Equal("Birds of Prey (2002)", actual.Series);
|
||||||
|
Assert.Equal("2002", actual.Volumes);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that no ComicInfo, take the Directory Name if it matches "Series (2002)" or "Series (2)"
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_SeriesWithDirectoryNameAsSeriesYear()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("C:/Comics/Birds of Prey (2002)/Birds of Prey 001 (2002).cbz", "C:/Comics/Birds of Prey (2002)/",
|
||||||
|
RootDirectory, LibraryType.ComicVine, true, null);
|
||||||
|
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
Assert.Equal("Birds of Prey (2002)", actual.Series);
|
||||||
|
Assert.Equal("2002", actual.Volumes);
|
||||||
|
Assert.Equal("1", actual.Chapters);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that no ComicInfo, take a directory name up to root if it matches "Series (2002)" or "Series (2)"
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_SeriesWithADirectoryNameAsSeriesYear()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("C:/Comics/DC Comics/Birds of Prey (1999)/Birds of Prey 001 (1999).cbz", "C:/Comics/DC Comics/",
|
||||||
|
RootDirectory, LibraryType.ComicVine, true, null);
|
||||||
|
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
Assert.Equal("Birds of Prey (1999)", actual.Series);
|
||||||
|
Assert.Equal("1999", actual.Volumes);
|
||||||
|
Assert.Equal("1", actual.Chapters);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that no ComicInfo and nothing matches Series (Volume), then just take the directory name as the Series
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_FallbackToDirectoryNameOnly()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("C:/Comics/DC Comics/Blood Syndicate/Blood Syndicate 001 (1999).cbz", "C:/Comics/DC Comics/",
|
||||||
|
RootDirectory, LibraryType.ComicVine, true, null);
|
||||||
|
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
Assert.Equal("Blood Syndicate", actual.Series);
|
||||||
|
Assert.Equal(Parser.LooseLeafVolume, actual.Volumes);
|
||||||
|
Assert.Equal("1", actual.Chapters);
|
||||||
|
}
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region IsApplicable
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on ComicVine type
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.False(_parser.IsApplicable("", LibraryType.Comic));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on ComicVine type
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.True(_parser.IsApplicable("", LibraryType.ComicVine));
|
||||||
|
}
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
|
|
@ -1,7 +1,5 @@
|
||||||
using System;
|
using System.Collections.Generic;
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
using System.Linq;
|
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using API.Services.Tasks.Scanner.Parser;
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
|
|
@ -10,7 +8,7 @@ using NSubstitute;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
using Xunit.Abstractions;
|
using Xunit.Abstractions;
|
||||||
|
|
||||||
namespace API.Tests.Parser;
|
namespace API.Tests.Parsers;
|
||||||
|
|
||||||
public class DefaultParserTests
|
public class DefaultParserTests
|
||||||
{
|
{
|
||||||
|
|
@ -21,10 +19,12 @@ public class DefaultParserTests
|
||||||
{
|
{
|
||||||
_testOutputHelper = testOutputHelper;
|
_testOutputHelper = testOutputHelper;
|
||||||
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||||
_defaultParser = new DefaultParser(directoryService);
|
_defaultParser = new BasicParser(directoryService, new ImageParser(directoryService));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#region ParseFromFallbackFolders
|
#region ParseFromFallbackFolders
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")]
|
[InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")]
|
||||||
|
|
@ -33,7 +33,7 @@ public class DefaultParserTests
|
||||||
[InlineData("C:/", "C:/Something Random/Mujaki no Rakuen SP01.cbz", "Something Random")]
|
[InlineData("C:/", "C:/Something Random/Mujaki no Rakuen SP01.cbz", "Something Random")]
|
||||||
public void ParseFromFallbackFolders_FallbackShouldParseSeries(string rootDir, string inputPath, string expectedSeries)
|
public void ParseFromFallbackFolders_FallbackShouldParseSeries(string rootDir, string inputPath, string expectedSeries)
|
||||||
{
|
{
|
||||||
var actual = _defaultParser.Parse(inputPath, rootDir);
|
var actual = _defaultParser.Parse(inputPath, rootDir, rootDir, LibraryType.Manga, true, null);
|
||||||
if (actual == null)
|
if (actual == null)
|
||||||
{
|
{
|
||||||
Assert.NotNull(actual);
|
Assert.NotNull(actual);
|
||||||
|
|
@ -44,19 +44,18 @@ public class DefaultParserTests
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")]
|
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", new [] {"Btooom!", "1", "1"})]
|
||||||
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")]
|
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", new [] {"Btooom!", "1", "2"})]
|
||||||
[InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster~0~1")]
|
[InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", new [] {"Monster", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, "1"})]
|
||||||
[InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", "Hajime no Ippo~0~0")]
|
[InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", new [] {"Hajime no Ippo", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter})]
|
||||||
public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string expectedParseInfo)
|
public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string[] expectedParseInfo)
|
||||||
{
|
{
|
||||||
const string rootDirectory = "/manga/";
|
const string rootDirectory = "/manga/";
|
||||||
var tokens = expectedParseInfo.Split("~");
|
var actual = new ParserInfo {Series = "", Chapters = Parser.DefaultChapter, Volumes = Parser.LooseLeafVolume};
|
||||||
var actual = new ParserInfo {Series = "", Chapters = "0", Volumes = "0"};
|
|
||||||
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
||||||
Assert.Equal(tokens[0], actual.Series);
|
Assert.Equal(expectedParseInfo[0], actual.Series);
|
||||||
Assert.Equal(tokens[1], actual.Volumes);
|
Assert.Equal(expectedParseInfo[1], actual.Volumes);
|
||||||
Assert.Equal(tokens[2], actual.Chapters);
|
Assert.Equal(expectedParseInfo[2], actual.Chapters);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
|
|
@ -74,8 +73,8 @@ public class DefaultParserTests
|
||||||
fs.AddDirectory(rootDirectory);
|
fs.AddDirectory(rootDirectory);
|
||||||
fs.AddFile(inputFile, new MockFileData(""));
|
fs.AddFile(inputFile, new MockFileData(""));
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||||
var parser = new DefaultParser(ds);
|
var parser = new BasicParser(ds, new ImageParser(ds));
|
||||||
var actual = parser.Parse(inputFile, rootDirectory);
|
var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, true, null);
|
||||||
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
||||||
Assert.Equal(expectedParseInfo, actual.Series);
|
Assert.Equal(expectedParseInfo, actual.Series);
|
||||||
}
|
}
|
||||||
|
|
@ -90,8 +89,8 @@ public class DefaultParserTests
|
||||||
fs.AddDirectory(rootDirectory);
|
fs.AddDirectory(rootDirectory);
|
||||||
fs.AddFile(inputFile, new MockFileData(""));
|
fs.AddFile(inputFile, new MockFileData(""));
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||||
var parser = new DefaultParser(ds);
|
var parser = new BasicParser(ds, new ImageParser(ds));
|
||||||
var actual = parser.Parse(inputFile, rootDirectory);
|
var actual = parser.Parse(inputFile, rootDirectory, rootDirectory, LibraryType.Manga, true, null);
|
||||||
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
|
||||||
Assert.Equal(expectedParseInfo, actual.Series);
|
Assert.Equal(expectedParseInfo, actual.Series);
|
||||||
}
|
}
|
||||||
|
|
@ -101,13 +100,6 @@ public class DefaultParserTests
|
||||||
|
|
||||||
#region Parse
|
#region Parse
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public void Parse_MangaLibrary_JustCover_ShouldReturnNull()
|
|
||||||
{
|
|
||||||
const string rootPath = @"E:/Manga/";
|
|
||||||
var actual = _defaultParser.Parse(@"E:/Manga/Accel World/cover.png", rootPath);
|
|
||||||
Assert.Null(actual);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public void Parse_ParseInfo_Manga()
|
public void Parse_ParseInfo_Manga()
|
||||||
|
|
@ -127,19 +119,20 @@ public class DefaultParserTests
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1",
|
Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1",
|
||||||
Chapters = "0", Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip";
|
filepath = @"E:/Manga/Beelzebub/Beelzebub_01_[Noodles].zip";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Beelzebub", Volumes = "0",
|
Series = "Beelzebub", Volumes = Parser.LooseLeafVolume,
|
||||||
Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive,
|
Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
|
// Note: Lots of duplicates here. I think I can move them to the ParserTests itself
|
||||||
|
filepath = @"E:/Manga/Ichinensei ni Nacchattara/Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Ichinensei ni Nacchattara", Volumes = "1",
|
Series = "Ichinensei ni Nacchattara", Volumes = "1",
|
||||||
|
|
@ -147,71 +140,71 @@ public class DefaultParserTests
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
|
filepath = @"E:/Manga/Tenjo Tenge (Color)/Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Tenjo Tenge {Full Contact Edition}", Volumes = "1", Edition = "",
|
Series = "Tenjo Tenge {Full Contact Edition}", Volumes = "1", Edition = "",
|
||||||
Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
|
filepath = @"E:/Manga/Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)/Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
|
Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
|
||||||
Chapters = "0", Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
|
filepath = @"E:/Manga/Dorohedoro/Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Dorohedoro", Volumes = "1", Edition = "",
|
Series = "Dorohedoro", Volumes = "1", Edition = "",
|
||||||
Chapters = "0", Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
|
filepath = @"E:/Manga/APOSIMZ/APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "APOSIMZ", Volumes = "0", Edition = "",
|
Series = "APOSIMZ", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
|
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
|
filepath = @"E:/Manga/Corpse Party Musume/Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "",
|
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
|
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
|
filepath = @"E:/Manga/Goblin Slayer/Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "",
|
Series = "Goblin Slayer - Brand New Day", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
|
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
|
filepath = @"E:/Manga/Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Summer Time Rendering", Volumes = "0", Edition = "",
|
Series = "Summer Time Rendering", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, Edition = "",
|
||||||
Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath, IsSpecial = true
|
FullFilePath = filepath, IsSpecial = true
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
|
filepath = @"E:/Manga/Seraph of the End/Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "",
|
Series = "Seraph of the End - Vampire Reign", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
|
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz";
|
filepath = @"E:/Manga/Kono Subarashii Sekai ni Bakuen wo!/Vol. 00 Ch. 000.cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
|
Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
|
||||||
|
|
@ -219,7 +212,7 @@ public class DefaultParserTests
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz";
|
filepath = @"E:/Manga/Toukyou Akazukin/Vol. 01 Ch. 001.cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
|
Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
|
||||||
|
|
@ -228,37 +221,37 @@ public class DefaultParserTests
|
||||||
});
|
});
|
||||||
|
|
||||||
// If an image is cover exclusively, ignore it
|
// If an image is cover exclusively, ignore it
|
||||||
filepath = @"E:\Manga\Seraph of the End\cover.png";
|
filepath = @"E:/Manga/Seraph of the End/cover.png";
|
||||||
expected.Add(filepath, null);
|
expected.Add(filepath, null);
|
||||||
|
|
||||||
filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz";
|
filepath = @"E:/Manga/The Beginning After the End/Chapter 001.cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "The Beginning After the End", Volumes = "0", Edition = "",
|
Series = "The Beginning After the End", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive,
|
Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Air Gear\Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
|
filepath = @"E:/Manga/Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Air Gear", Volumes = "1", Edition = "Omnibus",
|
Series = "Air Gear", Volumes = "1", Edition = "Omnibus",
|
||||||
Chapters = "0", Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
|
filepath = @"E:/Manga/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
|
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
|
||||||
Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
});
|
});
|
||||||
|
|
||||||
foreach (var file in expected.Keys)
|
foreach (var file in expected.Keys)
|
||||||
{
|
{
|
||||||
var expectedInfo = expected[file];
|
var expectedInfo = expected[file];
|
||||||
var actual = _defaultParser.Parse(file, rootPath);
|
var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Manga, true, null);
|
||||||
if (expectedInfo == null)
|
if (expectedInfo == null)
|
||||||
{
|
{
|
||||||
Assert.Null(actual);
|
Assert.Null(actual);
|
||||||
|
|
@ -283,20 +276,20 @@ public class DefaultParserTests
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
//[Fact]
|
||||||
public void Parse_ParseInfo_Manga_ImageOnly()
|
public void Parse_ParseInfo_Manga_ImageOnly()
|
||||||
{
|
{
|
||||||
// Images don't have root path as E:\Manga, but rather as the path of the folder
|
// Images don't have root path as E:/Manga, but rather as the path of the folder
|
||||||
|
|
||||||
// Note: Fallback to folder will parse Monster #8 and get Monster
|
// Note: Fallback to folder will parse Monster #8 and get Monster
|
||||||
var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
|
var filepath = @"E:/Manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg";
|
||||||
var expectedInfo2 = new ParserInfo
|
var expectedInfo2 = new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Monster #8", Volumes = "0", Edition = "",
|
Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
|
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
};
|
};
|
||||||
var actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Monster #8");
|
var actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Monster #8", "E:/Manga", LibraryType.Manga, true, null);
|
||||||
Assert.NotNull(actual2);
|
Assert.NotNull(actual2);
|
||||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||||
|
|
@ -314,7 +307,7 @@ public class DefaultParserTests
|
||||||
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||||
_testOutputHelper.WriteLine("FullFilePath ✓");
|
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||||
|
|
||||||
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
|
filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Vol19/ch. 186/Vol. 19 p106.gif";
|
||||||
expectedInfo2 = new ParserInfo
|
expectedInfo2 = new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Just Images the second", Volumes = "19", Edition = "",
|
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||||
|
|
@ -322,7 +315,7 @@ public class DefaultParserTests
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
};
|
};
|
||||||
|
|
||||||
actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\");
|
actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga",LibraryType.Manga, true, null);
|
||||||
Assert.NotNull(actual2);
|
Assert.NotNull(actual2);
|
||||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||||
|
|
@ -340,7 +333,7 @@ public class DefaultParserTests
|
||||||
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||||
_testOutputHelper.WriteLine("FullFilePath ✓");
|
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||||
|
|
||||||
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
|
filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Blank Folder/Vol19/ch. 186/Vol. 19 p106.gif";
|
||||||
expectedInfo2 = new ParserInfo
|
expectedInfo2 = new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Just Images the second", Volumes = "19", Edition = "",
|
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||||
|
|
@ -348,7 +341,7 @@ public class DefaultParserTests
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
};
|
};
|
||||||
|
|
||||||
actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\");
|
actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, true, null);
|
||||||
Assert.NotNull(actual2);
|
Assert.NotNull(actual2);
|
||||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||||
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||||
|
|
@ -379,7 +372,7 @@ public class DefaultParserTests
|
||||||
filesystem.AddFile(@"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz", new MockFileData(""));
|
filesystem.AddFile(@"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz", new MockFileData(""));
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var parser = new DefaultParser(ds);
|
var parser = new BasicParser(ds, new ImageParser(ds));
|
||||||
|
|
||||||
var filepath = @"E:/Manga/Foo 50/Foo 50 v1.cbz";
|
var filepath = @"E:/Manga/Foo 50/Foo 50 v1.cbz";
|
||||||
// There is a bad parse for series like "Foo 50", so we have parsed chapter as 50
|
// There is a bad parse for series like "Foo 50", so we have parsed chapter as 50
|
||||||
|
|
@ -390,7 +383,7 @@ public class DefaultParserTests
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
};
|
};
|
||||||
|
|
||||||
var actual = parser.Parse(filepath, rootPath);
|
var actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, true, null);
|
||||||
|
|
||||||
Assert.NotNull(actual);
|
Assert.NotNull(actual);
|
||||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||||
|
|
@ -414,12 +407,12 @@ public class DefaultParserTests
|
||||||
filepath = @"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz";
|
filepath = @"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz";
|
||||||
expected = new ParserInfo
|
expected = new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Foo 50", Volumes = "0", IsSpecial = true,
|
Series = "Foo 50", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, IsSpecial = true,
|
||||||
Chapters = "50", Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
|
Chapters = Parser.DefaultChapter, Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
};
|
};
|
||||||
|
|
||||||
actual = parser.Parse(filepath, rootPath);
|
actual = parser.Parse(filepath, rootPath, rootPath, LibraryType.Manga, true, null);
|
||||||
Assert.NotNull(actual);
|
Assert.NotNull(actual);
|
||||||
_testOutputHelper.WriteLine($"Validating {filepath}");
|
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||||
Assert.Equal(expected.Format, actual.Format);
|
Assert.Equal(expected.Format, actual.Format);
|
||||||
|
|
@ -444,26 +437,26 @@ public class DefaultParserTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void Parse_ParseInfo_Comic()
|
public void Parse_ParseInfo_Comic()
|
||||||
{
|
{
|
||||||
const string rootPath = @"E:/Comics/";
|
const string rootPath = "E:/Comics/";
|
||||||
var expected = new Dictionary<string, ParserInfo>();
|
var expected = new Dictionary<string, ParserInfo>();
|
||||||
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
|
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Teen Titans", Volumes = "0",
|
Series = "Teen Titans", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume,
|
||||||
Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
|
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath
|
FullFilePath = filepath
|
||||||
});
|
});
|
||||||
|
|
||||||
// Fallback test with bad naming
|
// Fallback test with bad naming
|
||||||
filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
|
filepath = @"E:/Comics/Comics/Babe/Babe Vol.1 #1-4/Babe 01.cbr";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Babe", Volumes = "0", Edition = "",
|
Series = "Babe", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
|
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr";
|
filepath = @"E:/Comics/Comics/Publisher/Batman the Detective (2021)/Batman the Detective - v6 - 11 - (2021).cbr";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Batman the Detective", Volumes = "6", Edition = "",
|
Series = "Batman the Detective", Volumes = "6", Edition = "",
|
||||||
|
|
@ -471,10 +464,10 @@ public class DefaultParserTests
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
});
|
});
|
||||||
|
|
||||||
filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr";
|
filepath = @"E:/Comics/Comics/Batman - The Man Who Laughs #1 (2005)/Batman - The Man Who Laughs #1 (2005).cbr";
|
||||||
expected.Add(filepath, new ParserInfo
|
expected.Add(filepath, new ParserInfo
|
||||||
{
|
{
|
||||||
Series = "Batman - The Man Who Laughs", Volumes = "0", Edition = "",
|
Series = "Batman - The Man Who Laughs", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive,
|
Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive,
|
||||||
FullFilePath = filepath, IsSpecial = false
|
FullFilePath = filepath, IsSpecial = false
|
||||||
});
|
});
|
||||||
|
|
@ -482,7 +475,7 @@ public class DefaultParserTests
|
||||||
foreach (var file in expected.Keys)
|
foreach (var file in expected.Keys)
|
||||||
{
|
{
|
||||||
var expectedInfo = expected[file];
|
var expectedInfo = expected[file];
|
||||||
var actual = _defaultParser.Parse(file, rootPath, LibraryType.Comic);
|
var actual = _defaultParser.Parse(file, rootPath, rootPath, LibraryType.Comic, true, null);
|
||||||
if (expectedInfo == null)
|
if (expectedInfo == null)
|
||||||
{
|
{
|
||||||
Assert.Null(actual);
|
Assert.Null(actual);
|
||||||
97
API.Tests/Parsers/ImageParserTests.cs
Normal file
97
API.Tests/Parsers/ImageParserTests.cs
Normal file
|
|
@ -0,0 +1,97 @@
|
||||||
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Parsers;
|
||||||
|
|
||||||
|
public class ImageParserTests
|
||||||
|
{
|
||||||
|
private readonly ImageParser _parser;
|
||||||
|
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||||
|
private const string RootDirectory = "C:/Comics/";
|
||||||
|
|
||||||
|
public ImageParserTests()
|
||||||
|
{
|
||||||
|
var fileSystem = new MockFileSystem();
|
||||||
|
fileSystem.AddDirectory("C:/Comics/");
|
||||||
|
fileSystem.AddDirectory("C:/Comics/Birds of Prey (2002)");
|
||||||
|
fileSystem.AddFile("C:/Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
|
||||||
|
fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
|
||||||
|
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||||
|
_parser = new ImageParser(ds);
|
||||||
|
}
|
||||||
|
|
||||||
|
#region Parse
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_SeriesWithDirectoryName()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01/01.jpg", "C:/Comics/Birds of Prey/",
|
||||||
|
RootDirectory, LibraryType.Image, true, null);
|
||||||
|
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
Assert.Equal("Birds of Prey", actual.Series);
|
||||||
|
Assert.Equal("1", actual.Chapters);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that if there is a Series Folder only, the code appropriately identifies the Series name from folder
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_SeriesWithNoNestedChapter()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("C:/Comics/Birds of Prey/Chapter 01 page 01.jpg", "C:/Comics/",
|
||||||
|
RootDirectory, LibraryType.Image, true, null);
|
||||||
|
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
Assert.Equal("Birds of Prey", actual.Series);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that if there is a Series Folder only, the code appropriately identifies the Series name from folder and everything else as a
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_SeriesWithLooseImages()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("C:/Comics/Birds of Prey/page 01.jpg", "C:/Comics/",
|
||||||
|
RootDirectory, LibraryType.Image, true, null);
|
||||||
|
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
Assert.Equal("Birds of Prey", actual.Series);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
Assert.True(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region IsApplicable
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on images and Image library type
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
|
||||||
|
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
|
||||||
|
Assert.False(_parser.IsApplicable("something.epub", LibraryType.Image));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on images and Image library type
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.True(_parser.IsApplicable("something.png", LibraryType.Image));
|
||||||
|
}
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
71
API.Tests/Parsers/PdfParserTests.cs
Normal file
71
API.Tests/Parsers/PdfParserTests.cs
Normal file
|
|
@ -0,0 +1,71 @@
|
||||||
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Parsers;
|
||||||
|
|
||||||
|
public class PdfParserTests
|
||||||
|
{
|
||||||
|
private readonly PdfParser _parser;
|
||||||
|
private readonly ILogger<DirectoryService> _dsLogger = Substitute.For<ILogger<DirectoryService>>();
|
||||||
|
private const string RootDirectory = "C:/Books/";
|
||||||
|
|
||||||
|
public PdfParserTests()
|
||||||
|
{
|
||||||
|
var fileSystem = new MockFileSystem();
|
||||||
|
fileSystem.AddDirectory("C:/Books/");
|
||||||
|
fileSystem.AddDirectory("C:/Books/Birds of Prey (2002)");
|
||||||
|
fileSystem.AddFile("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf", new MockFileData(""));
|
||||||
|
fileSystem.AddFile("C:/Comics/DC Comics/Birds of Prey/Chapter 01/01.jpg", new MockFileData(""));
|
||||||
|
var ds = new DirectoryService(_dsLogger, fileSystem);
|
||||||
|
_parser = new PdfParser(ds);
|
||||||
|
}
|
||||||
|
|
||||||
|
#region Parse
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that if there is a Series Folder then Chapter folder, the code appropriately identifies the Series name and Chapter
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void Parse_Book_SeriesWithDirectoryName()
|
||||||
|
{
|
||||||
|
var actual = _parser.Parse("C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/A Dictionary of Japanese Food - Ingredients and Culture.pdf",
|
||||||
|
"C:/Books/A Dictionary of Japanese Food - Ingredients and Culture/",
|
||||||
|
RootDirectory, LibraryType.Book, true, null);
|
||||||
|
|
||||||
|
Assert.NotNull(actual);
|
||||||
|
Assert.Equal("A Dictionary of Japanese Food - Ingredients and Culture", actual.Series);
|
||||||
|
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
|
||||||
|
Assert.True(actual.IsSpecial);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region IsApplicable
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on pdfs
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Fails_WhenNonMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Manga));
|
||||||
|
Assert.False(_parser.IsApplicable("something.cbz", LibraryType.Image));
|
||||||
|
Assert.False(_parser.IsApplicable("something.epub", LibraryType.Image));
|
||||||
|
Assert.False(_parser.IsApplicable("something.png", LibraryType.Book));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Tests that this Parser can only be used on pdfs
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void IsApplicable_Success_WhenMatchingLibraryType()
|
||||||
|
{
|
||||||
|
Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Book));
|
||||||
|
Assert.True(_parser.IsApplicable("something.pdf", LibraryType.Manga));
|
||||||
|
}
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
24
API.Tests/Parsing/BookParsingTests.cs
Normal file
24
API.Tests/Parsing/BookParsingTests.cs
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Parsing;
|
||||||
|
|
||||||
|
public class BookParsingTests
|
||||||
|
{
|
||||||
|
[Theory]
|
||||||
|
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")]
|
||||||
|
[InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")]
|
||||||
|
[InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")]
|
||||||
|
public void ParseSeriesTest(string filename, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Book));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
|
||||||
|
[InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")]
|
||||||
|
public void ParseVolumeTest(string filename, string expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Book));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,26 +1,11 @@
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using API.Entities.Enums;
|
||||||
using API.Services;
|
|
||||||
using API.Services.Tasks.Scanner.Parser;
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
using Microsoft.Extensions.Logging;
|
|
||||||
using NSubstitute;
|
|
||||||
using Xunit;
|
using Xunit;
|
||||||
using Xunit.Abstractions;
|
|
||||||
|
|
||||||
namespace API.Tests.Parser;
|
namespace API.Tests.Parsing;
|
||||||
|
|
||||||
public class ComicParserTests
|
public class ComicParsingTests
|
||||||
{
|
{
|
||||||
private readonly ITestOutputHelper _testOutputHelper;
|
|
||||||
private readonly DefaultParser _defaultParser;
|
|
||||||
|
|
||||||
public ComicParserTests(ITestOutputHelper testOutputHelper)
|
|
||||||
{
|
|
||||||
_testOutputHelper = testOutputHelper;
|
|
||||||
_defaultParser =
|
|
||||||
new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
|
|
||||||
new MockFileSystem()));
|
|
||||||
}
|
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")]
|
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")]
|
||||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")]
|
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")]
|
||||||
|
|
@ -66,56 +51,58 @@ public class ComicParserTests
|
||||||
[InlineData("Demon 012 (Sep 1973) c2c", "Demon")]
|
[InlineData("Demon 012 (Sep 1973) c2c", "Demon")]
|
||||||
[InlineData("Dragon Age - Until We Sleep 01 (of 03)", "Dragon Age - Until We Sleep")]
|
[InlineData("Dragon Age - Until We Sleep 01 (of 03)", "Dragon Age - Until We Sleep")]
|
||||||
[InlineData("Green Lantern v2 017 - The Spy-Eye that doomed Green Lantern v2", "Green Lantern")]
|
[InlineData("Green Lantern v2 017 - The Spy-Eye that doomed Green Lantern v2", "Green Lantern")]
|
||||||
[InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire - Adam Strange")]
|
[InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire Special - Adam Strange")]
|
||||||
[InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis - Rags Morales Sketches")]
|
[InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis Extra - Rags Morales Sketches")]
|
||||||
[InlineData("Daredevil - t6 - 10 - (2019)", "Daredevil")]
|
[InlineData("Daredevil - t6 - 10 - (2019)", "Daredevil")]
|
||||||
[InlineData("Batgirl T2000 #57", "Batgirl")]
|
[InlineData("Batgirl T2000 #57", "Batgirl")]
|
||||||
[InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")]
|
[InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")]
|
||||||
[InlineData("Conquistador_-Tome_2", "Conquistador")]
|
[InlineData("Conquistador_-Tome_2", "Conquistador")]
|
||||||
[InlineData("Max_l_explorateur-_Tome_0", "Max l explorateur")]
|
[InlineData("Max_l_explorateur-_Tome_0", "Max l explorateur")]
|
||||||
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")]
|
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")]
|
||||||
[InlineData("Bd Fr-Aldebaran-Antares-t6", "Aldebaran-Antares")]
|
[InlineData("Bd Fr-Aldebaran-Antares-t6", "Bd Fr-Aldebaran-Antares")]
|
||||||
[InlineData("Tintin - T22 Vol 714 pour Sydney", "Tintin")]
|
[InlineData("Tintin - T22 Vol 714 pour Sydney", "Tintin")]
|
||||||
[InlineData("Fables 2010 Vol. 1 Legends in Exile", "Fables 2010")]
|
[InlineData("Fables 2010 Vol. 1 Legends in Exile", "Fables 2010")]
|
||||||
[InlineData("Kebab Том 1 Глава 1", "Kebab")]
|
[InlineData("Kebab Том 1 Глава 1", "Kebab")]
|
||||||
[InlineData("Манга Глава 1", "Манга")]
|
[InlineData("Манга Глава 1", "Манга")]
|
||||||
|
[InlineData("ReZero รีเซทชีวิต ฝ่าวิกฤตต่างโลก เล่ม 1", "ReZero รีเซทชีวิต ฝ่าวิกฤตต่างโลก")]
|
||||||
|
[InlineData("SKY WORLD สกายเวิลด์ เล่มที่ 1", "SKY WORLD สกายเวิลด์")]
|
||||||
public void ParseComicSeriesTest(string filename, string expected)
|
public void ParseComicSeriesTest(string filename, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(filename));
|
Assert.Equal(expected, Parser.ParseComicSeries(filename));
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("01 Spider-Man & Wolverine 01.cbr", "0")]
|
[InlineData("01 Spider-Man & Wolverine 01.cbr", Parser.LooseLeafVolume)]
|
||||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")]
|
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")]
|
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman & Catwoman - Trail of the Gun 01", "0")]
|
[InlineData("Batman & Catwoman - Trail of the Gun 01", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman & Daredevil - King of New York", "0")]
|
[InlineData("Batman & Daredevil - King of New York", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "0")]
|
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
|
[InlineData("Batman & Robin the Teen Wonder #0", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman & Wildcat (1 of 3)", "0")]
|
[InlineData("Batman & Wildcat (1 of 3)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman And Superman World's Finest #01", "0")]
|
[InlineData("Batman And Superman World's Finest #01", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Babe 01", "0")]
|
[InlineData("Babe 01", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "0")]
|
[InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
|
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
|
||||||
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "0")]
|
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Superman v1 024 (09-10 1943)", "1")]
|
[InlineData("Superman v1 024 (09-10 1943)", "1")]
|
||||||
[InlineData("Superman v1.5 024 (09-10 1943)", "1.5")]
|
[InlineData("Superman v1.5 024 (09-10 1943)", "1.5")]
|
||||||
[InlineData("Amazing Man Comics chapter 25", "0")]
|
[InlineData("Amazing Man Comics chapter 25", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")]
|
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")]
|
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", Parser.LooseLeafVolume)]
|
||||||
[InlineData("spawn-123", "0")]
|
[InlineData("spawn-123", Parser.LooseLeafVolume)]
|
||||||
[InlineData("spawn-chapter-123", "0")]
|
[InlineData("spawn-chapter-123", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "0")]
|
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman Beyond 04 (of 6) (1999)", "0")]
|
[InlineData("Batman Beyond 04 (of 6) (1999)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman Beyond 001 (2012)", "0")]
|
[InlineData("Batman Beyond 001 (2012)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman Beyond 2.0 001 (2013)", "0")]
|
[InlineData("Batman Beyond 2.0 001 (2013)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "0")]
|
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")]
|
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")]
|
||||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
|
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")]
|
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")]
|
||||||
[InlineData("Batgirl V2000 #57", "2000")]
|
[InlineData("Batgirl V2000 #57", "2000")]
|
||||||
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "0")]
|
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", Parser.LooseLeafVolume)]
|
||||||
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "0")]
|
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Daredevil - v6 - 10 - (2019)", "6")]
|
[InlineData("Daredevil - v6 - 10 - (2019)", "6")]
|
||||||
[InlineData("Daredevil - v6.5", "6.5")]
|
[InlineData("Daredevil - v6.5", "6.5")]
|
||||||
// Tome Tests
|
// Tome Tests
|
||||||
|
|
@ -125,22 +112,25 @@ public class ComicParserTests
|
||||||
[InlineData("Conquistador_Tome_2", "2")]
|
[InlineData("Conquistador_Tome_2", "2")]
|
||||||
[InlineData("Max_l_explorateur-_Tome_0", "0")]
|
[InlineData("Max_l_explorateur-_Tome_0", "0")]
|
||||||
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")]
|
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")]
|
||||||
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "0")]
|
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", Parser.LooseLeafVolume)]
|
||||||
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")]
|
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")]
|
||||||
// Russian Tests
|
// Russian Tests
|
||||||
[InlineData("Kebab Том 1 Глава 3", "1")]
|
[InlineData("Kebab Том 1 Глава 3", "1")]
|
||||||
[InlineData("Манга Глава 2", "0")]
|
[InlineData("Манга Глава 2", Parser.LooseLeafVolume)]
|
||||||
|
[InlineData("ย้อนเวลากลับมาร้าย เล่ม 1", "1")]
|
||||||
|
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "1")]
|
||||||
|
[InlineData("วิวาห์รัก เดิมพันชีวิต ตอนที่ 2", Parser.LooseLeafVolume)]
|
||||||
public void ParseComicVolumeTest(string filename, string expected)
|
public void ParseComicVolumeTest(string filename, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(filename));
|
Assert.Equal(expected, Parser.ParseComicVolume(filename));
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("01 Spider-Man & Wolverine 01.cbr", "1")]
|
[InlineData("01 Spider-Man & Wolverine 01.cbr", "1")]
|
||||||
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")]
|
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.DefaultChapter)]
|
||||||
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")]
|
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.DefaultChapter)]
|
||||||
[InlineData("Batman & Catwoman - Trail of the Gun 01", "1")]
|
[InlineData("Batman & Catwoman - Trail of the Gun 01", "1")]
|
||||||
[InlineData("Batman & Daredevil - King of New York", "0")]
|
[InlineData("Batman & Daredevil - King of New York", Parser.DefaultChapter)]
|
||||||
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")]
|
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")]
|
||||||
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
|
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
|
||||||
[InlineData("Batman & Wildcat (1 of 3)", "1")]
|
[InlineData("Batman & Wildcat (1 of 3)", "1")]
|
||||||
|
|
@ -164,8 +154,8 @@ public class ComicParserTests
|
||||||
[InlineData("Batman Beyond 001 (2012)", "1")]
|
[InlineData("Batman Beyond 001 (2012)", "1")]
|
||||||
[InlineData("Batman Beyond 2.0 001 (2013)", "1")]
|
[InlineData("Batman Beyond 2.0 001 (2013)", "1")]
|
||||||
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")]
|
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")]
|
||||||
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "0")]
|
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", Parser.DefaultChapter)]
|
||||||
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
|
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.DefaultChapter)]
|
||||||
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")]
|
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")]
|
||||||
[InlineData("Batgirl V2000 #57", "57")]
|
[InlineData("Batgirl V2000 #57", "57")]
|
||||||
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")]
|
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")]
|
||||||
|
|
@ -174,43 +164,47 @@ public class ComicParserTests
|
||||||
[InlineData("Daredevil - v6 - 10 - (2019)", "10")]
|
[InlineData("Daredevil - v6 - 10 - (2019)", "10")]
|
||||||
[InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")]
|
[InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")]
|
||||||
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")]
|
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")]
|
||||||
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "0")]
|
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", Parser.DefaultChapter)]
|
||||||
[InlineData("Kebab Том 1 Глава 3", "3")]
|
[InlineData("Kebab Том 1 Глава 3", "3")]
|
||||||
[InlineData("Манга Глава 2", "2")]
|
[InlineData("Манга Глава 2", "2")]
|
||||||
[InlineData("Манга 2 Глава", "2")]
|
[InlineData("Манга 2 Глава", "2")]
|
||||||
[InlineData("Манга Том 1 2 Глава", "2")]
|
[InlineData("Манга Том 1 2 Глава", "2")]
|
||||||
|
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
|
||||||
|
[InlineData("Max Level Returner ตอนที่ 5", "5")]
|
||||||
|
[InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
|
||||||
public void ParseComicChapterTest(string filename, string expected)
|
public void ParseComicChapterTest(string filename, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(filename));
|
Assert.Equal(expected, Parser.ParseChapter(filename, LibraryType.Comic));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)]
|
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", false)]
|
||||||
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)]
|
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", false)]
|
||||||
[InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)]
|
[InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)]
|
||||||
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)]
|
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", false)]
|
||||||
[InlineData("Boule et Bill - THS -Bill à disparu", true)]
|
[InlineData("Boule et Bill - THS -Bill à disparu", false)]
|
||||||
[InlineData("Asterix - HS - Les 12 travaux d'Astérix", true)]
|
[InlineData("Asterix - HS - Les 12 travaux d'Astérix", false)]
|
||||||
[InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", true)]
|
[InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", false)]
|
||||||
[InlineData("laughs", false)]
|
[InlineData("laughs", false)]
|
||||||
[InlineData("Annual Days of Summer", true)]
|
[InlineData("Annual Days of Summer", false)]
|
||||||
[InlineData("Adventure Time 2013 Annual #001 (2013)", true)]
|
[InlineData("Adventure Time 2013 Annual #001 (2013)", false)]
|
||||||
[InlineData("Adventure Time 2013_Annual_#001 (2013)", true)]
|
[InlineData("Adventure Time 2013_Annual_#001 (2013)", false)]
|
||||||
[InlineData("Adventure Time 2013_-_Annual #001 (2013)", true)]
|
[InlineData("Adventure Time 2013_-_Annual #001 (2013)", false)]
|
||||||
[InlineData("G.I. Joe - A Real American Hero Yearbook 004 Reprint (2021)", false)]
|
[InlineData("G.I. Joe - A Real American Hero Yearbook 004 Reprint (2021)", false)]
|
||||||
[InlineData("Mazebook 001", false)]
|
[InlineData("Mazebook 001", false)]
|
||||||
[InlineData("X-23 One Shot (2010)", true)]
|
[InlineData("X-23 One Shot (2010)", false)]
|
||||||
[InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", true)]
|
[InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", false)]
|
||||||
[InlineData("Batman Beyond Annual", true)]
|
[InlineData("Batman Beyond Annual", false)]
|
||||||
[InlineData("Batman Beyond Bonus", true)]
|
[InlineData("Batman Beyond Bonus", false)]
|
||||||
[InlineData("Batman Beyond OneShot", true)]
|
[InlineData("Batman Beyond OneShot", false)]
|
||||||
[InlineData("Batman Beyond Specials", true)]
|
[InlineData("Batman Beyond Specials", false)]
|
||||||
[InlineData("Batman Beyond Omnibus (1999)", true)]
|
[InlineData("Batman Beyond Omnibus (1999)", false)]
|
||||||
[InlineData("Batman Beyond Omnibus", true)]
|
[InlineData("Batman Beyond Omnibus", false)]
|
||||||
[InlineData("01 Annual Batman Beyond", true)]
|
[InlineData("01 Annual Batman Beyond", false)]
|
||||||
|
[InlineData("Blood Syndicate Annual #001", false)]
|
||||||
public void IsComicSpecialTest(string input, bool expected)
|
public void IsComicSpecialTest(string input, bool expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsComicSpecial(input));
|
Assert.Equal(expected, Parser.IsSpecial(input, LibraryType.Comic));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
107
API.Tests/Parsing/ImageParsingTests.cs
Normal file
107
API.Tests/Parsing/ImageParsingTests.cs
Normal file
|
|
@ -0,0 +1,107 @@
|
||||||
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
using Xunit.Abstractions;
|
||||||
|
|
||||||
|
namespace API.Tests.Parsing;
|
||||||
|
|
||||||
|
public class ImageParsingTests
|
||||||
|
{
|
||||||
|
private readonly ITestOutputHelper _testOutputHelper;
|
||||||
|
private readonly ImageParser _parser;
|
||||||
|
|
||||||
|
public ImageParsingTests(ITestOutputHelper testOutputHelper)
|
||||||
|
{
|
||||||
|
_testOutputHelper = testOutputHelper;
|
||||||
|
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||||
|
_parser = new ImageParser(directoryService);
|
||||||
|
}
|
||||||
|
|
||||||
|
//[Fact]
|
||||||
|
public void Parse_ParseInfo_Manga_ImageOnly()
|
||||||
|
{
|
||||||
|
// Images don't have root path as E:\Manga, but rather as the path of the folder
|
||||||
|
|
||||||
|
// Note: Fallback to folder will parse Monster #8 and get Monster
|
||||||
|
var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
|
||||||
|
var expectedInfo2 = new ParserInfo
|
||||||
|
{
|
||||||
|
Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
|
||||||
|
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
|
||||||
|
FullFilePath = filepath, IsSpecial = false
|
||||||
|
};
|
||||||
|
var actual2 = _parser.Parse(filepath, @"E:\Manga\Monster #8", "E:/Manga", LibraryType.Image, true, null);
|
||||||
|
Assert.NotNull(actual2);
|
||||||
|
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||||
|
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||||
|
_testOutputHelper.WriteLine("Format ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Series, actual2.Series);
|
||||||
|
_testOutputHelper.WriteLine("Series ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
|
||||||
|
_testOutputHelper.WriteLine("Chapters ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
|
||||||
|
_testOutputHelper.WriteLine("Volumes ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Edition, actual2.Edition);
|
||||||
|
_testOutputHelper.WriteLine("Edition ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Filename, actual2.Filename);
|
||||||
|
_testOutputHelper.WriteLine("Filename ✓");
|
||||||
|
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||||
|
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||||
|
|
||||||
|
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
|
||||||
|
expectedInfo2 = new ParserInfo
|
||||||
|
{
|
||||||
|
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||||
|
Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
|
||||||
|
FullFilePath = filepath, IsSpecial = false
|
||||||
|
};
|
||||||
|
|
||||||
|
actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, true, null);
|
||||||
|
Assert.NotNull(actual2);
|
||||||
|
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||||
|
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||||
|
_testOutputHelper.WriteLine("Format ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Series, actual2.Series);
|
||||||
|
_testOutputHelper.WriteLine("Series ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
|
||||||
|
_testOutputHelper.WriteLine("Chapters ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
|
||||||
|
_testOutputHelper.WriteLine("Volumes ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Edition, actual2.Edition);
|
||||||
|
_testOutputHelper.WriteLine("Edition ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Filename, actual2.Filename);
|
||||||
|
_testOutputHelper.WriteLine("Filename ✓");
|
||||||
|
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||||
|
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||||
|
|
||||||
|
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
|
||||||
|
expectedInfo2 = new ParserInfo
|
||||||
|
{
|
||||||
|
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||||
|
Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
|
||||||
|
FullFilePath = filepath, IsSpecial = false
|
||||||
|
};
|
||||||
|
|
||||||
|
actual2 = _parser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Image, true, null);
|
||||||
|
Assert.NotNull(actual2);
|
||||||
|
_testOutputHelper.WriteLine($"Validating {filepath}");
|
||||||
|
Assert.Equal(expectedInfo2.Format, actual2.Format);
|
||||||
|
_testOutputHelper.WriteLine("Format ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Series, actual2.Series);
|
||||||
|
_testOutputHelper.WriteLine("Series ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Chapters, actual2.Chapters);
|
||||||
|
_testOutputHelper.WriteLine("Chapters ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Volumes, actual2.Volumes);
|
||||||
|
_testOutputHelper.WriteLine("Volumes ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Edition, actual2.Edition);
|
||||||
|
_testOutputHelper.WriteLine("Edition ✓");
|
||||||
|
Assert.Equal(expectedInfo2.Filename, actual2.Filename);
|
||||||
|
_testOutputHelper.WriteLine("Filename ✓");
|
||||||
|
Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
|
||||||
|
_testOutputHelper.WriteLine("FullFilePath ✓");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,18 +1,10 @@
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
using Xunit.Abstractions;
|
|
||||||
|
|
||||||
namespace API.Tests.Parser;
|
namespace API.Tests.Parsing;
|
||||||
|
|
||||||
public class MangaParserTests
|
public class MangaParsingTests
|
||||||
{
|
{
|
||||||
private readonly ITestOutputHelper _testOutputHelper;
|
|
||||||
|
|
||||||
public MangaParserTests(ITestOutputHelper testOutputHelper)
|
|
||||||
{
|
|
||||||
_testOutputHelper = testOutputHelper;
|
|
||||||
}
|
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
|
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
|
||||||
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "1")]
|
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "1")]
|
||||||
|
|
@ -25,7 +17,7 @@ public class MangaParserTests
|
||||||
[InlineData("v001", "1")]
|
[InlineData("v001", "1")]
|
||||||
[InlineData("Vol 1", "1")]
|
[InlineData("Vol 1", "1")]
|
||||||
[InlineData("vol_356-1", "356")] // Mangapy syntax
|
[InlineData("vol_356-1", "356")] // Mangapy syntax
|
||||||
[InlineData("No Volume", "0")]
|
[InlineData("No Volume", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||||
[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")]
|
[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")]
|
||||||
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1.1")]
|
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1.1")]
|
||||||
[InlineData("Tonikaku Cawaii [Volume 11].cbz", "11")]
|
[InlineData("Tonikaku Cawaii [Volume 11].cbz", "11")]
|
||||||
|
|
@ -40,18 +32,18 @@ public class MangaParserTests
|
||||||
[InlineData("Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", "1")]
|
[InlineData("Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", "1")]
|
||||||
[InlineData("Dorohedoro v11 (2013) (Digital) (LostNerevarine-Empire).cbz", "11")]
|
[InlineData("Dorohedoro v11 (2013) (Digital) (LostNerevarine-Empire).cbz", "11")]
|
||||||
[InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")]
|
[InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")]
|
||||||
[InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "0")]
|
[InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||||
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1")]
|
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1")]
|
||||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "0")]
|
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||||
[InlineData("VanDread-v01-c001[MD].zip", "1")]
|
[InlineData("VanDread-v01-c001[MD].zip", "1")]
|
||||||
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
|
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
|
||||||
[InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
|
[InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
|
||||||
[InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
|
[InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
|
||||||
[InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
|
[InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
|
||||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", "0")]
|
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||||
[InlineData("Vagabond_v03", "3")]
|
[InlineData("Vagabond_v03", "3")]
|
||||||
[InlineData("Mujaki No Rakune Volume 10.cbz", "10")]
|
[InlineData("Mujaki No Rakune Volume 10.cbz", "10")]
|
||||||
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "0")]
|
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||||
[InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")]
|
[InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")]
|
||||||
[InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")]
|
[InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")]
|
||||||
[InlineData("Gantz.V26.cbz", "26")]
|
[InlineData("Gantz.V26.cbz", "26")]
|
||||||
|
|
@ -60,7 +52,7 @@ public class MangaParserTests
|
||||||
[InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")]
|
[InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")]
|
||||||
[InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "1")]
|
[InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "1")]
|
||||||
[InlineData("Sword Art Online Vol 10 - Alicization Running [Yen Press] [LuCaZ] {r2}.epub", "10")]
|
[InlineData("Sword Art Online Vol 10 - Alicization Running [Yen Press] [LuCaZ] {r2}.epub", "10")]
|
||||||
[InlineData("Noblesse - Episode 406 (52 Pages).7z", "0")]
|
[InlineData("Noblesse - Episode 406 (52 Pages).7z", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||||
[InlineData("X-Men v1 #201 (September 2007).cbz", "1")]
|
[InlineData("X-Men v1 #201 (September 2007).cbz", "1")]
|
||||||
[InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")]
|
[InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")]
|
||||||
[InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")]
|
[InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")]
|
||||||
|
|
@ -72,20 +64,21 @@ public class MangaParserTests
|
||||||
[InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1-3巻", "1-3")]
|
[InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1-3巻", "1-3")]
|
||||||
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "3.5")]
|
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "3.5")]
|
||||||
[InlineData("Kebab Том 1 Глава 3", "1")]
|
[InlineData("Kebab Том 1 Глава 3", "1")]
|
||||||
[InlineData("Манга Глава 2", "0")]
|
[InlineData("Манга Глава 2", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
|
||||||
[InlineData("Манга Тома 1-4", "1-4")]
|
[InlineData("Манга Тома 1-4", "1-4")]
|
||||||
[InlineData("Манга Том 1-4", "1-4")]
|
[InlineData("Манга Том 1-4", "1-4")]
|
||||||
[InlineData("조선왕조실톡 106화", "106")]
|
[InlineData("조선왕조실톡 106화", "106")]
|
||||||
[InlineData("죽음 13회", "13")]
|
|
||||||
[InlineData("동의보감 13장", "13")]
|
[InlineData("동의보감 13장", "13")]
|
||||||
[InlineData("몰?루 아카이브 7.5권", "7.5")]
|
[InlineData("몰?루 아카이브 7.5권", "7.5")]
|
||||||
[InlineData("63권#200", "63")]
|
[InlineData("63권#200", "63")]
|
||||||
[InlineData("시즌34삽화2", "34")]
|
[InlineData("시즌34삽화2", "34")]
|
||||||
[InlineData("Accel World Chapter 001 Volume 002", "2")]
|
[InlineData("Accel World Chapter 001 Volume 002", "2")]
|
||||||
[InlineData("Accel World Volume 2", "2")]
|
[InlineData("Accel World Volume 2", "2")]
|
||||||
|
[InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.31 Omake", "30")]
|
||||||
|
[InlineData("Zom 100 - Bucket List of the Dead v01", "1")]
|
||||||
public void ParseVolumeTest(string filename, string expected)
|
public void ParseVolumeTest(string filename, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename));
|
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Manga));
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
|
|
@ -138,7 +131,6 @@ public class MangaParserTests
|
||||||
[InlineData("Vagabond_v03", "Vagabond")]
|
[InlineData("Vagabond_v03", "Vagabond")]
|
||||||
[InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")]
|
[InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")]
|
||||||
[InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")]
|
[InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")]
|
||||||
[InlineData("[BAA]_Darker_than_Black_Omake-1.zip", "Darker than Black")]
|
|
||||||
[InlineData("Baketeriya ch01-05.zip", "Baketeriya")]
|
[InlineData("Baketeriya ch01-05.zip", "Baketeriya")]
|
||||||
[InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")]
|
[InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")]
|
||||||
[InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")]
|
[InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")]
|
||||||
|
|
@ -204,21 +196,31 @@ public class MangaParserTests
|
||||||
[InlineData("죠시라쿠! 2년 후 1권", "죠시라쿠! 2년 후")]
|
[InlineData("죠시라쿠! 2년 후 1권", "죠시라쿠! 2년 후")]
|
||||||
[InlineData("test 2 years 1권", "test 2 years")]
|
[InlineData("test 2 years 1권", "test 2 years")]
|
||||||
[InlineData("test 2 years 1화", "test 2 years")]
|
[InlineData("test 2 years 1화", "test 2 years")]
|
||||||
|
[InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.30 Omake", "Nagasarete Airantou")]
|
||||||
|
[InlineData("Cynthia The Mission - c000 - c006 (v06)", "Cynthia The Mission")]
|
||||||
|
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1", "เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท")]
|
||||||
|
[InlineData("Max Level Returner เล่มที่ 5", "Max Level Returner")]
|
||||||
|
[InlineData("หนึ่งความคิด นิจนิรันดร์ เล่ม 2", "หนึ่งความคิด นิจนิรันดร์")]
|
||||||
|
[InlineData("不安の種\uff0b - 01", "不安の種\uff0b")]
|
||||||
|
[InlineData("Giant Ojou-sama - Ch. 33.5 - Volume 04 Bonus Chapter", "Giant Ojou-sama")]
|
||||||
|
[InlineData("[218565]-(C92) [BRIO (Puyocha)] Mika-nee no Tanryoku Shidou - Mika s Guide to Self-Confidence (THE IDOLM@STE", "")]
|
||||||
|
[InlineData("Monster #8 Ch. 001", "Monster #8")]
|
||||||
|
[InlineData("Zom 100 - Bucket List of the Dead v01", "Zom 100 - Bucket List of the Dead")]
|
||||||
public void ParseSeriesTest(string filename, string expected)
|
public void ParseSeriesTest(string filename, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename));
|
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename, LibraryType.Manga));
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
|
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
|
||||||
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")]
|
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")]
|
||||||
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "90-98")]
|
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "90-98")]
|
||||||
[InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "0")]
|
[InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "0")]
|
[InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1-8")]
|
[InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1-8")]
|
||||||
[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "0")]
|
[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("c001", "1")]
|
[InlineData("c001", "1")]
|
||||||
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", "0")]
|
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")]
|
[InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")]
|
||||||
[InlineData("Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz", "18")]
|
[InlineData("Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz", "18")]
|
||||||
[InlineData("Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip", "0-6")]
|
[InlineData("Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip", "0-6")]
|
||||||
|
|
@ -241,7 +243,7 @@ public class MangaParserTests
|
||||||
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1-6")]
|
[InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1-6")]
|
||||||
[InlineData("APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", "40")]
|
[InlineData("APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", "40")]
|
||||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "12")]
|
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "12")]
|
||||||
[InlineData("Vol 1", "0")]
|
[InlineData("Vol 1", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("VanDread-v01-c001[MD].zip", "1")]
|
[InlineData("VanDread-v01-c001[MD].zip", "1")]
|
||||||
[InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")]
|
[InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")]
|
||||||
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01", "1")]
|
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01", "1")]
|
||||||
|
|
@ -253,10 +255,10 @@ public class MangaParserTests
|
||||||
[InlineData("Fullmetal Alchemist chapters 101-108.cbz", "101-108")]
|
[InlineData("Fullmetal Alchemist chapters 101-108.cbz", "101-108")]
|
||||||
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")]
|
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")]
|
||||||
[InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")]
|
[InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")]
|
||||||
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")]
|
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("Beelzebub_153b_RHS.zip", "153.5")]
|
[InlineData("Beelzebub_153b_RHS.zip", "153.5")]
|
||||||
[InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")]
|
[InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")]
|
||||||
[InlineData("Transferred to another world magical swordsman v1.1", "0")]
|
[InlineData("Transferred to another world magical swordsman v1.1", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")]
|
[InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")]
|
||||||
[InlineData("Kiss x Sis - Ch.12 - 1 , 2 , 3P!.cbz", "12")]
|
[InlineData("Kiss x Sis - Ch.12 - 1 , 2 , 3P!.cbz", "12")]
|
||||||
[InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "1")]
|
[InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "1")]
|
||||||
|
|
@ -275,24 +277,31 @@ public class MangaParserTests
|
||||||
[InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")]
|
[InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")]
|
||||||
[InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")]
|
[InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")]
|
||||||
[InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")]
|
[InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")]
|
||||||
[InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", "0")]
|
[InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("Kaiju No. 8 036 (2021) (Digital)", "36")]
|
[InlineData("Kaiju No. 8 036 (2021) (Digital)", "36")]
|
||||||
[InlineData("Samurai Jack Vol. 01 - The threads of Time", "0")]
|
[InlineData("Samurai Jack Vol. 01 - The threads of Time", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("【TFO汉化&Petit汉化】迷你偶像漫画第25话", "25")]
|
[InlineData("【TFO汉化&Petit汉化】迷你偶像漫画第25话", "25")]
|
||||||
[InlineData("자유록 13회#2", "13")]
|
[InlineData("자유록 13회#2", "13")]
|
||||||
[InlineData("이세계에서 고아원을 열었지만, 어째서인지 아무도 독립하려 하지 않는다 38-1화 ", "38")]
|
[InlineData("이세계에서 고아원을 열었지만, 어째서인지 아무도 독립하려 하지 않는다 38-1화 ", "38")]
|
||||||
[InlineData("[ハレム]ナナとカオル ~高校生のSMごっこ~ 第10話", "10")]
|
[InlineData("[ハレム]ナナとカオル ~高校生のSMごっこ~ 第10話", "10")]
|
||||||
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "0")]
|
[InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
[InlineData("Kebab Том 1 Глава 3", "3")]
|
[InlineData("Kebab Том 1 Глава 3", "3")]
|
||||||
[InlineData("Манга Глава 2", "2")]
|
[InlineData("Манга Глава 2", "2")]
|
||||||
[InlineData("Манга 2 Глава", "2")]
|
[InlineData("Манга 2 Глава", "2")]
|
||||||
[InlineData("Манга Том 1 2 Глава", "2")]
|
[InlineData("Манга Том 1 2 Глава", "2")]
|
||||||
[InlineData("Accel World Chapter 001 Volume 002", "1")]
|
[InlineData("Accel World Chapter 001 Volume 002", "1")]
|
||||||
[InlineData("Bleach 001-003", "1-3")]
|
[InlineData("Bleach 001-003", "1-3")]
|
||||||
[InlineData("Accel World Volume 2", "0")]
|
[InlineData("Accel World Volume 2", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
|
||||||
|
[InlineData("Historys Strongest Disciple Kenichi_v11_c90-98", "90-98")]
|
||||||
|
[InlineData("Historys Strongest Disciple Kenichi c01-c04", "1-4")]
|
||||||
|
[InlineData("Adabana c00-02", "0-2")]
|
||||||
|
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "3")]
|
||||||
|
[InlineData("Max Level Returner ตอนที่ 5", "5")]
|
||||||
|
[InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
|
||||||
|
[InlineData("Monster #8 Ch. 001", "1")]
|
||||||
public void ParseChaptersTest(string filename, string expected)
|
public void ParseChaptersTest(string filename, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename));
|
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename, LibraryType.Manga));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -312,25 +321,25 @@ public class MangaParserTests
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input));
|
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input));
|
||||||
}
|
}
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)]
|
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", false)]
|
||||||
[InlineData("Beelzebub_Omake_June_2012_RHS", true)]
|
[InlineData("Beelzebub_Omake_June_2012_RHS", false)]
|
||||||
[InlineData("Beelzebub_Side_Story_02_RHS.zip", false)]
|
[InlineData("Beelzebub_Side_Story_02_RHS.zip", false)]
|
||||||
[InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)]
|
[InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", false)]
|
||||||
[InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)]
|
[InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", false)]
|
||||||
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)]
|
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", false)]
|
||||||
[InlineData("Ani-Hina Art Collection.cbz", true)]
|
[InlineData("Ani-Hina Art Collection.cbz", false)]
|
||||||
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)]
|
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", false)]
|
||||||
[InlineData("A Town Where You Live - Bonus Chapter.zip", true)]
|
[InlineData("A Town Where You Live - Bonus Chapter.zip", false)]
|
||||||
[InlineData("Yuki Merry - 4-Komga Anthology", false)]
|
[InlineData("Yuki Merry - 4-Komga Anthology", false)]
|
||||||
[InlineData("Beastars - SP01", false)]
|
[InlineData("Beastars - SP01", true)]
|
||||||
[InlineData("Beastars SP01", false)]
|
[InlineData("Beastars SP01", true)]
|
||||||
[InlineData("The League of Extraordinary Gentlemen", false)]
|
[InlineData("The League of Extraordinary Gentlemen", false)]
|
||||||
[InlineData("The League of Extra-ordinary Gentlemen", false)]
|
[InlineData("The League of Extra-ordinary Gentlemen", false)]
|
||||||
[InlineData("Dr. Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire)", false)]
|
[InlineData("Dr. Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire)", false)]
|
||||||
[InlineData("Hajime no Ippo - Artbook", false)]
|
[InlineData("Hajime no Ippo - Artbook", false)]
|
||||||
public void IsMangaSpecialTest(string input, bool expected)
|
public void IsMangaSpecialTest(string input, bool expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsMangaSpecial(input));
|
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsSpecial(input, LibraryType.Manga));
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
using API.Services.Tasks.Scanner.Parser;
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Parser;
|
namespace API.Tests.Parsing;
|
||||||
|
|
||||||
public class ParserInfoTests
|
public class ParserInfoTests
|
||||||
{
|
{
|
||||||
|
|
@ -11,14 +11,14 @@ public class ParserInfoTests
|
||||||
{
|
{
|
||||||
var p1 = new ParserInfo()
|
var p1 = new ParserInfo()
|
||||||
{
|
{
|
||||||
Chapters = "0",
|
Chapters = Parser.DefaultChapter,
|
||||||
Edition = "",
|
Edition = "",
|
||||||
Format = MangaFormat.Archive,
|
Format = MangaFormat.Archive,
|
||||||
FullFilePath = "/manga/darker than black.cbz",
|
FullFilePath = "/manga/darker than black.cbz",
|
||||||
IsSpecial = false,
|
IsSpecial = false,
|
||||||
Series = "darker than black",
|
Series = "darker than black",
|
||||||
Title = "darker than black",
|
Title = "darker than black",
|
||||||
Volumes = "0"
|
Volumes = Parser.LooseLeafVolume
|
||||||
};
|
};
|
||||||
|
|
||||||
var p2 = new ParserInfo()
|
var p2 = new ParserInfo()
|
||||||
|
|
@ -30,7 +30,7 @@ public class ParserInfoTests
|
||||||
IsSpecial = false,
|
IsSpecial = false,
|
||||||
Series = "darker than black",
|
Series = "darker than black",
|
||||||
Title = "Darker Than Black",
|
Title = "Darker Than Black",
|
||||||
Volumes = "0"
|
Volumes = Parser.LooseLeafVolume
|
||||||
};
|
};
|
||||||
|
|
||||||
var expected = new ParserInfo()
|
var expected = new ParserInfo()
|
||||||
|
|
@ -42,7 +42,7 @@ public class ParserInfoTests
|
||||||
IsSpecial = false,
|
IsSpecial = false,
|
||||||
Series = "darker than black",
|
Series = "darker than black",
|
||||||
Title = "darker than black",
|
Title = "darker than black",
|
||||||
Volumes = "0"
|
Volumes = Parser.LooseLeafVolume
|
||||||
};
|
};
|
||||||
p1.Merge(p2);
|
p1.Merge(p2);
|
||||||
|
|
||||||
|
|
@ -62,12 +62,12 @@ public class ParserInfoTests
|
||||||
IsSpecial = true,
|
IsSpecial = true,
|
||||||
Series = "darker than black",
|
Series = "darker than black",
|
||||||
Title = "darker than black",
|
Title = "darker than black",
|
||||||
Volumes = "0"
|
Volumes = Parser.LooseLeafVolume
|
||||||
};
|
};
|
||||||
|
|
||||||
var p2 = new ParserInfo()
|
var p2 = new ParserInfo()
|
||||||
{
|
{
|
||||||
Chapters = "0",
|
Chapters = Parser.DefaultChapter,
|
||||||
Edition = "",
|
Edition = "",
|
||||||
Format = MangaFormat.Archive,
|
Format = MangaFormat.Archive,
|
||||||
FullFilePath = "/manga/darker than black.cbz",
|
FullFilePath = "/manga/darker than black.cbz",
|
||||||
|
|
@ -3,18 +3,32 @@ using System.Linq;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
using static API.Services.Tasks.Scanner.Parser.Parser;
|
using static API.Services.Tasks.Scanner.Parser.Parser;
|
||||||
|
|
||||||
namespace API.Tests.Parser;
|
namespace API.Tests.Parsing;
|
||||||
|
|
||||||
public class ParserTests
|
public class ParsingTests
|
||||||
{
|
{
|
||||||
[Fact]
|
[Fact]
|
||||||
public void ShouldWork()
|
public void ShouldWork()
|
||||||
{
|
{
|
||||||
var s = 6.5f + "";
|
var s = 6.5f.ToString(CultureInfo.InvariantCulture);
|
||||||
var a = float.Parse(s, CultureInfo.InvariantCulture);
|
var a = float.Parse(s, CultureInfo.InvariantCulture);
|
||||||
Assert.Equal(6.5f, a);
|
Assert.Equal(6.5f, a);
|
||||||
|
|
||||||
|
s = 6.5f + "";
|
||||||
|
a = float.Parse(s, CultureInfo.CurrentCulture);
|
||||||
|
Assert.Equal(6.5f, a);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// [Theory]
|
||||||
|
// [InlineData("de-DE")]
|
||||||
|
// [InlineData("en-US")]
|
||||||
|
// public void ShouldParse(string culture)
|
||||||
|
// {
|
||||||
|
// var s = 6.5f + "";
|
||||||
|
// var a = float.Parse(s, CultureInfo.CreateSpecificCulture(culture));
|
||||||
|
// Assert.Equal(6.5f, a);
|
||||||
|
// }
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("Joe Shmo, Green Blue", "Joe Shmo, Green Blue")]
|
[InlineData("Joe Shmo, Green Blue", "Joe Shmo, Green Blue")]
|
||||||
[InlineData("Shmo, Joe", "Shmo, Joe")]
|
[InlineData("Shmo, Joe", "Shmo, Joe")]
|
||||||
|
|
@ -29,6 +43,7 @@ public class ParserTests
|
||||||
[InlineData("DEAD Tube Prologue", "DEAD Tube Prologue")]
|
[InlineData("DEAD Tube Prologue", "DEAD Tube Prologue")]
|
||||||
[InlineData("DEAD Tube Prologue SP01", "DEAD Tube Prologue")]
|
[InlineData("DEAD Tube Prologue SP01", "DEAD Tube Prologue")]
|
||||||
[InlineData("DEAD_Tube_Prologue SP01", "DEAD Tube Prologue")]
|
[InlineData("DEAD_Tube_Prologue SP01", "DEAD Tube Prologue")]
|
||||||
|
[InlineData("SP01 1. DEAD Tube Prologue", "1. DEAD Tube Prologue")]
|
||||||
public void CleanSpecialTitleTest(string input, string expected)
|
public void CleanSpecialTitleTest(string input, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, CleanSpecialTitle(input));
|
Assert.Equal(expected, CleanSpecialTitle(input));
|
||||||
|
|
@ -45,6 +60,18 @@ public class ParserTests
|
||||||
Assert.Equal(expected, HasSpecialMarker(input));
|
Assert.Equal(expected, HasSpecialMarker(input));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("Beastars - SP01", 1)]
|
||||||
|
[InlineData("Beastars SP01", 1)]
|
||||||
|
[InlineData("Beastars Special 01", 0)]
|
||||||
|
[InlineData("Beastars Extra 01", 0)]
|
||||||
|
[InlineData("Batman Beyond - Return of the Joker (2001) SP01", 1)]
|
||||||
|
[InlineData("Batman Beyond - Return of the Joker (2001)", 0)]
|
||||||
|
public void ParseSpecialIndexTest(string input, int expected)
|
||||||
|
{
|
||||||
|
Assert.Equal(expected, ParseSpecialIndex(input));
|
||||||
|
}
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("0001", "1")]
|
[InlineData("0001", "1")]
|
||||||
[InlineData("1", "1")]
|
[InlineData("1", "1")]
|
||||||
|
|
@ -71,7 +98,8 @@ public class ParserTests
|
||||||
[InlineData("-The Title", false, "The Title")]
|
[InlineData("-The Title", false, "The Title")]
|
||||||
[InlineData("- The Title", false, "The Title")]
|
[InlineData("- The Title", false, "The Title")]
|
||||||
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")]
|
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")]
|
||||||
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)", true, "Batman - Detective Comics - Rebirth Deluxe Edition")]
|
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)",
|
||||||
|
true, "Batman - Detective Comics - Rebirth Deluxe Edition Book 04")]
|
||||||
[InlineData("Something - Full Color Edition", false, "Something - Full Color Edition")]
|
[InlineData("Something - Full Color Edition", false, "Something - Full Color Edition")]
|
||||||
[InlineData("Witchblade 089 (2005) (Bittertek-DCP) (Top Cow (Image Comics))", true, "Witchblade 089")]
|
[InlineData("Witchblade 089 (2005) (Bittertek-DCP) (Top Cow (Image Comics))", true, "Witchblade 089")]
|
||||||
[InlineData("(C99) Kami-sama Hiroimashita. (SSSS.GRIDMAN)", false, "Kami-sama Hiroimashita.")]
|
[InlineData("(C99) Kami-sama Hiroimashita. (SSSS.GRIDMAN)", false, "Kami-sama Hiroimashita.")]
|
||||||
|
|
@ -155,6 +183,7 @@ public class ParserTests
|
||||||
[InlineData("3.5", 3.5)]
|
[InlineData("3.5", 3.5)]
|
||||||
[InlineData("3.5-4.0", 3.5)]
|
[InlineData("3.5-4.0", 3.5)]
|
||||||
[InlineData("asdfasdf", 0.0)]
|
[InlineData("asdfasdf", 0.0)]
|
||||||
|
[InlineData("-10", -10.0)]
|
||||||
public void MinimumNumberFromRangeTest(string input, float expected)
|
public void MinimumNumberFromRangeTest(string input, float expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, MinNumberFromRange(input));
|
Assert.Equal(expected, MinNumberFromRange(input));
|
||||||
|
|
@ -171,6 +200,7 @@ public class ParserTests
|
||||||
[InlineData("3.5", 3.5)]
|
[InlineData("3.5", 3.5)]
|
||||||
[InlineData("3.5-4.0", 4.0)]
|
[InlineData("3.5-4.0", 4.0)]
|
||||||
[InlineData("asdfasdf", 0.0)]
|
[InlineData("asdfasdf", 0.0)]
|
||||||
|
[InlineData("-10", -10.0)]
|
||||||
public void MaximumNumberFromRangeTest(string input, float expected)
|
public void MaximumNumberFromRangeTest(string input, float expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, MaxNumberFromRange(input));
|
Assert.Equal(expected, MaxNumberFromRange(input));
|
||||||
|
|
@ -186,6 +216,7 @@ public class ParserTests
|
||||||
[InlineData("카비타", "카비타")]
|
[InlineData("카비타", "카비타")]
|
||||||
[InlineData("06", "06")]
|
[InlineData("06", "06")]
|
||||||
[InlineData("", "")]
|
[InlineData("", "")]
|
||||||
|
[InlineData("不安の種+", "不安の種+")]
|
||||||
public void NormalizeTest(string input, string expected)
|
public void NormalizeTest(string input, string expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, Normalize(input));
|
Assert.Equal(expected, Normalize(input));
|
||||||
|
|
@ -220,6 +251,7 @@ public class ParserTests
|
||||||
[InlineData("ch1/backcover.png", false)]
|
[InlineData("ch1/backcover.png", false)]
|
||||||
[InlineData("backcover.png", false)]
|
[InlineData("backcover.png", false)]
|
||||||
[InlineData("back_cover.png", false)]
|
[InlineData("back_cover.png", false)]
|
||||||
|
[InlineData("LD Blacklands #1 35 (back cover).png", false)]
|
||||||
public void IsCoverImageTest(string inputPath, bool expected)
|
public void IsCoverImageTest(string inputPath, bool expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, IsCoverImage(inputPath));
|
Assert.Equal(expected, IsCoverImage(inputPath));
|
||||||
|
|
@ -235,6 +267,7 @@ public class ParserTests
|
||||||
[InlineData("@recycle/Love Hina/", true)]
|
[InlineData("@recycle/Love Hina/", true)]
|
||||||
[InlineData("E:/Test/__MACOSX/Love Hina/", true)]
|
[InlineData("E:/Test/__MACOSX/Love Hina/", true)]
|
||||||
[InlineData("E:/Test/.caltrash/Love Hina/", true)]
|
[InlineData("E:/Test/.caltrash/Love Hina/", true)]
|
||||||
|
[InlineData("E:/Test/.yacreaderlibrary/Love Hina/", true)]
|
||||||
public void HasBlacklistedFolderInPathTest(string inputPath, bool expected)
|
public void HasBlacklistedFolderInPathTest(string inputPath, bool expected)
|
||||||
{
|
{
|
||||||
Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
|
Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
|
||||||
|
|
@ -15,7 +15,6 @@ using Microsoft.EntityFrameworkCore;
|
||||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using NSubstitute;
|
using NSubstitute;
|
||||||
using Xunit;
|
|
||||||
|
|
||||||
namespace API.Tests.Repository;
|
namespace API.Tests.Repository;
|
||||||
|
|
||||||
|
|
@ -114,65 +113,65 @@ public class CollectionTagRepositoryTests
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
#region RemoveTagsWithoutSeries
|
// #region RemoveTagsWithoutSeries
|
||||||
|
//
|
||||||
[Fact]
|
// [Fact]
|
||||||
public async Task RemoveTagsWithoutSeries_ShouldRemoveTags()
|
// public async Task RemoveTagsWithoutSeries_ShouldRemoveTags()
|
||||||
{
|
// {
|
||||||
var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
|
// var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
|
||||||
var series = new SeriesBuilder("Test 1").Build();
|
// var series = new SeriesBuilder("Test 1").Build();
|
||||||
var commonTag = new CollectionTagBuilder("Tag 1").Build();
|
// var commonTag = new AppUserCollectionBuilder("Tag 1").Build();
|
||||||
series.Metadata.CollectionTags.Add(commonTag);
|
// series.Metadata.CollectionTags.Add(commonTag);
|
||||||
series.Metadata.CollectionTags.Add(new CollectionTagBuilder("Tag 2").Build());
|
// series.Metadata.CollectionTags.Add(new AppUserCollectionBuilder("Tag 2").Build());
|
||||||
|
//
|
||||||
var series2 = new SeriesBuilder("Test 1").Build();
|
// var series2 = new SeriesBuilder("Test 1").Build();
|
||||||
series2.Metadata.CollectionTags.Add(commonTag);
|
// series2.Metadata.CollectionTags.Add(commonTag);
|
||||||
library.Series.Add(series);
|
// library.Series.Add(series);
|
||||||
library.Series.Add(series2);
|
// library.Series.Add(series2);
|
||||||
_unitOfWork.LibraryRepository.Add(library);
|
// _unitOfWork.LibraryRepository.Add(library);
|
||||||
await _unitOfWork.CommitAsync();
|
// await _unitOfWork.CommitAsync();
|
||||||
|
//
|
||||||
Assert.Equal(2, series.Metadata.CollectionTags.Count);
|
// Assert.Equal(2, series.Metadata.CollectionTags.Count);
|
||||||
Assert.Single(series2.Metadata.CollectionTags);
|
// Assert.Single(series2.Metadata.CollectionTags);
|
||||||
|
//
|
||||||
// Delete both series
|
// // Delete both series
|
||||||
_unitOfWork.SeriesRepository.Remove(series);
|
// _unitOfWork.SeriesRepository.Remove(series);
|
||||||
_unitOfWork.SeriesRepository.Remove(series2);
|
// _unitOfWork.SeriesRepository.Remove(series2);
|
||||||
|
//
|
||||||
await _unitOfWork.CommitAsync();
|
// await _unitOfWork.CommitAsync();
|
||||||
|
//
|
||||||
// Validate that both tags exist
|
// // Validate that both tags exist
|
||||||
Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
|
// Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
|
||||||
|
//
|
||||||
await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
|
// await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
|
||||||
|
//
|
||||||
Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
|
// Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
[Fact]
|
// [Fact]
|
||||||
public async Task RemoveTagsWithoutSeries_ShouldNotRemoveTags()
|
// public async Task RemoveTagsWithoutSeries_ShouldNotRemoveTags()
|
||||||
{
|
// {
|
||||||
var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
|
// var library = new LibraryBuilder("Test", LibraryType.Manga).Build();
|
||||||
var series = new SeriesBuilder("Test 1").Build();
|
// var series = new SeriesBuilder("Test 1").Build();
|
||||||
var commonTag = new CollectionTagBuilder("Tag 1").Build();
|
// var commonTag = new AppUserCollectionBuilder("Tag 1").Build();
|
||||||
series.Metadata.CollectionTags.Add(commonTag);
|
// series.Metadata.CollectionTags.Add(commonTag);
|
||||||
series.Metadata.CollectionTags.Add(new CollectionTagBuilder("Tag 2").Build());
|
// series.Metadata.CollectionTags.Add(new AppUserCollectionBuilder("Tag 2").Build());
|
||||||
|
//
|
||||||
var series2 = new SeriesBuilder("Test 1").Build();
|
// var series2 = new SeriesBuilder("Test 1").Build();
|
||||||
series2.Metadata.CollectionTags.Add(commonTag);
|
// series2.Metadata.CollectionTags.Add(commonTag);
|
||||||
library.Series.Add(series);
|
// library.Series.Add(series);
|
||||||
library.Series.Add(series2);
|
// library.Series.Add(series2);
|
||||||
_unitOfWork.LibraryRepository.Add(library);
|
// _unitOfWork.LibraryRepository.Add(library);
|
||||||
await _unitOfWork.CommitAsync();
|
// await _unitOfWork.CommitAsync();
|
||||||
|
//
|
||||||
Assert.Equal(2, series.Metadata.CollectionTags.Count);
|
// Assert.Equal(2, series.Metadata.CollectionTags.Count);
|
||||||
Assert.Single(series2.Metadata.CollectionTags);
|
// Assert.Single(series2.Metadata.CollectionTags);
|
||||||
|
//
|
||||||
await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
|
// await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries();
|
||||||
|
//
|
||||||
// Validate that both tags exist
|
// // Validate that both tags exist
|
||||||
Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
|
// Assert.Equal(2, (await _unitOfWork.CollectionTagRepository.GetAllTagsAsync()).Count());
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
#endregion
|
// #endregion
|
||||||
}
|
}
|
||||||
|
|
|
||||||
280
API.Tests/Repository/GenreRepositoryTests.cs
Normal file
280
API.Tests/Repository/GenreRepositoryTests.cs
Normal file
|
|
@ -0,0 +1,280 @@
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.DTOs.Metadata.Browse;
|
||||||
|
using API.Entities;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Entities.Metadata;
|
||||||
|
using API.Helpers;
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Repository;
|
||||||
|
|
||||||
|
public class GenreRepositoryTests : AbstractDbTest
|
||||||
|
{
|
||||||
|
private AppUser _fullAccess;
|
||||||
|
private AppUser _restrictedAccess;
|
||||||
|
private AppUser _restrictedAgeAccess;
|
||||||
|
|
||||||
|
protected override async Task ResetDb()
|
||||||
|
{
|
||||||
|
Context.Genre.RemoveRange(Context.Genre);
|
||||||
|
Context.Library.RemoveRange(Context.Library);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private TestGenreSet CreateTestGenres()
|
||||||
|
{
|
||||||
|
return new TestGenreSet
|
||||||
|
{
|
||||||
|
SharedSeriesChaptersGenre = new GenreBuilder("Shared Series Chapter Genre").Build(),
|
||||||
|
SharedSeriesGenre = new GenreBuilder("Shared Series Genre").Build(),
|
||||||
|
SharedChaptersGenre = new GenreBuilder("Shared Chapters Genre").Build(),
|
||||||
|
Lib0SeriesChaptersGenre = new GenreBuilder("Lib0 Series Chapter Genre").Build(),
|
||||||
|
Lib0SeriesGenre = new GenreBuilder("Lib0 Series Genre").Build(),
|
||||||
|
Lib0ChaptersGenre = new GenreBuilder("Lib0 Chapters Genre").Build(),
|
||||||
|
Lib1SeriesChaptersGenre = new GenreBuilder("Lib1 Series Chapter Genre").Build(),
|
||||||
|
Lib1SeriesGenre = new GenreBuilder("Lib1 Series Genre").Build(),
|
||||||
|
Lib1ChaptersGenre = new GenreBuilder("Lib1 Chapters Genre").Build(),
|
||||||
|
Lib1ChapterAgeGenre = new GenreBuilder("Lib1 Chapter Age Genre").Build()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task SeedDbWithGenres(TestGenreSet genres)
|
||||||
|
{
|
||||||
|
await CreateTestUsers();
|
||||||
|
await AddGenresToContext(genres);
|
||||||
|
await CreateLibrariesWithGenres(genres);
|
||||||
|
await AssignLibrariesToUsers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task CreateTestUsers()
|
||||||
|
{
|
||||||
|
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
|
||||||
|
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
|
||||||
|
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
|
||||||
|
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
|
||||||
|
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
|
||||||
|
|
||||||
|
Context.Users.Add(_fullAccess);
|
||||||
|
Context.Users.Add(_restrictedAccess);
|
||||||
|
Context.Users.Add(_restrictedAgeAccess);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task AddGenresToContext(TestGenreSet genres)
|
||||||
|
{
|
||||||
|
var allGenres = genres.GetAllGenres();
|
||||||
|
Context.Genre.AddRange(allGenres);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task CreateLibrariesWithGenres(TestGenreSet genres)
|
||||||
|
{
|
||||||
|
var lib0 = new LibraryBuilder("lib0")
|
||||||
|
.WithSeries(new SeriesBuilder("lib0-s0")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib0SeriesChaptersGenre, genres.Lib0SeriesGenre])
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib0SeriesChaptersGenre, genres.Lib0ChaptersGenre])
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var lib1 = new LibraryBuilder("lib1")
|
||||||
|
.WithSeries(new SeriesBuilder("lib1-s0")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1SeriesGenre])
|
||||||
|
.WithAgeRating(AgeRating.Mature17Plus)
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre, genres.Lib1ChapterAgeGenre])
|
||||||
|
.WithAgeRating(AgeRating.Mature17Plus)
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.WithSeries(new SeriesBuilder("lib1-s1")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedSeriesGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1SeriesGenre])
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithGenres([genres.SharedSeriesChaptersGenre, genres.SharedChaptersGenre, genres.Lib1SeriesChaptersGenre, genres.Lib1ChaptersGenre])
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
Context.Library.Add(lib0);
|
||||||
|
Context.Library.Add(lib1);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task AssignLibrariesToUsers()
|
||||||
|
{
|
||||||
|
var lib0 = Context.Library.First(l => l.Name == "lib0");
|
||||||
|
var lib1 = Context.Library.First(l => l.Name == "lib1");
|
||||||
|
|
||||||
|
_fullAccess.Libraries.Add(lib0);
|
||||||
|
_fullAccess.Libraries.Add(lib1);
|
||||||
|
_restrictedAccess.Libraries.Add(lib1);
|
||||||
|
_restrictedAgeAccess.Libraries.Add(lib1);
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Predicate<BrowseGenreDto> ContainsGenreCheck(Genre genre)
|
||||||
|
{
|
||||||
|
return g => g.Id == genre.Id;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void AssertGenrePresent(IEnumerable<BrowseGenreDto> genres, Genre expectedGenre)
|
||||||
|
{
|
||||||
|
Assert.Contains(genres, ContainsGenreCheck(expectedGenre));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void AssertGenreNotPresent(IEnumerable<BrowseGenreDto> genres, Genre expectedGenre)
|
||||||
|
{
|
||||||
|
Assert.DoesNotContain(genres, ContainsGenreCheck(expectedGenre));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static BrowseGenreDto GetGenreDto(IEnumerable<BrowseGenreDto> genres, Genre genre)
|
||||||
|
{
|
||||||
|
return genres.First(dto => dto.Id == genre.Id);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetBrowseableGenre_FullAccess_ReturnsAllGenresWithCorrectCounts()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await ResetDb();
|
||||||
|
var genres = CreateTestGenres();
|
||||||
|
await SeedDbWithGenres(genres);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var fullAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_fullAccess.Id, new UserParams());
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(genres.GetAllGenres().Count, fullAccessGenres.TotalCount);
|
||||||
|
|
||||||
|
foreach (var genre in genres.GetAllGenres())
|
||||||
|
{
|
||||||
|
AssertGenrePresent(fullAccessGenres, genre);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify counts - 1 lib0 series, 2 lib1 series = 3 total series
|
||||||
|
Assert.Equal(3, GetGenreDto(fullAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
|
||||||
|
Assert.Equal(6, GetGenreDto(fullAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
|
||||||
|
Assert.Equal(1, GetGenreDto(fullAccessGenres, genres.Lib0SeriesGenre).SeriesCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetBrowseableGenre_RestrictedAccess_ReturnsOnlyAccessibleGenres()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await ResetDb();
|
||||||
|
var genres = CreateTestGenres();
|
||||||
|
await SeedDbWithGenres(genres);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var restrictedAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_restrictedAccess.Id, new UserParams());
|
||||||
|
|
||||||
|
// Assert - Should see: 3 shared + 4 library 1 specific = 7 genres
|
||||||
|
Assert.Equal(7, restrictedAccessGenres.TotalCount);
|
||||||
|
|
||||||
|
// Verify shared and Library 1 genres are present
|
||||||
|
AssertGenrePresent(restrictedAccessGenres, genres.SharedSeriesChaptersGenre);
|
||||||
|
AssertGenrePresent(restrictedAccessGenres, genres.SharedSeriesGenre);
|
||||||
|
AssertGenrePresent(restrictedAccessGenres, genres.SharedChaptersGenre);
|
||||||
|
AssertGenrePresent(restrictedAccessGenres, genres.Lib1SeriesChaptersGenre);
|
||||||
|
AssertGenrePresent(restrictedAccessGenres, genres.Lib1SeriesGenre);
|
||||||
|
AssertGenrePresent(restrictedAccessGenres, genres.Lib1ChaptersGenre);
|
||||||
|
AssertGenrePresent(restrictedAccessGenres, genres.Lib1ChapterAgeGenre);
|
||||||
|
|
||||||
|
// Verify Library 0 specific genres are not present
|
||||||
|
AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0SeriesChaptersGenre);
|
||||||
|
AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0SeriesGenre);
|
||||||
|
AssertGenreNotPresent(restrictedAccessGenres, genres.Lib0ChaptersGenre);
|
||||||
|
|
||||||
|
// Verify counts - 2 lib1 series
|
||||||
|
Assert.Equal(2, GetGenreDto(restrictedAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
|
||||||
|
Assert.Equal(4, GetGenreDto(restrictedAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
|
||||||
|
Assert.Equal(2, GetGenreDto(restrictedAccessGenres, genres.Lib1SeriesGenre).SeriesCount);
|
||||||
|
Assert.Equal(4, GetGenreDto(restrictedAccessGenres, genres.Lib1ChaptersGenre).ChapterCount);
|
||||||
|
Assert.Equal(1, GetGenreDto(restrictedAccessGenres, genres.Lib1ChapterAgeGenre).ChapterCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetBrowseableGenre_RestrictedAgeAccess_FiltersAgeRestrictedContent()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await ResetDb();
|
||||||
|
var genres = CreateTestGenres();
|
||||||
|
await SeedDbWithGenres(genres);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var restrictedAgeAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_restrictedAgeAccess.Id, new UserParams());
|
||||||
|
|
||||||
|
// Assert - Should see: 3 shared + 3 lib1 specific = 6 genres (age-restricted genre filtered out)
|
||||||
|
Assert.Equal(6, restrictedAgeAccessGenres.TotalCount);
|
||||||
|
|
||||||
|
// Verify accessible genres are present
|
||||||
|
AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre);
|
||||||
|
AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedSeriesGenre);
|
||||||
|
AssertGenrePresent(restrictedAgeAccessGenres, genres.SharedChaptersGenre);
|
||||||
|
AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1SeriesChaptersGenre);
|
||||||
|
AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1SeriesGenre);
|
||||||
|
AssertGenrePresent(restrictedAgeAccessGenres, genres.Lib1ChaptersGenre);
|
||||||
|
|
||||||
|
// Verify age-restricted genre is filtered out
|
||||||
|
AssertGenreNotPresent(restrictedAgeAccessGenres, genres.Lib1ChapterAgeGenre);
|
||||||
|
|
||||||
|
// Verify counts - 1 series lib1 (age-restricted series filtered out)
|
||||||
|
Assert.Equal(1, GetGenreDto(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre).SeriesCount);
|
||||||
|
Assert.Equal(1, GetGenreDto(restrictedAgeAccessGenres, genres.Lib1SeriesGenre).SeriesCount);
|
||||||
|
|
||||||
|
// These values represent a bug - chapters are not properly filtered when their series is age-restricted
|
||||||
|
// Should be 2, but currently returns 3 due to the filtering issue
|
||||||
|
Assert.Equal(3, GetGenreDto(restrictedAgeAccessGenres, genres.SharedSeriesChaptersGenre).ChapterCount);
|
||||||
|
Assert.Equal(3, GetGenreDto(restrictedAgeAccessGenres, genres.Lib1ChaptersGenre).ChapterCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
private class TestGenreSet
|
||||||
|
{
|
||||||
|
public Genre SharedSeriesChaptersGenre { get; set; }
|
||||||
|
public Genre SharedSeriesGenre { get; set; }
|
||||||
|
public Genre SharedChaptersGenre { get; set; }
|
||||||
|
public Genre Lib0SeriesChaptersGenre { get; set; }
|
||||||
|
public Genre Lib0SeriesGenre { get; set; }
|
||||||
|
public Genre Lib0ChaptersGenre { get; set; }
|
||||||
|
public Genre Lib1SeriesChaptersGenre { get; set; }
|
||||||
|
public Genre Lib1SeriesGenre { get; set; }
|
||||||
|
public Genre Lib1ChaptersGenre { get; set; }
|
||||||
|
public Genre Lib1ChapterAgeGenre { get; set; }
|
||||||
|
|
||||||
|
public List<Genre> GetAllGenres()
|
||||||
|
{
|
||||||
|
return
|
||||||
|
[
|
||||||
|
SharedSeriesChaptersGenre, SharedSeriesGenre, SharedChaptersGenre,
|
||||||
|
Lib0SeriesChaptersGenre, Lib0SeriesGenre, Lib0ChaptersGenre,
|
||||||
|
Lib1SeriesChaptersGenre, Lib1SeriesGenre, Lib1ChaptersGenre, Lib1ChapterAgeGenre
|
||||||
|
];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
342
API.Tests/Repository/PersonRepositoryTests.cs
Normal file
342
API.Tests/Repository/PersonRepositoryTests.cs
Normal file
|
|
@ -0,0 +1,342 @@
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.DTOs.Metadata.Browse;
|
||||||
|
using API.DTOs.Metadata.Browse.Requests;
|
||||||
|
using API.Entities;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Entities.Person;
|
||||||
|
using API.Helpers;
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Repository;
|
||||||
|
|
||||||
|
public class PersonRepositoryTests : AbstractDbTest
|
||||||
|
{
|
||||||
|
private AppUser _fullAccess;
|
||||||
|
private AppUser _restrictedAccess;
|
||||||
|
private AppUser _restrictedAgeAccess;
|
||||||
|
|
||||||
|
protected override async Task ResetDb()
|
||||||
|
{
|
||||||
|
Context.Person.RemoveRange(Context.Person.ToList());
|
||||||
|
Context.Library.RemoveRange(Context.Library.ToList());
|
||||||
|
Context.AppUser.RemoveRange(Context.AppUser.ToList());
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task SeedDb()
|
||||||
|
{
|
||||||
|
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
|
||||||
|
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
|
||||||
|
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
|
||||||
|
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
|
||||||
|
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
|
||||||
|
|
||||||
|
Context.AppUser.Add(_fullAccess);
|
||||||
|
Context.AppUser.Add(_restrictedAccess);
|
||||||
|
Context.AppUser.Add(_restrictedAgeAccess);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
var people = CreateTestPeople();
|
||||||
|
Context.Person.AddRange(people);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
var libraries = CreateTestLibraries(people);
|
||||||
|
Context.Library.AddRange(libraries);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
_fullAccess.Libraries.Add(libraries[0]); // lib0
|
||||||
|
_fullAccess.Libraries.Add(libraries[1]); // lib1
|
||||||
|
_restrictedAccess.Libraries.Add(libraries[1]); // lib1 only
|
||||||
|
_restrictedAgeAccess.Libraries.Add(libraries[1]); // lib1 only
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Person> CreateTestPeople()
|
||||||
|
{
|
||||||
|
return new List<Person>
|
||||||
|
{
|
||||||
|
new PersonBuilder("Shared Series Chapter Person").Build(),
|
||||||
|
new PersonBuilder("Shared Series Person").Build(),
|
||||||
|
new PersonBuilder("Shared Chapters Person").Build(),
|
||||||
|
new PersonBuilder("Lib0 Series Chapter Person").Build(),
|
||||||
|
new PersonBuilder("Lib0 Series Person").Build(),
|
||||||
|
new PersonBuilder("Lib0 Chapters Person").Build(),
|
||||||
|
new PersonBuilder("Lib1 Series Chapter Person").Build(),
|
||||||
|
new PersonBuilder("Lib1 Series Person").Build(),
|
||||||
|
new PersonBuilder("Lib1 Chapters Person").Build(),
|
||||||
|
new PersonBuilder("Lib1 Chapter Age Person").Build()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<Library> CreateTestLibraries(List<Person> people)
|
||||||
|
{
|
||||||
|
var lib0 = new LibraryBuilder("lib0")
|
||||||
|
.WithSeries(new SeriesBuilder("lib0-s0")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Writer)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Writer)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Writer)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib0 Series Person"), PersonRole.Writer)
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Colorist)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Colorist)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Colorist)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib0 Chapters Person"), PersonRole.Colorist)
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Editor)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Editor)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib0 Series Chapter Person"), PersonRole.Editor)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib0 Chapters Person"), PersonRole.Editor)
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var lib1 = new LibraryBuilder("lib1")
|
||||||
|
.WithSeries(new SeriesBuilder("lib1-s0")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Letterer)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Letterer)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Letterer)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Series Person"), PersonRole.Letterer)
|
||||||
|
.WithAgeRating(AgeRating.Mature17Plus)
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Imprint)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Imprint)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Imprint)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Imprint)
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.CoverArtist)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.CoverArtist)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.CoverArtist)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.CoverArtist)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Chapter Age Person"), PersonRole.CoverArtist)
|
||||||
|
.WithAgeRating(AgeRating.Mature17Plus)
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.WithSeries(new SeriesBuilder("lib1-s1")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Inker)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Person"), PersonRole.Inker)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Inker)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Series Person"), PersonRole.Inker)
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Team)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Team)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Team)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Team)
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Series Chapter Person"), PersonRole.Translator)
|
||||||
|
.WithPerson(GetPersonByName(people, "Shared Chapters Person"), PersonRole.Translator)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Series Chapter Person"), PersonRole.Translator)
|
||||||
|
.WithPerson(GetPersonByName(people, "Lib1 Chapters Person"), PersonRole.Translator)
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
return new List<Library> { lib0, lib1 };
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Person GetPersonByName(List<Person> people, string name)
|
||||||
|
{
|
||||||
|
return people.First(p => p.Name == name);
|
||||||
|
}
|
||||||
|
|
||||||
|
private Person GetPersonByName(string name)
|
||||||
|
{
|
||||||
|
return Context.Person.First(p => p.Name == name);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Predicate<BrowsePersonDto> ContainsPersonCheck(Person person)
|
||||||
|
{
|
||||||
|
return p => p.Id == person.Id;
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetBrowsePersonDtos()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
await SeedDb();
|
||||||
|
|
||||||
|
// Get people from database for assertions
|
||||||
|
var sharedSeriesChaptersPerson = GetPersonByName("Shared Series Chapter Person");
|
||||||
|
var lib0SeriesPerson = GetPersonByName("Lib0 Series Person");
|
||||||
|
var lib1SeriesPerson = GetPersonByName("Lib1 Series Person");
|
||||||
|
var lib1ChapterAgePerson = GetPersonByName("Lib1 Chapter Age Person");
|
||||||
|
var allPeople = Context.Person.ToList();
|
||||||
|
|
||||||
|
var fullAccessPeople =
|
||||||
|
await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_fullAccess.Id, new BrowsePersonFilterDto(),
|
||||||
|
new UserParams());
|
||||||
|
Assert.Equal(allPeople.Count, fullAccessPeople.TotalCount);
|
||||||
|
|
||||||
|
foreach (var person in allPeople)
|
||||||
|
Assert.Contains(fullAccessPeople, ContainsPersonCheck(person));
|
||||||
|
|
||||||
|
// 1 series in lib0, 2 series in lib1
|
||||||
|
Assert.Equal(3, fullAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).SeriesCount);
|
||||||
|
// 3 series with each 2 chapters
|
||||||
|
Assert.Equal(6, fullAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).ChapterCount);
|
||||||
|
// 1 series in lib0
|
||||||
|
Assert.Equal(1, fullAccessPeople.First(dto => dto.Id == lib0SeriesPerson.Id).SeriesCount);
|
||||||
|
// 2 series in lib1
|
||||||
|
Assert.Equal(2, fullAccessPeople.First(dto => dto.Id == lib1SeriesPerson.Id).SeriesCount);
|
||||||
|
|
||||||
|
var restrictedAccessPeople =
|
||||||
|
await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_restrictedAccess.Id, new BrowsePersonFilterDto(),
|
||||||
|
new UserParams());
|
||||||
|
|
||||||
|
Assert.Equal(7, restrictedAccessPeople.TotalCount);
|
||||||
|
|
||||||
|
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Series Chapter Person")));
|
||||||
|
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Series Person")));
|
||||||
|
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Chapters Person")));
|
||||||
|
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Series Chapter Person")));
|
||||||
|
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Series Person")));
|
||||||
|
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Chapters Person")));
|
||||||
|
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Chapter Age Person")));
|
||||||
|
|
||||||
|
// 2 series in lib1, no series in lib0
|
||||||
|
Assert.Equal(2, restrictedAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).SeriesCount);
|
||||||
|
// 2 series with each 2 chapters
|
||||||
|
Assert.Equal(4, restrictedAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).ChapterCount);
|
||||||
|
// 2 series in lib1
|
||||||
|
Assert.Equal(2, restrictedAccessPeople.First(dto => dto.Id == lib1SeriesPerson.Id).SeriesCount);
|
||||||
|
|
||||||
|
var restrictedAgeAccessPeople = await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_restrictedAgeAccess.Id,
|
||||||
|
new BrowsePersonFilterDto(), new UserParams());
|
||||||
|
|
||||||
|
// Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
|
||||||
|
Assert.Equal(6, restrictedAgeAccessPeople.TotalCount);
|
||||||
|
|
||||||
|
// No access to the age restricted chapter
|
||||||
|
Assert.DoesNotContain(restrictedAgeAccessPeople, ContainsPersonCheck(lib1ChapterAgePerson));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetRolesForPersonByName()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
await SeedDb();
|
||||||
|
|
||||||
|
var sharedSeriesPerson = GetPersonByName("Shared Series Person");
|
||||||
|
var sharedChaptersPerson = GetPersonByName("Shared Chapters Person");
|
||||||
|
var lib1ChapterAgePerson = GetPersonByName("Lib1 Chapter Age Person");
|
||||||
|
|
||||||
|
var sharedSeriesRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _fullAccess.Id);
|
||||||
|
var chapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _fullAccess.Id);
|
||||||
|
var ageChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _fullAccess.Id);
|
||||||
|
Assert.Equal(3, sharedSeriesRoles.Count());
|
||||||
|
Assert.Equal(6, chapterRoles.Count());
|
||||||
|
Assert.Single(ageChapterRoles);
|
||||||
|
|
||||||
|
var restrictedRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _restrictedAccess.Id);
|
||||||
|
var restrictedChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _restrictedAccess.Id);
|
||||||
|
var restrictedAgePersonChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _restrictedAccess.Id);
|
||||||
|
Assert.Equal(2, restrictedRoles.Count());
|
||||||
|
Assert.Equal(4, restrictedChapterRoles.Count());
|
||||||
|
Assert.Single(restrictedAgePersonChapterRoles);
|
||||||
|
|
||||||
|
var restrictedAgeRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _restrictedAgeAccess.Id);
|
||||||
|
var restrictedAgeChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _restrictedAgeAccess.Id);
|
||||||
|
var restrictedAgeAgePersonChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _restrictedAgeAccess.Id);
|
||||||
|
Assert.Single(restrictedAgeRoles);
|
||||||
|
Assert.Equal(2, restrictedAgeChapterRoles.Count());
|
||||||
|
// Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
|
||||||
|
Assert.Empty(restrictedAgeAgePersonChapterRoles);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetPersonDtoByName()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
await SeedDb();
|
||||||
|
|
||||||
|
var allPeople = Context.Person.ToList();
|
||||||
|
|
||||||
|
foreach (var person in allPeople)
|
||||||
|
{
|
||||||
|
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName(person.Name, _fullAccess.Id));
|
||||||
|
}
|
||||||
|
|
||||||
|
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", _restrictedAccess.Id));
|
||||||
|
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Shared Series Person", _restrictedAccess.Id));
|
||||||
|
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", _restrictedAccess.Id));
|
||||||
|
|
||||||
|
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", _restrictedAgeAccess.Id));
|
||||||
|
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", _restrictedAgeAccess.Id));
|
||||||
|
// Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
|
||||||
|
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Chapter Age Person", _restrictedAgeAccess.Id));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetSeriesKnownFor()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
await SeedDb();
|
||||||
|
|
||||||
|
var sharedSeriesPerson = GetPersonByName("Shared Series Person");
|
||||||
|
var lib1SeriesPerson = GetPersonByName("Lib1 Series Person");
|
||||||
|
|
||||||
|
var series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _fullAccess.Id);
|
||||||
|
Assert.Equal(3, series.Count());
|
||||||
|
|
||||||
|
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _restrictedAccess.Id);
|
||||||
|
Assert.Equal(2, series.Count());
|
||||||
|
|
||||||
|
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _restrictedAgeAccess.Id);
|
||||||
|
Assert.Single(series);
|
||||||
|
|
||||||
|
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(lib1SeriesPerson.Id, _restrictedAgeAccess.Id);
|
||||||
|
Assert.Single(series);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetChaptersForPersonByRole()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
await SeedDb();
|
||||||
|
|
||||||
|
var sharedChaptersPerson = GetPersonByName("Shared Chapters Person");
|
||||||
|
|
||||||
|
// Lib0
|
||||||
|
var chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Colorist);
|
||||||
|
var restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Colorist);
|
||||||
|
var restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Colorist);
|
||||||
|
Assert.Single(chapters);
|
||||||
|
Assert.Empty(restrictedChapters);
|
||||||
|
Assert.Empty(restrictedAgeChapters);
|
||||||
|
|
||||||
|
// Lib1 - age restricted series
|
||||||
|
chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Imprint);
|
||||||
|
restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Imprint);
|
||||||
|
restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Imprint);
|
||||||
|
Assert.Single(chapters);
|
||||||
|
Assert.Single(restrictedChapters);
|
||||||
|
Assert.Empty(restrictedAgeChapters);
|
||||||
|
|
||||||
|
// Lib1 - not age restricted series
|
||||||
|
chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Team);
|
||||||
|
restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Team);
|
||||||
|
restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Team);
|
||||||
|
Assert.Single(chapters);
|
||||||
|
Assert.Single(restrictedChapters);
|
||||||
|
Assert.Single(restrictedAgeChapters);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -6,7 +6,6 @@ using System.Threading.Tasks;
|
||||||
using API.Data;
|
using API.Data;
|
||||||
using API.Entities;
|
using API.Entities;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Extensions;
|
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
|
|
@ -159,4 +158,6 @@ public class SeriesRepositoryTests
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO: GetSeriesDtoForLibraryIdV2Async Tests (On Deck)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
278
API.Tests/Repository/TagRepositoryTests.cs
Normal file
278
API.Tests/Repository/TagRepositoryTests.cs
Normal file
|
|
@ -0,0 +1,278 @@
|
||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.DTOs.Metadata.Browse;
|
||||||
|
using API.Entities;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Entities.Metadata;
|
||||||
|
using API.Helpers;
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Repository;
|
||||||
|
|
||||||
|
public class TagRepositoryTests : AbstractDbTest
|
||||||
|
{
|
||||||
|
private AppUser _fullAccess;
|
||||||
|
private AppUser _restrictedAccess;
|
||||||
|
private AppUser _restrictedAgeAccess;
|
||||||
|
|
||||||
|
protected override async Task ResetDb()
|
||||||
|
{
|
||||||
|
Context.Tag.RemoveRange(Context.Tag);
|
||||||
|
Context.Library.RemoveRange(Context.Library);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private TestTagSet CreateTestTags()
|
||||||
|
{
|
||||||
|
return new TestTagSet
|
||||||
|
{
|
||||||
|
SharedSeriesChaptersTag = new TagBuilder("Shared Series Chapter Tag").Build(),
|
||||||
|
SharedSeriesTag = new TagBuilder("Shared Series Tag").Build(),
|
||||||
|
SharedChaptersTag = new TagBuilder("Shared Chapters Tag").Build(),
|
||||||
|
Lib0SeriesChaptersTag = new TagBuilder("Lib0 Series Chapter Tag").Build(),
|
||||||
|
Lib0SeriesTag = new TagBuilder("Lib0 Series Tag").Build(),
|
||||||
|
Lib0ChaptersTag = new TagBuilder("Lib0 Chapters Tag").Build(),
|
||||||
|
Lib1SeriesChaptersTag = new TagBuilder("Lib1 Series Chapter Tag").Build(),
|
||||||
|
Lib1SeriesTag = new TagBuilder("Lib1 Series Tag").Build(),
|
||||||
|
Lib1ChaptersTag = new TagBuilder("Lib1 Chapters Tag").Build(),
|
||||||
|
Lib1ChapterAgeTag = new TagBuilder("Lib1 Chapter Age Tag").Build()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task SeedDbWithTags(TestTagSet tags)
|
||||||
|
{
|
||||||
|
await CreateTestUsers();
|
||||||
|
await AddTagsToContext(tags);
|
||||||
|
await CreateLibrariesWithTags(tags);
|
||||||
|
await AssignLibrariesToUsers();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task CreateTestUsers()
|
||||||
|
{
|
||||||
|
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
|
||||||
|
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
|
||||||
|
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
|
||||||
|
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
|
||||||
|
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
|
||||||
|
|
||||||
|
Context.Users.Add(_fullAccess);
|
||||||
|
Context.Users.Add(_restrictedAccess);
|
||||||
|
Context.Users.Add(_restrictedAgeAccess);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task AddTagsToContext(TestTagSet tags)
|
||||||
|
{
|
||||||
|
var allTags = tags.GetAllTags();
|
||||||
|
Context.Tag.AddRange(allTags);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task CreateLibrariesWithTags(TestTagSet tags)
|
||||||
|
{
|
||||||
|
var lib0 = new LibraryBuilder("lib0")
|
||||||
|
.WithSeries(new SeriesBuilder("lib0-s0")
|
||||||
|
.WithMetadata(new SeriesMetadata
|
||||||
|
{
|
||||||
|
Tags = [tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib0SeriesChaptersTag, tags.Lib0SeriesTag]
|
||||||
|
})
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib0SeriesChaptersTag, tags.Lib0ChaptersTag])
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var lib1 = new LibraryBuilder("lib1")
|
||||||
|
.WithSeries(new SeriesBuilder("lib1-s0")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib1SeriesChaptersTag, tags.Lib1SeriesTag])
|
||||||
|
.WithAgeRating(AgeRating.Mature17Plus)
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag, tags.Lib1ChapterAgeTag])
|
||||||
|
.WithAgeRating(AgeRating.Mature17Plus)
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.WithSeries(new SeriesBuilder("lib1-s1")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedSeriesTag, tags.Lib1SeriesChaptersTag, tags.Lib1SeriesTag])
|
||||||
|
.Build())
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
|
||||||
|
.Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2")
|
||||||
|
.WithTags([tags.SharedSeriesChaptersTag, tags.SharedChaptersTag, tags.Lib1SeriesChaptersTag, tags.Lib1ChaptersTag])
|
||||||
|
.WithAgeRating(AgeRating.Mature17Plus)
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
Context.Library.Add(lib0);
|
||||||
|
Context.Library.Add(lib1);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task AssignLibrariesToUsers()
|
||||||
|
{
|
||||||
|
var lib0 = Context.Library.First(l => l.Name == "lib0");
|
||||||
|
var lib1 = Context.Library.First(l => l.Name == "lib1");
|
||||||
|
|
||||||
|
_fullAccess.Libraries.Add(lib0);
|
||||||
|
_fullAccess.Libraries.Add(lib1);
|
||||||
|
_restrictedAccess.Libraries.Add(lib1);
|
||||||
|
_restrictedAgeAccess.Libraries.Add(lib1);
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Predicate<BrowseTagDto> ContainsTagCheck(Tag tag)
|
||||||
|
{
|
||||||
|
return t => t.Id == tag.Id;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void AssertTagPresent(IEnumerable<BrowseTagDto> tags, Tag expectedTag)
|
||||||
|
{
|
||||||
|
Assert.Contains(tags, ContainsTagCheck(expectedTag));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void AssertTagNotPresent(IEnumerable<BrowseTagDto> tags, Tag expectedTag)
|
||||||
|
{
|
||||||
|
Assert.DoesNotContain(tags, ContainsTagCheck(expectedTag));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static BrowseTagDto GetTagDto(IEnumerable<BrowseTagDto> tags, Tag tag)
|
||||||
|
{
|
||||||
|
return tags.First(dto => dto.Id == tag.Id);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetBrowseableTag_FullAccess_ReturnsAllTagsWithCorrectCounts()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await ResetDb();
|
||||||
|
var tags = CreateTestTags();
|
||||||
|
await SeedDbWithTags(tags);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var fullAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_fullAccess.Id, new UserParams());
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.Equal(tags.GetAllTags().Count, fullAccessTags.TotalCount);
|
||||||
|
|
||||||
|
foreach (var tag in tags.GetAllTags())
|
||||||
|
{
|
||||||
|
AssertTagPresent(fullAccessTags, tag);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify counts - 1 series lib0, 2 series lib1 = 3 total series
|
||||||
|
Assert.Equal(3, GetTagDto(fullAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
|
||||||
|
Assert.Equal(6, GetTagDto(fullAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
|
||||||
|
Assert.Equal(1, GetTagDto(fullAccessTags, tags.Lib0SeriesTag).SeriesCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetBrowseableTag_RestrictedAccess_ReturnsOnlyAccessibleTags()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await ResetDb();
|
||||||
|
var tags = CreateTestTags();
|
||||||
|
await SeedDbWithTags(tags);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var restrictedAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_restrictedAccess.Id, new UserParams());
|
||||||
|
|
||||||
|
// Assert - Should see: 3 shared + 4 library 1 specific = 7 tags
|
||||||
|
Assert.Equal(7, restrictedAccessTags.TotalCount);
|
||||||
|
|
||||||
|
// Verify shared and Library 1 tags are present
|
||||||
|
AssertTagPresent(restrictedAccessTags, tags.SharedSeriesChaptersTag);
|
||||||
|
AssertTagPresent(restrictedAccessTags, tags.SharedSeriesTag);
|
||||||
|
AssertTagPresent(restrictedAccessTags, tags.SharedChaptersTag);
|
||||||
|
AssertTagPresent(restrictedAccessTags, tags.Lib1SeriesChaptersTag);
|
||||||
|
AssertTagPresent(restrictedAccessTags, tags.Lib1SeriesTag);
|
||||||
|
AssertTagPresent(restrictedAccessTags, tags.Lib1ChaptersTag);
|
||||||
|
AssertTagPresent(restrictedAccessTags, tags.Lib1ChapterAgeTag);
|
||||||
|
|
||||||
|
// Verify Library 0 specific tags are not present
|
||||||
|
AssertTagNotPresent(restrictedAccessTags, tags.Lib0SeriesChaptersTag);
|
||||||
|
AssertTagNotPresent(restrictedAccessTags, tags.Lib0SeriesTag);
|
||||||
|
AssertTagNotPresent(restrictedAccessTags, tags.Lib0ChaptersTag);
|
||||||
|
|
||||||
|
// Verify counts - 2 series lib1
|
||||||
|
Assert.Equal(2, GetTagDto(restrictedAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
|
||||||
|
Assert.Equal(4, GetTagDto(restrictedAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
|
||||||
|
Assert.Equal(2, GetTagDto(restrictedAccessTags, tags.Lib1SeriesTag).SeriesCount);
|
||||||
|
Assert.Equal(4, GetTagDto(restrictedAccessTags, tags.Lib1ChaptersTag).ChapterCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetBrowseableTag_RestrictedAgeAccess_FiltersAgeRestrictedContent()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await ResetDb();
|
||||||
|
var tags = CreateTestTags();
|
||||||
|
await SeedDbWithTags(tags);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var restrictedAgeAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_restrictedAgeAccess.Id, new UserParams());
|
||||||
|
|
||||||
|
// Assert - Should see: 3 shared + 3 lib1 specific = 6 tags (age-restricted tag filtered out)
|
||||||
|
Assert.Equal(6, restrictedAgeAccessTags.TotalCount);
|
||||||
|
|
||||||
|
// Verify accessible tags are present
|
||||||
|
AssertTagPresent(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag);
|
||||||
|
AssertTagPresent(restrictedAgeAccessTags, tags.SharedSeriesTag);
|
||||||
|
AssertTagPresent(restrictedAgeAccessTags, tags.SharedChaptersTag);
|
||||||
|
AssertTagPresent(restrictedAgeAccessTags, tags.Lib1SeriesChaptersTag);
|
||||||
|
AssertTagPresent(restrictedAgeAccessTags, tags.Lib1SeriesTag);
|
||||||
|
AssertTagPresent(restrictedAgeAccessTags, tags.Lib1ChaptersTag);
|
||||||
|
|
||||||
|
// Verify age-restricted tag is filtered out
|
||||||
|
AssertTagNotPresent(restrictedAgeAccessTags, tags.Lib1ChapterAgeTag);
|
||||||
|
|
||||||
|
// Verify counts - 1 series lib1 (age-restricted series filtered out)
|
||||||
|
Assert.Equal(1, GetTagDto(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag).SeriesCount);
|
||||||
|
Assert.Equal(2, GetTagDto(restrictedAgeAccessTags, tags.SharedSeriesChaptersTag).ChapterCount);
|
||||||
|
Assert.Equal(1, GetTagDto(restrictedAgeAccessTags, tags.Lib1SeriesTag).SeriesCount);
|
||||||
|
Assert.Equal(2, GetTagDto(restrictedAgeAccessTags, tags.Lib1ChaptersTag).ChapterCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
private class TestTagSet
|
||||||
|
{
|
||||||
|
public Tag SharedSeriesChaptersTag { get; set; }
|
||||||
|
public Tag SharedSeriesTag { get; set; }
|
||||||
|
public Tag SharedChaptersTag { get; set; }
|
||||||
|
public Tag Lib0SeriesChaptersTag { get; set; }
|
||||||
|
public Tag Lib0SeriesTag { get; set; }
|
||||||
|
public Tag Lib0ChaptersTag { get; set; }
|
||||||
|
public Tag Lib1SeriesChaptersTag { get; set; }
|
||||||
|
public Tag Lib1SeriesTag { get; set; }
|
||||||
|
public Tag Lib1ChaptersTag { get; set; }
|
||||||
|
public Tag Lib1ChapterAgeTag { get; set; }
|
||||||
|
|
||||||
|
public List<Tag> GetAllTags()
|
||||||
|
{
|
||||||
|
return
|
||||||
|
[
|
||||||
|
SharedSeriesChaptersTag, SharedSeriesTag, SharedChaptersTag,
|
||||||
|
Lib0SeriesChaptersTag, Lib0SeriesTag, Lib0ChaptersTag,
|
||||||
|
Lib1SeriesChaptersTag, Lib1SeriesTag, Lib1ChaptersTag, Lib1ChapterAgeTag
|
||||||
|
];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -7,7 +7,6 @@ using System.Linq;
|
||||||
using API.Archive;
|
using API.Archive;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using EasyCaching.Core;
|
|
||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using NetVips;
|
using NetVips;
|
||||||
using NSubstitute;
|
using NSubstitute;
|
||||||
|
|
@ -29,7 +28,7 @@ public class ArchiveServiceTests
|
||||||
{
|
{
|
||||||
_testOutputHelper = testOutputHelper;
|
_testOutputHelper = testOutputHelper;
|
||||||
_archiveService = new ArchiveService(_logger, _directoryService,
|
_archiveService = new ArchiveService(_logger, _directoryService,
|
||||||
new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService, Substitute.For<IEasyCachingProviderFactory>()),
|
new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService),
|
||||||
Substitute.For<IMediaErrorService>());
|
Substitute.For<IMediaErrorService>());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -167,7 +166,7 @@ public class ArchiveServiceTests
|
||||||
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
|
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
|
||||||
{
|
{
|
||||||
var ds = Substitute.For<DirectoryService>(_directoryServiceLogger, new FileSystem());
|
var ds = Substitute.For<DirectoryService>(_directoryServiceLogger, new FileSystem());
|
||||||
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds, Substitute.For<IEasyCachingProviderFactory>());
|
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), ds);
|
||||||
var archiveService = Substitute.For<ArchiveService>(_logger, ds, imageService, Substitute.For<IMediaErrorService>());
|
var archiveService = Substitute.For<ArchiveService>(_logger, ds, imageService, Substitute.For<IMediaErrorService>());
|
||||||
|
|
||||||
var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"));
|
var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"));
|
||||||
|
|
@ -198,7 +197,7 @@ public class ArchiveServiceTests
|
||||||
[InlineData("sorting.zip", "sorting.expected.png")]
|
[InlineData("sorting.zip", "sorting.expected.png")]
|
||||||
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
|
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
|
||||||
{
|
{
|
||||||
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService, Substitute.For<IEasyCachingProviderFactory>());
|
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
|
||||||
var archiveService = Substitute.For<ArchiveService>(_logger,
|
var archiveService = Substitute.For<ArchiveService>(_logger,
|
||||||
new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService,
|
new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService,
|
||||||
Substitute.For<IMediaErrorService>());
|
Substitute.For<IMediaErrorService>());
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,8 @@
|
||||||
using System.Collections.Generic;
|
using System.Data.Common;
|
||||||
using System.Data.Common;
|
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using API.Data;
|
using API.Data;
|
||||||
using API.Entities;
|
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
|
|
@ -21,7 +19,7 @@ using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Services;
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
public class BackupServiceTests
|
public class BackupServiceTests: AbstractFsTest
|
||||||
{
|
{
|
||||||
private readonly ILogger<BackupService> _logger = Substitute.For<ILogger<BackupService>>();
|
private readonly ILogger<BackupService> _logger = Substitute.For<ILogger<BackupService>>();
|
||||||
private readonly IUnitOfWork _unitOfWork;
|
private readonly IUnitOfWork _unitOfWork;
|
||||||
|
|
@ -31,13 +29,6 @@ public class BackupServiceTests
|
||||||
private readonly DbConnection _connection;
|
private readonly DbConnection _connection;
|
||||||
private readonly DataContext _context;
|
private readonly DataContext _context;
|
||||||
|
|
||||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
|
||||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
|
||||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
|
||||||
private const string LogDirectory = "C:/kavita/config/logs/";
|
|
||||||
private const string ConfigDirectory = "C:/kavita/config/";
|
|
||||||
private const string BookmarkDirectory = "C:/kavita/config/bookmarks";
|
|
||||||
private const string ThemesDirectory = "C:/kavita/config/theme";
|
|
||||||
|
|
||||||
public BackupServiceTests()
|
public BackupServiceTests()
|
||||||
{
|
{
|
||||||
|
|
@ -82,7 +73,7 @@ public class BackupServiceTests
|
||||||
|
|
||||||
_context.ServerSetting.Update(setting);
|
_context.ServerSetting.Update(setting);
|
||||||
_context.Library.Add(new LibraryBuilder("Manga")
|
_context.Library.Add(new LibraryBuilder("Manga")
|
||||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||||
.Build());
|
.Build());
|
||||||
return await _context.SaveChangesAsync() > 0;
|
return await _context.SaveChangesAsync() > 0;
|
||||||
}
|
}
|
||||||
|
|
@ -94,22 +85,6 @@ public class BackupServiceTests
|
||||||
await _context.SaveChangesAsync();
|
await _context.SaveChangesAsync();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static MockFileSystem CreateFileSystem()
|
|
||||||
{
|
|
||||||
var fileSystem = new MockFileSystem();
|
|
||||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
|
||||||
fileSystem.AddDirectory("C:/kavita/config/");
|
|
||||||
fileSystem.AddDirectory(CacheDirectory);
|
|
||||||
fileSystem.AddDirectory(CoverImageDirectory);
|
|
||||||
fileSystem.AddDirectory(BackupDirectory);
|
|
||||||
fileSystem.AddDirectory(LogDirectory);
|
|
||||||
fileSystem.AddDirectory(ThemesDirectory);
|
|
||||||
fileSystem.AddDirectory(BookmarkDirectory);
|
|
||||||
fileSystem.AddDirectory("C:/data/");
|
|
||||||
|
|
||||||
return fileSystem;
|
|
||||||
}
|
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,8 @@
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.IO.Abstractions;
|
using System.IO.Abstractions;
|
||||||
|
using API.Entities.Enums;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using EasyCaching.Core;
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using NSubstitute;
|
using NSubstitute;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
@ -17,7 +18,7 @@ public class BookServiceTests
|
||||||
{
|
{
|
||||||
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
|
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
|
||||||
_bookService = new BookService(_logger, directoryService,
|
_bookService = new BookService(_logger, directoryService,
|
||||||
new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService, Substitute.For<IEasyCachingProviderFactory>())
|
new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService)
|
||||||
, Substitute.For<IMediaErrorService>());
|
, Substitute.For<IMediaErrorService>());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -81,4 +82,64 @@ public class BookServiceTests
|
||||||
Assert.Equal("Accel World", comicInfo.Series);
|
Assert.Equal("Accel World", comicInfo.Series);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void ShouldHaveComicInfoForPdf()
|
||||||
|
{
|
||||||
|
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||||
|
var document = Path.Join(testDirectory, "test.pdf");
|
||||||
|
var comicInfo = _bookService.GetComicInfo(document);
|
||||||
|
Assert.NotNull(comicInfo);
|
||||||
|
Assert.Equal("Variations Chromatiques de concert", comicInfo.Title);
|
||||||
|
Assert.Equal("Georges Bizet \\(1838-1875\\)", comicInfo.Writer);
|
||||||
|
}
|
||||||
|
|
||||||
|
//[Fact]
|
||||||
|
public void ShouldUsePdfInfoDict()
|
||||||
|
{
|
||||||
|
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Library/Books/PDFs");
|
||||||
|
var document = Path.Join(testDirectory, "Rollo at Work SP01.pdf");
|
||||||
|
var comicInfo = _bookService.GetComicInfo(document);
|
||||||
|
Assert.NotNull(comicInfo);
|
||||||
|
Assert.Equal("Rollo at Work", comicInfo.Title);
|
||||||
|
Assert.Equal("Jacob Abbott", comicInfo.Writer);
|
||||||
|
Assert.Equal(2008, comicInfo.Year);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void ShouldHandleIndirectPdfObjects()
|
||||||
|
{
|
||||||
|
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||||
|
var document = Path.Join(testDirectory, "indirect.pdf");
|
||||||
|
var comicInfo = _bookService.GetComicInfo(document);
|
||||||
|
Assert.NotNull(comicInfo);
|
||||||
|
Assert.Equal(2018, comicInfo.Year);
|
||||||
|
Assert.Equal(8, comicInfo.Month);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void FailGracefullyWithEncryptedPdf()
|
||||||
|
{
|
||||||
|
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||||
|
var document = Path.Join(testDirectory, "encrypted.pdf");
|
||||||
|
var comicInfo = _bookService.GetComicInfo(document);
|
||||||
|
Assert.Null(comicInfo);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void SeriesFallBackToMetadataTitle()
|
||||||
|
{
|
||||||
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
|
||||||
|
var pdfParser = new PdfParser(ds);
|
||||||
|
|
||||||
|
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
|
||||||
|
var filePath = Path.Join(testDirectory, "Bizet-Variations_Chromatiques_de_concert_Theme_A4.pdf");
|
||||||
|
|
||||||
|
var comicInfo = _bookService.GetComicInfo(filePath);
|
||||||
|
Assert.NotNull(comicInfo);
|
||||||
|
|
||||||
|
var parserInfo = pdfParser.Parse(filePath, testDirectory, ds.GetParentDirectoryName(testDirectory), LibraryType.Book, true, comicInfo);
|
||||||
|
Assert.NotNull(parserInfo);
|
||||||
|
Assert.Equal(parserInfo.Title, comicInfo.Title);
|
||||||
|
Assert.Equal(parserInfo.Series, comicInfo.Title);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -9,12 +9,9 @@ using API.Data.Repositories;
|
||||||
using API.DTOs.Reader;
|
using API.DTOs.Reader;
|
||||||
using API.Entities;
|
using API.Entities;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Entities.Metadata;
|
|
||||||
using API.Extensions;
|
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using API.SignalR;
|
|
||||||
using AutoMapper;
|
using AutoMapper;
|
||||||
using Microsoft.Data.Sqlite;
|
using Microsoft.Data.Sqlite;
|
||||||
using Microsoft.EntityFrameworkCore;
|
using Microsoft.EntityFrameworkCore;
|
||||||
|
|
@ -25,17 +22,12 @@ using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Services;
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
public class BookmarkServiceTests
|
public class BookmarkServiceTests: AbstractFsTest
|
||||||
{
|
{
|
||||||
private readonly IUnitOfWork _unitOfWork;
|
private readonly IUnitOfWork _unitOfWork;
|
||||||
private readonly DbConnection _connection;
|
private readonly DbConnection _connection;
|
||||||
private readonly DataContext _context;
|
private readonly DataContext _context;
|
||||||
|
|
||||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
|
||||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
|
||||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
|
||||||
private const string BookmarkDirectory = "C:/kavita/config/bookmarks/";
|
|
||||||
|
|
||||||
|
|
||||||
public BookmarkServiceTests()
|
public BookmarkServiceTests()
|
||||||
{
|
{
|
||||||
|
|
@ -88,7 +80,7 @@ Substitute.For<IMediaConversionService>());
|
||||||
_context.ServerSetting.Update(setting);
|
_context.ServerSetting.Update(setting);
|
||||||
|
|
||||||
_context.Library.Add(new LibraryBuilder("Manga")
|
_context.Library.Add(new LibraryBuilder("Manga")
|
||||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||||
.Build());
|
.Build());
|
||||||
return await _context.SaveChangesAsync() > 0;
|
return await _context.SaveChangesAsync() > 0;
|
||||||
}
|
}
|
||||||
|
|
@ -102,20 +94,6 @@ Substitute.For<IMediaConversionService>());
|
||||||
await _context.SaveChangesAsync();
|
await _context.SaveChangesAsync();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static MockFileSystem CreateFileSystem()
|
|
||||||
{
|
|
||||||
var fileSystem = new MockFileSystem();
|
|
||||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
|
||||||
fileSystem.AddDirectory("C:/kavita/config/");
|
|
||||||
fileSystem.AddDirectory(CacheDirectory);
|
|
||||||
fileSystem.AddDirectory(CoverImageDirectory);
|
|
||||||
fileSystem.AddDirectory(BackupDirectory);
|
|
||||||
fileSystem.AddDirectory(BookmarkDirectory);
|
|
||||||
fileSystem.AddDirectory("C:/data/");
|
|
||||||
|
|
||||||
return fileSystem;
|
|
||||||
}
|
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
#region BookmarkPage
|
#region BookmarkPage
|
||||||
|
|
@ -132,7 +110,7 @@ Substitute.For<IMediaConversionService>());
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithFormat(MangaFormat.Epub)
|
.WithFormat(MangaFormat.Epub)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -180,8 +158,8 @@ Substitute.For<IMediaConversionService>());
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithFormat(MangaFormat.Epub)
|
.WithFormat(MangaFormat.Epub)
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("0")
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
.Build();
|
.Build();
|
||||||
|
|
@ -246,7 +224,7 @@ Substitute.For<IMediaConversionService>());
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithFormat(MangaFormat.Epub)
|
.WithFormat(MangaFormat.Epub)
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -322,7 +300,7 @@ Substitute.For<IMediaConversionService>());
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithFormat(MangaFormat.Epub)
|
.WithFormat(MangaFormat.Epub)
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -375,7 +353,7 @@ Substitute.For<IMediaConversionService>());
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithFormat(MangaFormat.Epub)
|
.WithFormat(MangaFormat.Epub)
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -428,7 +406,7 @@ Substitute.For<IMediaConversionService>());
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithFormat(MangaFormat.Epub)
|
.WithFormat(MangaFormat.Epub)
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.Build())
|
.Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
|
||||||
|
|
@ -1,12 +1,10 @@
|
||||||
using System.Collections.Generic;
|
using System.Data.Common;
|
||||||
using System.Data.Common;
|
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using API.Data;
|
using API.Data;
|
||||||
using API.Data.Metadata;
|
using API.Data.Metadata;
|
||||||
using API.Entities;
|
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
|
|
@ -52,17 +50,17 @@ internal class MockReadingItemServiceForCacheService : IReadingItemService
|
||||||
throw new System.NotImplementedException();
|
throw new System.NotImplementedException();
|
||||||
}
|
}
|
||||||
|
|
||||||
public ParserInfo Parse(string path, string rootPath, LibraryType type)
|
public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata = true)
|
||||||
{
|
{
|
||||||
throw new System.NotImplementedException();
|
throw new System.NotImplementedException();
|
||||||
}
|
}
|
||||||
|
|
||||||
public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
|
public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata = true)
|
||||||
{
|
{
|
||||||
throw new System.NotImplementedException();
|
throw new System.NotImplementedException();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
public class CacheServiceTests
|
public class CacheServiceTests: AbstractFsTest
|
||||||
{
|
{
|
||||||
private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
|
private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
|
||||||
private readonly IUnitOfWork _unitOfWork;
|
private readonly IUnitOfWork _unitOfWork;
|
||||||
|
|
@ -71,11 +69,6 @@ public class CacheServiceTests
|
||||||
private readonly DbConnection _connection;
|
private readonly DbConnection _connection;
|
||||||
private readonly DataContext _context;
|
private readonly DataContext _context;
|
||||||
|
|
||||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
|
||||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
|
||||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
|
||||||
private const string DataDirectory = "C:/data/";
|
|
||||||
|
|
||||||
public CacheServiceTests()
|
public CacheServiceTests()
|
||||||
{
|
{
|
||||||
var contextOptions = new DbContextOptionsBuilder()
|
var contextOptions = new DbContextOptionsBuilder()
|
||||||
|
|
@ -118,7 +111,7 @@ public class CacheServiceTests
|
||||||
_context.ServerSetting.Update(setting);
|
_context.ServerSetting.Update(setting);
|
||||||
|
|
||||||
_context.Library.Add(new LibraryBuilder("Manga")
|
_context.Library.Add(new LibraryBuilder("Manga")
|
||||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||||
.Build());
|
.Build());
|
||||||
return await _context.SaveChangesAsync() > 0;
|
return await _context.SaveChangesAsync() > 0;
|
||||||
}
|
}
|
||||||
|
|
@ -130,19 +123,6 @@ public class CacheServiceTests
|
||||||
await _context.SaveChangesAsync();
|
await _context.SaveChangesAsync();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static MockFileSystem CreateFileSystem()
|
|
||||||
{
|
|
||||||
var fileSystem = new MockFileSystem();
|
|
||||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
|
||||||
fileSystem.AddDirectory("C:/kavita/config/");
|
|
||||||
fileSystem.AddDirectory(CacheDirectory);
|
|
||||||
fileSystem.AddDirectory(CoverImageDirectory);
|
|
||||||
fileSystem.AddDirectory(BackupDirectory);
|
|
||||||
fileSystem.AddDirectory(DataDirectory);
|
|
||||||
|
|
||||||
return fileSystem;
|
|
||||||
}
|
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
#region Ensure
|
#region Ensure
|
||||||
|
|
@ -156,7 +136,9 @@ public class CacheServiceTests
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
|
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
|
||||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
Substitute.For<IBookService>(),
|
||||||
|
Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||||
|
Substitute.For<IBookmarkService>());
|
||||||
|
|
||||||
await ResetDB();
|
await ResetDB();
|
||||||
var s = new SeriesBuilder("Test").Build();
|
var s = new SeriesBuilder("Test").Build();
|
||||||
|
|
@ -231,7 +213,8 @@ public class CacheServiceTests
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
|
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
|
||||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||||
|
Substitute.For<IBookmarkService>());
|
||||||
|
|
||||||
cleanupService.CleanupChapters(new []{1, 3});
|
cleanupService.CleanupChapters(new []{1, 3});
|
||||||
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
|
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
|
||||||
|
|
@ -252,14 +235,15 @@ public class CacheServiceTests
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||||
|
Substitute.For<IBookmarkService>());
|
||||||
|
|
||||||
var c = new ChapterBuilder("1")
|
var c = new ChapterBuilder("1")
|
||||||
.WithFile(new MangaFileBuilder($"{DataDirectory}1.epub", MangaFormat.Epub).Build())
|
.WithFile(new MangaFileBuilder($"{DataDirectory}1.epub", MangaFormat.Epub).Build())
|
||||||
.WithFile(new MangaFileBuilder($"{DataDirectory}2.epub", MangaFormat.Epub).Build())
|
.WithFile(new MangaFileBuilder($"{DataDirectory}2.epub", MangaFormat.Epub).Build())
|
||||||
.Build();
|
.Build();
|
||||||
cs.GetCachedFile(c);
|
cs.GetCachedFile(c);
|
||||||
Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c));
|
Assert.Equal($"{DataDirectory}1.epub", cs.GetCachedFile(c));
|
||||||
}
|
}
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
@ -292,7 +276,8 @@ public class CacheServiceTests
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||||
|
Substitute.For<IBookmarkService>());
|
||||||
|
|
||||||
// Flatten to prepare for how GetFullPath expects
|
// Flatten to prepare for how GetFullPath expects
|
||||||
ds.Flatten($"{CacheDirectory}1/");
|
ds.Flatten($"{CacheDirectory}1/");
|
||||||
|
|
@ -335,7 +320,8 @@ public class CacheServiceTests
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||||
|
Substitute.For<IBookmarkService>());
|
||||||
|
|
||||||
// Flatten to prepare for how GetFullPath expects
|
// Flatten to prepare for how GetFullPath expects
|
||||||
ds.Flatten($"{CacheDirectory}1/");
|
ds.Flatten($"{CacheDirectory}1/");
|
||||||
|
|
@ -375,7 +361,8 @@ public class CacheServiceTests
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||||
|
Substitute.For<IBookmarkService>());
|
||||||
|
|
||||||
// Flatten to prepare for how GetFullPath expects
|
// Flatten to prepare for how GetFullPath expects
|
||||||
ds.Flatten($"{CacheDirectory}1/");
|
ds.Flatten($"{CacheDirectory}1/");
|
||||||
|
|
@ -419,7 +406,8 @@ public class CacheServiceTests
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cs = new CacheService(_logger, _unitOfWork, ds,
|
var cs = new CacheService(_logger, _unitOfWork, ds,
|
||||||
new ReadingItemService(Substitute.For<IArchiveService>(),
|
new ReadingItemService(Substitute.For<IArchiveService>(),
|
||||||
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
|
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
|
||||||
|
Substitute.For<IBookmarkService>());
|
||||||
|
|
||||||
// Flatten to prepare for how GetFullPath expects
|
// Flatten to prepare for how GetFullPath expects
|
||||||
ds.Flatten($"{CacheDirectory}1/");
|
ds.Flatten($"{CacheDirectory}1/");
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,13 @@
|
||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.IO.Abstractions;
|
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using API.Data;
|
|
||||||
using API.Data.Repositories;
|
using API.Data.Repositories;
|
||||||
using API.DTOs.Filtering;
|
using API.DTOs.Filtering;
|
||||||
using API.Entities;
|
using API.Entities;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Entities.Metadata;
|
|
||||||
using API.Extensions;
|
using API.Extensions;
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
|
|
@ -30,14 +27,13 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
|
private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
|
||||||
private readonly IReaderService _readerService;
|
private readonly IReaderService _readerService;
|
||||||
|
|
||||||
|
|
||||||
public CleanupServiceTests() : base()
|
public CleanupServiceTests() : base()
|
||||||
{
|
{
|
||||||
_context.Library.Add(new LibraryBuilder("Manga")
|
Context.Library.Add(new LibraryBuilder("Manga")
|
||||||
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
|
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
|
||||||
.Build());
|
.Build());
|
||||||
|
|
||||||
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(), Substitute.For<IEventHub>(),
|
_readerService = new ReaderService(UnitOfWork, Substitute.For<ILogger<ReaderService>>(), Substitute.For<IEventHub>(),
|
||||||
Substitute.For<IImageService>(),
|
Substitute.For<IImageService>(),
|
||||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()), Substitute.For<IScrobblingService>());
|
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()), Substitute.For<IScrobblingService>());
|
||||||
}
|
}
|
||||||
|
|
@ -47,11 +43,11 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
|
|
||||||
protected override async Task ResetDb()
|
protected override async Task ResetDb()
|
||||||
{
|
{
|
||||||
_context.Series.RemoveRange(_context.Series.ToList());
|
Context.Series.RemoveRange(Context.Series.ToList());
|
||||||
_context.Users.RemoveRange(_context.Users.ToList());
|
Context.Users.RemoveRange(Context.Users.ToList());
|
||||||
_context.AppUserBookmark.RemoveRange(_context.AppUserBookmark.ToList());
|
Context.AppUserBookmark.RemoveRange(Context.AppUserBookmark.ToList());
|
||||||
|
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
}
|
}
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
@ -72,18 +68,18 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
var s = new SeriesBuilder("Test 1").Build();
|
var s = new SeriesBuilder("Test 1").Build();
|
||||||
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
|
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
|
||||||
s.LibraryId = 1;
|
s.LibraryId = 1;
|
||||||
_context.Series.Add(s);
|
Context.Series.Add(s);
|
||||||
s = new SeriesBuilder("Test 2").Build();
|
s = new SeriesBuilder("Test 2").Build();
|
||||||
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
|
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
|
||||||
s.LibraryId = 1;
|
s.LibraryId = 1;
|
||||||
_context.Series.Add(s);
|
Context.Series.Add(s);
|
||||||
s = new SeriesBuilder("Test 3").Build();
|
s = new SeriesBuilder("Test 3").Build();
|
||||||
s.CoverImage = $"{ImageService.GetSeriesFormat(1000)}.jpg";
|
s.CoverImage = $"{ImageService.GetSeriesFormat(1000)}.jpg";
|
||||||
s.LibraryId = 1;
|
s.LibraryId = 1;
|
||||||
_context.Series.Add(s);
|
Context.Series.Add(s);
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
|
|
||||||
await cleanupService.DeleteSeriesCoverImages();
|
await cleanupService.DeleteSeriesCoverImages();
|
||||||
|
|
@ -106,16 +102,16 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
var s = new SeriesBuilder("Test 1").Build();
|
var s = new SeriesBuilder("Test 1").Build();
|
||||||
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
|
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
|
||||||
s.LibraryId = 1;
|
s.LibraryId = 1;
|
||||||
_context.Series.Add(s);
|
Context.Series.Add(s);
|
||||||
s = new SeriesBuilder("Test 2").Build();
|
s = new SeriesBuilder("Test 2").Build();
|
||||||
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
|
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
|
||||||
s.LibraryId = 1;
|
s.LibraryId = 1;
|
||||||
_context.Series.Add(s);
|
Context.Series.Add(s);
|
||||||
|
|
||||||
|
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
|
|
||||||
await cleanupService.DeleteSeriesCoverImages();
|
await cleanupService.DeleteSeriesCoverImages();
|
||||||
|
|
@ -137,18 +133,18 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
// Add 2 series with cover images
|
// Add 2 series with cover images
|
||||||
_context.Series.Add(new SeriesBuilder("Test 1")
|
Context.Series.Add(new SeriesBuilder("Test 1")
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("0").WithCoverImage("v01_c01.jpg").Build())
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c01.jpg").Build())
|
||||||
.WithCoverImage("v01_c01.jpg")
|
.WithCoverImage("v01_c01.jpg")
|
||||||
.Build())
|
.Build())
|
||||||
.WithCoverImage("series_01.jpg")
|
.WithCoverImage("series_01.jpg")
|
||||||
.WithLibraryId(1)
|
.WithLibraryId(1)
|
||||||
.Build());
|
.Build());
|
||||||
|
|
||||||
_context.Series.Add(new SeriesBuilder("Test 2")
|
Context.Series.Add(new SeriesBuilder("Test 2")
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("0").WithCoverImage("v01_c03.jpg").Build())
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c03.jpg").Build())
|
||||||
.WithCoverImage("v01_c03.jpg")
|
.WithCoverImage("v01_c03.jpg")
|
||||||
.Build())
|
.Build())
|
||||||
.WithCoverImage("series_03.jpg")
|
.WithCoverImage("series_03.jpg")
|
||||||
|
|
@ -156,9 +152,9 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
.Build());
|
.Build());
|
||||||
|
|
||||||
|
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
|
|
||||||
await cleanupService.DeleteChapterCoverImages();
|
await cleanupService.DeleteChapterCoverImages();
|
||||||
|
|
@ -167,53 +163,53 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
}
|
}
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
#region DeleteTagCoverImages
|
// #region DeleteTagCoverImages
|
||||||
|
//
|
||||||
[Fact]
|
// [Fact]
|
||||||
public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
|
// public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
|
||||||
{
|
// {
|
||||||
var filesystem = CreateFileSystem();
|
// var filesystem = CreateFileSystem();
|
||||||
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1)}.jpg", new MockFileData(""));
|
// filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1)}.jpg", new MockFileData(""));
|
||||||
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(2)}.jpg", new MockFileData(""));
|
// filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(2)}.jpg", new MockFileData(""));
|
||||||
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
|
// filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
|
||||||
|
//
|
||||||
// Delete all Series to reset state
|
// // Delete all Series to reset state
|
||||||
await ResetDb();
|
// await ResetDb();
|
||||||
|
//
|
||||||
// Add 2 series with cover images
|
// // Add 2 series with cover images
|
||||||
|
//
|
||||||
_context.Series.Add(new SeriesBuilder("Test 1")
|
// _context.Series.Add(new SeriesBuilder("Test 1")
|
||||||
.WithMetadata(new SeriesMetadataBuilder()
|
// .WithMetadata(new SeriesMetadataBuilder()
|
||||||
.WithCollectionTag(new CollectionTagBuilder("Something")
|
// .WithCollectionTag(new AppUserCollectionBuilder("Something")
|
||||||
.WithCoverImage($"{ImageService.GetCollectionTagFormat(1)}.jpg")
|
// .WithCoverImage($"{ImageService.GetCollectionTagFormat(1)}.jpg")
|
||||||
.Build())
|
// .Build())
|
||||||
.Build())
|
// .Build())
|
||||||
.WithCoverImage($"{ImageService.GetSeriesFormat(1)}.jpg")
|
// .WithCoverImage($"{ImageService.GetSeriesFormat(1)}.jpg")
|
||||||
.WithLibraryId(1)
|
// .WithLibraryId(1)
|
||||||
.Build());
|
// .Build());
|
||||||
|
//
|
||||||
_context.Series.Add(new SeriesBuilder("Test 2")
|
// _context.Series.Add(new SeriesBuilder("Test 2")
|
||||||
.WithMetadata(new SeriesMetadataBuilder()
|
// .WithMetadata(new SeriesMetadataBuilder()
|
||||||
.WithCollectionTag(new CollectionTagBuilder("Something")
|
// .WithCollectionTag(new AppUserCollectionBuilder("Something")
|
||||||
.WithCoverImage($"{ImageService.GetCollectionTagFormat(2)}.jpg")
|
// .WithCoverImage($"{ImageService.GetCollectionTagFormat(2)}.jpg")
|
||||||
.Build())
|
// .Build())
|
||||||
.Build())
|
// .Build())
|
||||||
.WithCoverImage($"{ImageService.GetSeriesFormat(3)}.jpg")
|
// .WithCoverImage($"{ImageService.GetSeriesFormat(3)}.jpg")
|
||||||
.WithLibraryId(1)
|
// .WithLibraryId(1)
|
||||||
.Build());
|
// .Build());
|
||||||
|
//
|
||||||
|
//
|
||||||
await _context.SaveChangesAsync();
|
// await _context.SaveChangesAsync();
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
// var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
||||||
ds);
|
// ds);
|
||||||
|
//
|
||||||
await cleanupService.DeleteTagCoverImages();
|
// await cleanupService.DeleteTagCoverImages();
|
||||||
|
//
|
||||||
Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
|
// Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
#endregion
|
// #endregion
|
||||||
|
|
||||||
#region DeleteReadingListCoverImages
|
#region DeleteReadingListCoverImages
|
||||||
[Fact]
|
[Fact]
|
||||||
|
|
@ -227,7 +223,7 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
// Delete all Series to reset state
|
// Delete all Series to reset state
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
_context.Users.Add(new AppUser()
|
Context.Users.Add(new AppUser()
|
||||||
{
|
{
|
||||||
UserName = "Joe",
|
UserName = "Joe",
|
||||||
ReadingLists = new List<ReadingList>()
|
ReadingLists = new List<ReadingList>()
|
||||||
|
|
@ -243,9 +239,9 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
|
|
||||||
await cleanupService.DeleteReadingListCoverImages();
|
await cleanupService.DeleteReadingListCoverImages();
|
||||||
|
|
@ -264,7 +260,7 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
filesystem.AddFile($"{CacheDirectory}02.jpg", new MockFileData(""));
|
filesystem.AddFile($"{CacheDirectory}02.jpg", new MockFileData(""));
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
cleanupService.CleanupCacheAndTempDirectories();
|
cleanupService.CleanupCacheAndTempDirectories();
|
||||||
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
|
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
|
||||||
|
|
@ -278,7 +274,7 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
filesystem.AddFile($"{CacheDirectory}subdir/02.jpg", new MockFileData(""));
|
filesystem.AddFile($"{CacheDirectory}subdir/02.jpg", new MockFileData(""));
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
cleanupService.CleanupCacheAndTempDirectories();
|
cleanupService.CleanupCacheAndTempDirectories();
|
||||||
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
|
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
|
||||||
|
|
@ -301,7 +297,7 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
filesystem.AddFile($"{BackupDirectory}randomfile.zip", filesystemFile);
|
filesystem.AddFile($"{BackupDirectory}randomfile.zip", filesystemFile);
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
await cleanupService.CleanupBackups();
|
await cleanupService.CleanupBackups();
|
||||||
Assert.Single(ds.GetFiles(BackupDirectory, searchOption: SearchOption.AllDirectories));
|
Assert.Single(ds.GetFiles(BackupDirectory, searchOption: SearchOption.AllDirectories));
|
||||||
|
|
@ -323,7 +319,7 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
});
|
});
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
await cleanupService.CleanupBackups();
|
await cleanupService.CleanupBackups();
|
||||||
Assert.True(filesystem.File.Exists($"{BackupDirectory}randomfile.zip"));
|
Assert.True(filesystem.File.Exists($"{BackupDirectory}randomfile.zip"));
|
||||||
|
|
@ -347,7 +343,7 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
}
|
}
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
await cleanupService.CleanupLogs();
|
await cleanupService.CleanupLogs();
|
||||||
Assert.Single(ds.GetFiles(LogDirectory, searchOption: SearchOption.AllDirectories));
|
Assert.Single(ds.GetFiles(LogDirectory, searchOption: SearchOption.AllDirectories));
|
||||||
|
|
@ -376,7 +372,7 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
|
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
|
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
|
||||||
ds);
|
ds);
|
||||||
await cleanupService.CleanupLogs();
|
await cleanupService.CleanupLogs();
|
||||||
Assert.True(filesystem.File.Exists($"{LogDirectory}kavita20200911.log"));
|
Assert.True(filesystem.File.Exists($"{LogDirectory}kavita20200911.log"));
|
||||||
|
|
@ -389,84 +385,85 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task CleanupDbEntries_CleanupAbandonedChapters()
|
public async Task CleanupDbEntries_CleanupAbandonedChapters()
|
||||||
{
|
{
|
||||||
var c = new ChapterBuilder("0")
|
var c = new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
.WithPages(1)
|
.WithPages(1)
|
||||||
.Build();
|
.Build();
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithFormat(MangaFormat.Epub)
|
.WithFormat(MangaFormat.Epub)
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithNumber(1)
|
|
||||||
.WithChapter(c)
|
.WithChapter(c)
|
||||||
.Build())
|
.Build())
|
||||||
.Build();
|
.Build();
|
||||||
series.Library = new LibraryBuilder("Test LIb").Build();
|
series.Library = new LibraryBuilder("Test LIb").Build();
|
||||||
|
|
||||||
_context.Series.Add(series);
|
Context.Series.Add(series);
|
||||||
|
|
||||||
|
|
||||||
_context.AppUser.Add(new AppUser()
|
Context.AppUser.Add(new AppUser()
|
||||||
{
|
{
|
||||||
UserName = "majora2007"
|
UserName = "majora2007"
|
||||||
});
|
});
|
||||||
|
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
|
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
|
||||||
await _readerService.MarkChaptersUntilAsRead(user, 1, 5);
|
await _readerService.MarkChaptersUntilAsRead(user, 1, 5);
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
// Validate correct chapters have read status
|
// Validate correct chapters have read status
|
||||||
Assert.Equal(1, (await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1)).PagesRead);
|
Assert.Equal(1, (await UnitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1)).PagesRead);
|
||||||
|
|
||||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||||
Substitute.For<IEventHub>(),
|
Substitute.For<IEventHub>(),
|
||||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||||
|
|
||||||
// Delete the Chapter
|
// Delete the Chapter
|
||||||
_context.Chapter.Remove(c);
|
Context.Chapter.Remove(c);
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
Assert.Empty(await _unitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
|
Assert.Empty(await UnitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
|
||||||
|
|
||||||
// NOTE: This may not be needed, the underlying DB structure seems fixed as of v0.7
|
// NOTE: This may not be needed, the underlying DB structure seems fixed as of v0.7
|
||||||
await cleanupService.CleanupDbEntries();
|
await cleanupService.CleanupDbEntries();
|
||||||
|
|
||||||
Assert.Empty(await _unitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
|
Assert.Empty(await UnitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task CleanupDbEntries_RemoveTagsWithoutSeries()
|
public async Task CleanupDbEntries_RemoveTagsWithoutSeries()
|
||||||
{
|
{
|
||||||
var c = new CollectionTag()
|
var s = new SeriesBuilder("Test")
|
||||||
|
.WithFormat(MangaFormat.Epub)
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
|
.Build();
|
||||||
|
s.Library = new LibraryBuilder("Test LIb").Build();
|
||||||
|
Context.Series.Add(s);
|
||||||
|
|
||||||
|
var c = new AppUserCollection()
|
||||||
{
|
{
|
||||||
Title = "Test Tag",
|
Title = "Test Tag",
|
||||||
NormalizedTitle = "Test Tag".ToNormalized(),
|
NormalizedTitle = "Test Tag".ToNormalized(),
|
||||||
|
AgeRating = AgeRating.Unknown,
|
||||||
|
Items = new List<Series>() {s}
|
||||||
};
|
};
|
||||||
var s = new SeriesBuilder("Test")
|
|
||||||
.WithFormat(MangaFormat.Epub)
|
|
||||||
.WithMetadata(new SeriesMetadataBuilder().WithCollectionTag(c).Build())
|
|
||||||
.Build();
|
|
||||||
s.Library = new LibraryBuilder("Test LIb").Build();
|
|
||||||
|
|
||||||
_context.Series.Add(s);
|
Context.AppUser.Add(new AppUser()
|
||||||
|
|
||||||
_context.AppUser.Add(new AppUser()
|
|
||||||
{
|
{
|
||||||
UserName = "majora2007"
|
UserName = "majora2007",
|
||||||
|
Collections = new List<AppUserCollection>() {c}
|
||||||
});
|
});
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
await _context.SaveChangesAsync();
|
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||||
|
|
||||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
|
||||||
Substitute.For<IEventHub>(),
|
Substitute.For<IEventHub>(),
|
||||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||||
|
|
||||||
// Delete the Chapter
|
// Delete the Chapter
|
||||||
_context.Series.Remove(s);
|
Context.Series.Remove(s);
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
await cleanupService.CleanupDbEntries();
|
await cleanupService.CleanupDbEntries();
|
||||||
|
|
||||||
Assert.Empty(await _unitOfWork.CollectionTagRepository.GetAllTagsAsync());
|
Assert.Empty(await UnitOfWork.CollectionTagRepository.GetAllCollectionsAsync());
|
||||||
}
|
}
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
@ -483,24 +480,30 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
.Build();
|
.Build();
|
||||||
|
|
||||||
s.Library = new LibraryBuilder("Test LIb").Build();
|
s.Library = new LibraryBuilder("Test LIb").Build();
|
||||||
_context.Series.Add(s);
|
Context.Series.Add(s);
|
||||||
|
|
||||||
var user = new AppUser()
|
var user = new AppUser()
|
||||||
{
|
{
|
||||||
UserName = "CleanupWantToRead_ShouldRemoveFullyReadSeries",
|
UserName = "CleanupWantToRead_ShouldRemoveFullyReadSeries",
|
||||||
WantToRead = new List<Series>()
|
};
|
||||||
|
Context.AppUser.Add(user);
|
||||||
|
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Add want to read
|
||||||
|
user.WantToRead = new List<AppUserWantToRead>()
|
||||||
|
{
|
||||||
|
new AppUserWantToRead()
|
||||||
{
|
{
|
||||||
s
|
SeriesId = s.Id
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
_context.AppUser.Add(user);
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
await _unitOfWork.CommitAsync();
|
|
||||||
|
|
||||||
await _readerService.MarkSeriesAsRead(user, s.Id);
|
await _readerService.MarkSeriesAsRead(user, s.Id);
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||||
Substitute.For<IEventHub>(),
|
Substitute.For<IEventHub>(),
|
||||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||||
|
|
||||||
|
|
@ -508,12 +511,77 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
await cleanupService.CleanupWantToRead();
|
await cleanupService.CleanupWantToRead();
|
||||||
|
|
||||||
var wantToRead =
|
var wantToRead =
|
||||||
await _unitOfWork.SeriesRepository.GetWantToReadForUserAsync(user.Id, new UserParams(), new FilterDto());
|
await UnitOfWork.SeriesRepository.GetWantToReadForUserAsync(user.Id, new UserParams(), new FilterDto());
|
||||||
|
|
||||||
Assert.Equal(0, wantToRead.TotalCount);
|
Assert.Equal(0, wantToRead.TotalCount);
|
||||||
}
|
}
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
|
#region ConsolidateProgress
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ConsolidateProgress_ShouldRemoveDuplicates()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
var s = new SeriesBuilder("Test ConsolidateProgress_ShouldRemoveDuplicates")
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1")
|
||||||
|
.WithPages(3)
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
s.Library = new LibraryBuilder("Test Lib").Build();
|
||||||
|
Context.Series.Add(s);
|
||||||
|
|
||||||
|
var user = new AppUser()
|
||||||
|
{
|
||||||
|
UserName = "ConsolidateProgress_ShouldRemoveDuplicates",
|
||||||
|
};
|
||||||
|
Context.AppUser.Add(user);
|
||||||
|
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Add 2 progress events
|
||||||
|
user.Progresses ??= [];
|
||||||
|
user.Progresses.Add(new AppUserProgress()
|
||||||
|
{
|
||||||
|
ChapterId = 1,
|
||||||
|
VolumeId = 1,
|
||||||
|
SeriesId = 1,
|
||||||
|
LibraryId = s.LibraryId,
|
||||||
|
PagesRead = 1,
|
||||||
|
});
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Add a duplicate with higher page number
|
||||||
|
user.Progresses.Add(new AppUserProgress()
|
||||||
|
{
|
||||||
|
ChapterId = 1,
|
||||||
|
VolumeId = 1,
|
||||||
|
SeriesId = 1,
|
||||||
|
LibraryId = s.LibraryId,
|
||||||
|
PagesRead = 3,
|
||||||
|
});
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
Assert.Equal(2, (await UnitOfWork.AppUserProgressRepository.GetAllProgress()).Count());
|
||||||
|
|
||||||
|
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||||
|
Substitute.For<IEventHub>(),
|
||||||
|
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||||
|
|
||||||
|
|
||||||
|
await cleanupService.ConsolidateProgress();
|
||||||
|
|
||||||
|
var progress = await UnitOfWork.AppUserProgressRepository.GetAllProgress();
|
||||||
|
|
||||||
|
Assert.Single(progress);
|
||||||
|
Assert.True(progress.First().PagesRead == 3);
|
||||||
|
}
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
|
||||||
#region EnsureChapterProgressIsCapped
|
#region EnsureChapterProgressIsCapped
|
||||||
|
|
||||||
|
|
@ -531,56 +599,56 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
c.UserProgress = new List<AppUserProgress>();
|
c.UserProgress = new List<AppUserProgress>();
|
||||||
s.Volumes = new List<Volume>()
|
s.Volumes = new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0").WithChapter(c).Build()
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume).WithChapter(c).Build()
|
||||||
};
|
};
|
||||||
_context.Series.Add(s);
|
Context.Series.Add(s);
|
||||||
|
|
||||||
var user = new AppUser()
|
var user = new AppUser()
|
||||||
{
|
{
|
||||||
UserName = "EnsureChapterProgressIsCapped",
|
UserName = "EnsureChapterProgressIsCapped",
|
||||||
Progresses = new List<AppUserProgress>()
|
Progresses = new List<AppUserProgress>()
|
||||||
};
|
};
|
||||||
_context.AppUser.Add(user);
|
Context.AppUser.Add(user);
|
||||||
|
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
await _readerService.MarkChaptersAsRead(user, s.Id, new List<Chapter>() {c});
|
await _readerService.MarkChaptersAsRead(user, s.Id, new List<Chapter>() {c});
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
var chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||||
await _unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||||
|
|
||||||
Assert.NotNull(chapter);
|
Assert.NotNull(chapter);
|
||||||
Assert.Equal(2, chapter.PagesRead);
|
Assert.Equal(2, chapter.PagesRead);
|
||||||
|
|
||||||
// Update chapter to have 1 page
|
// Update chapter to have 1 page
|
||||||
c.Pages = 1;
|
c.Pages = 1;
|
||||||
_unitOfWork.ChapterRepository.Update(c);
|
UnitOfWork.ChapterRepository.Update(c);
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||||
await _unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||||
Assert.NotNull(chapter);
|
Assert.NotNull(chapter);
|
||||||
Assert.Equal(2, chapter.PagesRead);
|
Assert.Equal(2, chapter.PagesRead);
|
||||||
Assert.Equal(1, chapter.Pages);
|
Assert.Equal(1, chapter.Pages);
|
||||||
|
|
||||||
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), _unitOfWork,
|
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
|
||||||
Substitute.For<IEventHub>(),
|
Substitute.For<IEventHub>(),
|
||||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
|
||||||
|
|
||||||
await cleanupService.EnsureChapterProgressIsCapped();
|
await cleanupService.EnsureChapterProgressIsCapped();
|
||||||
chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
|
||||||
await _unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
|
||||||
|
|
||||||
Assert.NotNull(chapter);
|
Assert.NotNull(chapter);
|
||||||
Assert.Equal(1, chapter.PagesRead);
|
Assert.Equal(1, chapter.PagesRead);
|
||||||
|
|
||||||
_context.AppUser.Remove(user);
|
Context.AppUser.Remove(user);
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
}
|
}
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
// #region CleanupBookmarks
|
#region CleanupBookmarks
|
||||||
//
|
//
|
||||||
// [Fact]
|
// [Fact]
|
||||||
// public async Task CleanupBookmarks_LeaveAllFiles()
|
// public async Task CleanupBookmarks_LeaveAllFiles()
|
||||||
|
|
@ -717,5 +785,5 @@ public class CleanupServiceTests : AbstractDbTest
|
||||||
// Assert.Equal(1, ds.FileSystem.Directory.GetDirectories($"{BookmarkDirectory}1/1/").Length);
|
// Assert.Equal(1, ds.FileSystem.Directory.GetDirectories($"{BookmarkDirectory}1/1/").Length);
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
// #endregion
|
#endregion
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,15 +1,18 @@
|
||||||
using System.Collections.Generic;
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
|
using API.Constants;
|
||||||
using API.Data;
|
using API.Data;
|
||||||
using API.Data.Repositories;
|
using API.Data.Repositories;
|
||||||
using API.DTOs.CollectionTags;
|
using API.DTOs.Collection;
|
||||||
using API.Entities;
|
using API.Entities;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
|
using API.Services.Plus;
|
||||||
using API.SignalR;
|
using API.SignalR;
|
||||||
using API.Tests.Helpers;
|
using Kavita.Common;
|
||||||
using NSubstitute;
|
using NSubstitute;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
||||||
|
|
@ -20,132 +23,507 @@ public class CollectionTagServiceTests : AbstractDbTest
|
||||||
private readonly ICollectionTagService _service;
|
private readonly ICollectionTagService _service;
|
||||||
public CollectionTagServiceTests()
|
public CollectionTagServiceTests()
|
||||||
{
|
{
|
||||||
_service = new CollectionTagService(_unitOfWork, Substitute.For<IEventHub>());
|
_service = new CollectionTagService(UnitOfWork, Substitute.For<IEventHub>());
|
||||||
}
|
}
|
||||||
|
|
||||||
protected override async Task ResetDb()
|
protected override async Task ResetDb()
|
||||||
{
|
{
|
||||||
_context.CollectionTag.RemoveRange(_context.CollectionTag.ToList());
|
Context.AppUserCollection.RemoveRange(Context.AppUserCollection.ToList());
|
||||||
_context.Library.RemoveRange(_context.Library.ToList());
|
Context.Library.RemoveRange(Context.Library.ToList());
|
||||||
|
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
}
|
}
|
||||||
|
|
||||||
private async Task SeedSeries()
|
private async Task SeedSeries()
|
||||||
{
|
{
|
||||||
if (_context.CollectionTag.Any()) return;
|
if (Context.AppUserCollection.Any()) return;
|
||||||
|
|
||||||
_context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
|
var s1 = new SeriesBuilder("Series 1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Mature).Build()).Build();
|
||||||
.WithSeries(new SeriesBuilder("Series 1").Build())
|
var s2 = new SeriesBuilder("Series 2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.G).Build()).Build();
|
||||||
.WithSeries(new SeriesBuilder("Series 2").Build())
|
Context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
|
||||||
|
.WithSeries(s1)
|
||||||
|
.WithSeries(s2)
|
||||||
.Build());
|
.Build());
|
||||||
|
|
||||||
_context.CollectionTag.Add(new CollectionTagBuilder("Tag 1").Build());
|
var user = new AppUserBuilder("majora2007", "majora2007", Seed.DefaultThemes.First()).Build();
|
||||||
_context.CollectionTag.Add(new CollectionTagBuilder("Tag 2").WithIsPromoted(true).Build());
|
user.Collections = new List<AppUserCollection>()
|
||||||
await _unitOfWork.CommitAsync();
|
{
|
||||||
|
new AppUserCollectionBuilder("Tag 1").WithItems(new []{s1}).Build(),
|
||||||
|
new AppUserCollectionBuilder("Tag 2").WithItems(new []{s1, s2}).WithIsPromoted(true).Build()
|
||||||
|
};
|
||||||
|
UnitOfWork.UserRepository.Add(user);
|
||||||
|
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#region DeleteTag
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task TagExistsByName_ShouldFindTag()
|
public async Task DeleteTag_ShouldDeleteTag_WhenTagExists()
|
||||||
{
|
{
|
||||||
|
// Arrange
|
||||||
await SeedSeries();
|
await SeedSeries();
|
||||||
Assert.True(await _service.TagExistsByName("Tag 1"));
|
|
||||||
Assert.True(await _service.TagExistsByName("tag 1"));
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
Assert.False(await _service.TagExistsByName("tag5"));
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = await _service.DeleteTag(1, user);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(result);
|
||||||
|
var deletedTag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.Null(deletedTag);
|
||||||
|
Assert.Single(user.Collections); // Only one collection should remain
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task DeleteTag_ShouldReturnTrue_WhenTagDoesNotExist()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Act - Try to delete a non-existent tag
|
||||||
|
var result = await _service.DeleteTag(999, user);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(result); // Should return true because the tag is already "deleted"
|
||||||
|
Assert.Equal(2, user.Collections.Count); // Both collections should remain
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task DeleteTag_ShouldNotAffectOtherTags()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = await _service.DeleteTag(1, user);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(result);
|
||||||
|
var remainingTag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||||
|
Assert.NotNull(remainingTag);
|
||||||
|
Assert.Equal("Tag 2", remainingTag.Title);
|
||||||
|
Assert.True(remainingTag.Promoted);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region UpdateTag
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task UpdateTag_ShouldUpdateFields()
|
public async Task UpdateTag_ShouldUpdateFields()
|
||||||
{
|
{
|
||||||
await SeedSeries();
|
await SeedSeries();
|
||||||
|
|
||||||
_context.CollectionTag.Add(new CollectionTagBuilder("UpdateTag_ShouldUpdateFields").WithId(3).WithIsPromoted(true).Build());
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
await _unitOfWork.CommitAsync();
|
Assert.NotNull(user);
|
||||||
|
|
||||||
await _service.UpdateTag(new CollectionTagDto()
|
user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldUpdateFields").WithIsPromoted(true).Build());
|
||||||
|
UnitOfWork.UserRepository.Update(user);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await _service.UpdateTag(new AppUserCollectionDto()
|
||||||
{
|
{
|
||||||
Title = "UpdateTag_ShouldUpdateFields",
|
Title = "UpdateTag_ShouldUpdateFields",
|
||||||
Id = 3,
|
Id = 3,
|
||||||
Promoted = true,
|
Promoted = true,
|
||||||
Summary = "Test Summary",
|
Summary = "Test Summary",
|
||||||
});
|
AgeRating = AgeRating.Unknown
|
||||||
|
}, 1);
|
||||||
|
|
||||||
var tag = await _unitOfWork.CollectionTagRepository.GetTagAsync(3);
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(3);
|
||||||
Assert.NotNull(tag);
|
Assert.NotNull(tag);
|
||||||
Assert.True(tag.Promoted);
|
Assert.True(tag.Promoted);
|
||||||
Assert.True(!string.IsNullOrEmpty(tag.Summary));
|
Assert.False(string.IsNullOrEmpty(tag.Summary));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// UpdateTag should not change any title if non-Kavita source
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource()
|
||||||
|
{
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource").WithSource(ScrobbleProvider.Mal).Build());
|
||||||
|
UnitOfWork.UserRepository.Update(user);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = "New Title",
|
||||||
|
Id = 3,
|
||||||
|
Promoted = true,
|
||||||
|
Summary = "Test Summary",
|
||||||
|
AgeRating = AgeRating.Unknown
|
||||||
|
}, 1);
|
||||||
|
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(3);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.Equal("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource", tag.Title);
|
||||||
|
Assert.False(string.IsNullOrEmpty(tag.Summary));
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task AddTagToSeries_ShouldAddTagToAllSeries()
|
public async Task UpdateTag_ShouldThrowException_WhenTagDoesNotExist()
|
||||||
{
|
{
|
||||||
|
// Arrange
|
||||||
await SeedSeries();
|
await SeedSeries();
|
||||||
var ids = new[] {1, 2};
|
|
||||||
await _service.AddTagToSeries(await _unitOfWork.CollectionTagRepository.GetTagAsync(1, CollectionTagIncludes.SeriesMetadata), ids);
|
|
||||||
|
|
||||||
var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(ids);
|
// Act & Assert
|
||||||
Assert.Contains(metadatas.ElementAt(0).CollectionTags, t => t.Title.Equals("Tag 1"));
|
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||||
Assert.Contains(metadatas.ElementAt(1).CollectionTags, t => t.Title.Equals("Tag 1"));
|
{
|
||||||
|
Title = "Non-existent Tag",
|
||||||
|
Id = 999, // Non-existent ID
|
||||||
|
Promoted = false
|
||||||
|
}, 1));
|
||||||
|
|
||||||
|
Assert.Equal("collection-doesnt-exist", exception.Message);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task RemoveTagFromSeries_ShouldRemoveMultiple()
|
public async Task UpdateTag_ShouldThrowException_WhenUserDoesNotOwnTag()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
// Create a second user
|
||||||
|
var user2 = new AppUserBuilder("user2", "user2", Seed.DefaultThemes.First()).Build();
|
||||||
|
UnitOfWork.UserRepository.Add(user2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Act & Assert
|
||||||
|
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = "Tag 1",
|
||||||
|
Id = 1, // This belongs to user1
|
||||||
|
Promoted = false
|
||||||
|
}, 2)); // User with ID 2
|
||||||
|
|
||||||
|
Assert.Equal("access-denied", exception.Message);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateTag_ShouldThrowException_WhenTitleIsEmpty()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
// Act & Assert
|
||||||
|
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = " ", // Empty after trimming
|
||||||
|
Id = 1,
|
||||||
|
Promoted = false
|
||||||
|
}, 1));
|
||||||
|
|
||||||
|
Assert.Equal("collection-tag-title-required", exception.Message);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateTag_ShouldThrowException_WhenTitleAlreadyExists()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
// Act & Assert
|
||||||
|
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = "Tag 2", // Already exists
|
||||||
|
Id = 1, // Trying to rename Tag 1 to Tag 2
|
||||||
|
Promoted = false
|
||||||
|
}, 1));
|
||||||
|
|
||||||
|
Assert.Equal("collection-tag-duplicate", exception.Message);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateTag_ShouldUpdateCoverImageSettings()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = "Tag 1",
|
||||||
|
Id = 1,
|
||||||
|
CoverImageLocked = true
|
||||||
|
}, 1);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.True(tag.CoverImageLocked);
|
||||||
|
|
||||||
|
// Now test unlocking the cover image
|
||||||
|
await _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = "Tag 1",
|
||||||
|
Id = 1,
|
||||||
|
CoverImageLocked = false
|
||||||
|
}, 1);
|
||||||
|
|
||||||
|
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.False(tag.CoverImageLocked);
|
||||||
|
Assert.Equal(string.Empty, tag.CoverImage);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateTag_ShouldAllowPromoteForAdminRole()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
// Setup a user with admin role
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
await AddUserWithRole(user.Id, PolicyConstants.AdminRole);
|
||||||
|
|
||||||
|
|
||||||
|
// Act - Try to promote a tag that wasn't previously promoted
|
||||||
|
await _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = "Tag 1",
|
||||||
|
Id = 1,
|
||||||
|
Promoted = true
|
||||||
|
}, 1);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.True(tag.Promoted);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateTag_ShouldAllowPromoteForPromoteRole()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
// Setup a user with promote role
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Mock to return promote role for the user
|
||||||
|
await AddUserWithRole(user.Id, PolicyConstants.PromoteRole);
|
||||||
|
|
||||||
|
// Act - Try to promote a tag that wasn't previously promoted
|
||||||
|
await _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = "Tag 1",
|
||||||
|
Id = 1,
|
||||||
|
Promoted = true
|
||||||
|
}, 1);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.True(tag.Promoted);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateTag_ShouldNotChangePromotion_WhenUserHasNoPermission()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
// Setup a user with no special roles
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Act - Try to promote a tag without proper role
|
||||||
|
await _service.UpdateTag(new AppUserCollectionDto()
|
||||||
|
{
|
||||||
|
Title = "Tag 1",
|
||||||
|
Id = 1,
|
||||||
|
Promoted = true
|
||||||
|
}, 1);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.False(tag.Promoted); // Should remain unpromoted
|
||||||
|
}
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
|
||||||
|
#region RemoveTagFromSeries
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task RemoveTagFromSeries_RemoveSeriesFromTag()
|
||||||
{
|
{
|
||||||
await SeedSeries();
|
await SeedSeries();
|
||||||
var ids = new[] {1, 2};
|
|
||||||
var tag = await _unitOfWork.CollectionTagRepository.GetTagAsync(2, CollectionTagIncludes.SeriesMetadata);
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
await _service.AddTagToSeries(tag, ids);
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Tag 2 has 2 series
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
|
||||||
|
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||||
|
var userCollections = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.Equal(2, userCollections!.Collections.Count);
|
||||||
|
Assert.Single(tag.Items);
|
||||||
|
Assert.Equal(2, tag.Items.First().Id);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Ensure the rating of the tag updates after a series change
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public async Task RemoveTagFromSeries_RemoveSeriesFromTag_UpdatesRating()
|
||||||
|
{
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Tag 2 has 2 series
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
|
||||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||||
|
|
||||||
var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {1});
|
Assert.Equal(AgeRating.G, tag.AgeRating);
|
||||||
|
|
||||||
Assert.Single(metadatas);
|
|
||||||
Assert.Empty(metadatas.First().CollectionTags);
|
|
||||||
Assert.NotEmpty(await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {2}));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Should remove the tag when there are no items left on the tag
|
||||||
|
/// </summary>
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task GetTagOrCreate_ShouldReturnNewTag()
|
public async Task RemoveTagFromSeries_RemoveSeriesFromTag_DeleteTagWhenNoSeriesLeft()
|
||||||
{
|
{
|
||||||
await SeedSeries();
|
await SeedSeries();
|
||||||
var tag = await _service.GetTagOrCreate(0, "GetTagOrCreate_ShouldReturnNewTag");
|
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Tag 1 has 1 series
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
Assert.NotNull(tag);
|
Assert.NotNull(tag);
|
||||||
Assert.Equal(0, tag.Id);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task GetTagOrCreate_ShouldReturnExistingTag()
|
|
||||||
{
|
|
||||||
await SeedSeries();
|
|
||||||
var tag = await _service.GetTagOrCreate(1, "Some new tag");
|
|
||||||
Assert.NotNull(tag);
|
|
||||||
Assert.Equal(1, tag.Id);
|
|
||||||
Assert.Equal("Tag 1", tag.Title);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task RemoveTagsWithoutSeries_ShouldRemoveAbandonedEntries()
|
|
||||||
{
|
|
||||||
await SeedSeries();
|
|
||||||
// Setup a tag with one series
|
|
||||||
var tag = await _service.GetTagOrCreate(0, "Tag with a series");
|
|
||||||
await _unitOfWork.CommitAsync();
|
|
||||||
|
|
||||||
var metadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(new[] {1});
|
|
||||||
tag.SeriesMetadatas.Add(metadatas.First());
|
|
||||||
var tagId = tag.Id;
|
|
||||||
await _unitOfWork.CommitAsync();
|
|
||||||
|
|
||||||
// Validate it doesn't remove tags it shouldn't
|
|
||||||
await _service.RemoveTagsWithoutSeries();
|
|
||||||
Assert.NotNull(await _unitOfWork.CollectionTagRepository.GetTagAsync(tagId));
|
|
||||||
|
|
||||||
await _service.RemoveTagFromSeries(tag, new[] {1});
|
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||||
|
var tag2 = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
// Validate it does remove tags it should
|
Assert.Null(tag2);
|
||||||
await _service.RemoveTagsWithoutSeries();
|
|
||||||
Assert.Null(await _unitOfWork.CollectionTagRepository.GetTagAsync(tagId));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task RemoveTagFromSeries_ShouldReturnFalse_WhenTagIsNull()
|
||||||
|
{
|
||||||
|
// Act
|
||||||
|
var result = await _service.RemoveTagFromSeries(null, [1]);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.False(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task RemoveTagFromSeries_ShouldHandleEmptySeriesIdsList()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
var initialItemCount = tag.Items.Count;
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = await _service.RemoveTagFromSeries(tag, Array.Empty<int>());
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(result);
|
||||||
|
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task RemoveTagFromSeries_ShouldHandleNonExistentSeriesIds()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
var initialItemCount = tag.Items.Count;
|
||||||
|
|
||||||
|
// Act - Try to remove a series that doesn't exist in the tag
|
||||||
|
var result = await _service.RemoveTagFromSeries(tag, [999]);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(result);
|
||||||
|
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task RemoveTagFromSeries_ShouldHandleNullItemsList()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
|
||||||
|
// Force null items list
|
||||||
|
tag.Items = null;
|
||||||
|
UnitOfWork.CollectionTagRepository.Update(tag);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var result = await _service.RemoveTagFromSeries(tag, [1]);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
Assert.True(result);
|
||||||
|
// The tag should not be removed since the items list was null, not empty
|
||||||
|
var tagAfter = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
|
||||||
|
Assert.Null(tagAfter);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task RemoveTagFromSeries_ShouldUpdateAgeRating_WhenMultipleSeriesRemain()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedSeries();
|
||||||
|
|
||||||
|
// Add a third series with a different age rating
|
||||||
|
var s3 = new SeriesBuilder("Series 3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.PG).Build()).Build();
|
||||||
|
Context.Library.First().Series.Add(s3);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Add series 3 to tag 2
|
||||||
|
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
tag.Items.Add(s3);
|
||||||
|
UnitOfWork.CollectionTagRepository.Update(tag);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Act - Remove the series with Mature rating
|
||||||
|
await _service.RemoveTagFromSeries(tag, new[] {1});
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
|
||||||
|
Assert.NotNull(tag);
|
||||||
|
Assert.Equal(2, tag.Items.Count);
|
||||||
|
|
||||||
|
// The age rating should be updated to the highest remaining rating (PG)
|
||||||
|
Assert.Equal(AgeRating.PG, tag.AgeRating);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
117
API.Tests/Services/CoverDbServiceTests.cs
Normal file
117
API.Tests/Services/CoverDbServiceTests.cs
Normal file
|
|
@ -0,0 +1,117 @@
|
||||||
|
using System.IO;
|
||||||
|
using System.IO.Abstractions;
|
||||||
|
using System.Reflection;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.Constants;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Extensions;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Tasks.Metadata;
|
||||||
|
using API.SignalR;
|
||||||
|
using EasyCaching.Core;
|
||||||
|
using Kavita.Common;
|
||||||
|
using Microsoft.Extensions.Hosting;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
|
public class CoverDbServiceTests : AbstractDbTest
|
||||||
|
{
|
||||||
|
private readonly DirectoryService _directoryService;
|
||||||
|
private readonly IEasyCachingProviderFactory _cacheFactory = Substitute.For<IEasyCachingProviderFactory>();
|
||||||
|
private readonly ICoverDbService _coverDbService;
|
||||||
|
|
||||||
|
private static readonly string FaviconPath = Path.Join(Directory.GetCurrentDirectory(),
|
||||||
|
"../../../Services/Test Data/CoverDbService/Favicons");
|
||||||
|
/// <summary>
|
||||||
|
/// Path to download files temp to. Should be empty after each test.
|
||||||
|
/// </summary>
|
||||||
|
private static readonly string TempPath = Path.Join(Directory.GetCurrentDirectory(),
|
||||||
|
"../../../Services/Test Data/CoverDbService/Temp");
|
||||||
|
|
||||||
|
public CoverDbServiceTests()
|
||||||
|
{
|
||||||
|
_directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), CreateFileSystem());
|
||||||
|
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
|
||||||
|
|
||||||
|
_coverDbService = new CoverDbService(Substitute.For<ILogger<CoverDbService>>(), _directoryService, _cacheFactory,
|
||||||
|
Substitute.For<IHostEnvironment>(), imageService, UnitOfWork, Substitute.For<IEventHub>());
|
||||||
|
}
|
||||||
|
|
||||||
|
protected override Task ResetDb()
|
||||||
|
{
|
||||||
|
throw new System.NotImplementedException();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#region Download Favicon
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// I cannot figure out how to test this code due to the reliance on the _directoryService.FaviconDirectory and not being
|
||||||
|
/// able to redirect it to the real filesystem.
|
||||||
|
/// </summary>
|
||||||
|
public async Task DownloadFaviconAsync_ShouldDownloadAndMatchExpectedFavicon()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var testUrl = "https://anilist.co/anime/6205/Kmpfer/";
|
||||||
|
var encodeFormat = EncodeFormat.WEBP;
|
||||||
|
var expectedFaviconPath = Path.Combine(FaviconPath, "anilist.co.webp");
|
||||||
|
|
||||||
|
// Ensure TempPath exists
|
||||||
|
_directoryService.ExistOrCreate(TempPath);
|
||||||
|
|
||||||
|
var baseUrl = "https://anilist.co";
|
||||||
|
|
||||||
|
// Ensure there is no cache result for this URL
|
||||||
|
var provider = Substitute.For<IEasyCachingProvider>();
|
||||||
|
provider.GetAsync<string>(baseUrl).Returns(new CacheValue<string>(null, false));
|
||||||
|
_cacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
|
||||||
|
|
||||||
|
|
||||||
|
// // Replace favicon directory with TempPath
|
||||||
|
// var directoryService = (DirectoryService)_directoryService;
|
||||||
|
// directoryService.FaviconDirectory = TempPath;
|
||||||
|
|
||||||
|
// Hack: Swap FaviconDirectory with TempPath for ability to download real files
|
||||||
|
typeof(DirectoryService)
|
||||||
|
.GetField("FaviconDirectory", BindingFlags.NonPublic | BindingFlags.Instance)
|
||||||
|
?.SetValue(_directoryService, TempPath);
|
||||||
|
|
||||||
|
|
||||||
|
// Act
|
||||||
|
var resultFilename = await _coverDbService.DownloadFaviconAsync(testUrl, encodeFormat);
|
||||||
|
var actualFaviconPath = Path.Combine(TempPath, resultFilename);
|
||||||
|
|
||||||
|
// Assert file exists
|
||||||
|
Assert.True(File.Exists(actualFaviconPath), "Downloaded favicon does not exist in temp path");
|
||||||
|
|
||||||
|
// Load and compare similarity
|
||||||
|
|
||||||
|
var similarity = expectedFaviconPath.CalculateSimilarity(actualFaviconPath); // Assuming you have this extension
|
||||||
|
Assert.True(similarity > 0.9f, $"Image similarity too low: {similarity}");
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task DownloadFaviconAsync_ShouldThrowKavitaException_WhenPreviouslyFailedUrlExistsInCache()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var testUrl = "https://example.com";
|
||||||
|
var encodeFormat = EncodeFormat.WEBP;
|
||||||
|
|
||||||
|
var provider = Substitute.For<IEasyCachingProvider>();
|
||||||
|
provider.GetAsync<string>(Arg.Any<string>())
|
||||||
|
.Returns(new CacheValue<string>(string.Empty, true)); // Simulate previous failure
|
||||||
|
|
||||||
|
_cacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
|
||||||
|
|
||||||
|
// Act & Assert
|
||||||
|
await Assert.ThrowsAsync<KavitaException>(() =>
|
||||||
|
_coverDbService.DownloadFaviconAsync(testUrl, encodeFormat));
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
@ -18,13 +18,13 @@ public class DeviceServiceDbTests : AbstractDbTest
|
||||||
|
|
||||||
public DeviceServiceDbTests() : base()
|
public DeviceServiceDbTests() : base()
|
||||||
{
|
{
|
||||||
_deviceService = new DeviceService(_unitOfWork, _logger, Substitute.For<IEmailService>());
|
_deviceService = new DeviceService(UnitOfWork, _logger, Substitute.For<IEmailService>());
|
||||||
}
|
}
|
||||||
|
|
||||||
protected override async Task ResetDb()
|
protected override async Task ResetDb()
|
||||||
{
|
{
|
||||||
_context.Users.RemoveRange(_context.Users.ToList());
|
Context.Users.RemoveRange(Context.Users.ToList());
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -39,8 +39,8 @@ public class DeviceServiceDbTests : AbstractDbTest
|
||||||
Devices = new List<Device>()
|
Devices = new List<Device>()
|
||||||
};
|
};
|
||||||
|
|
||||||
_context.Users.Add(user);
|
Context.Users.Add(user);
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
var device = await _deviceService.Create(new CreateDeviceDto()
|
var device = await _deviceService.Create(new CreateDeviceDto()
|
||||||
{
|
{
|
||||||
|
|
@ -62,8 +62,8 @@ public class DeviceServiceDbTests : AbstractDbTest
|
||||||
Devices = new List<Device>()
|
Devices = new List<Device>()
|
||||||
};
|
};
|
||||||
|
|
||||||
_context.Users.Add(user);
|
Context.Users.Add(user);
|
||||||
await _unitOfWork.CommitAsync();
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
var device = await _deviceService.Create(new CreateDeviceDto()
|
var device = await _deviceService.Create(new CreateDeviceDto()
|
||||||
{
|
{
|
||||||
|
|
|
||||||
|
|
@ -1,20 +1,30 @@
|
||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
|
using System.Globalization;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
|
using System.Runtime.InteropServices;
|
||||||
using System.Text;
|
using System.Text;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
|
using Kavita.Common.Helpers;
|
||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using NSubstitute;
|
using NSubstitute;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
using Xunit.Abstractions;
|
||||||
|
|
||||||
namespace API.Tests.Services;
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
public class DirectoryServiceTests
|
public class DirectoryServiceTests: AbstractFsTest
|
||||||
{
|
{
|
||||||
private readonly ILogger<DirectoryService> _logger = Substitute.For<ILogger<DirectoryService>>();
|
private readonly ILogger<DirectoryService> _logger = Substitute.For<ILogger<DirectoryService>>();
|
||||||
|
private readonly ITestOutputHelper _testOutputHelper;
|
||||||
|
|
||||||
|
public DirectoryServiceTests(ITestOutputHelper testOutputHelper)
|
||||||
|
{
|
||||||
|
_testOutputHelper = testOutputHelper;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
#region TraverseTreeParallelForEach
|
#region TraverseTreeParallelForEach
|
||||||
|
|
@ -372,9 +382,16 @@ public class DirectoryServiceTests
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
#region IsDriveMounted
|
#region IsDriveMounted
|
||||||
|
// The root directory (/) is always mounted on non windows
|
||||||
[Fact]
|
[Fact]
|
||||||
public void IsDriveMounted_DriveIsNotMounted()
|
public void IsDriveMounted_DriveIsNotMounted()
|
||||||
{
|
{
|
||||||
|
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
|
||||||
|
{
|
||||||
|
_testOutputHelper.WriteLine("Skipping test on non Windows platform");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const string testDirectory = "c:/manga/";
|
const string testDirectory = "c:/manga/";
|
||||||
var fileSystem = new MockFileSystem();
|
var fileSystem = new MockFileSystem();
|
||||||
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
|
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
|
||||||
|
|
@ -386,6 +403,12 @@ public class DirectoryServiceTests
|
||||||
[Fact]
|
[Fact]
|
||||||
public void IsDriveMounted_DriveIsMounted()
|
public void IsDriveMounted_DriveIsMounted()
|
||||||
{
|
{
|
||||||
|
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
|
||||||
|
{
|
||||||
|
_testOutputHelper.WriteLine("Skipping test on non Windows platform");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const string testDirectory = "c:/manga/";
|
const string testDirectory = "c:/manga/";
|
||||||
var fileSystem = new MockFileSystem();
|
var fileSystem = new MockFileSystem();
|
||||||
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
|
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
|
||||||
|
|
@ -721,6 +744,54 @@ public class DirectoryServiceTests
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
|
#region FindLowestDirectoriesFromFiles
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData(new [] {"C:/Manga/"},
|
||||||
|
new [] {"C:/Manga/Love Hina/Vol. 01.cbz"},
|
||||||
|
"C:/Manga/Love Hina")]
|
||||||
|
[InlineData(new [] {"C:/Manga/"},
|
||||||
|
new [] {"C:/Manga/Romance/Love Hina/Vol. 01.cbz"},
|
||||||
|
"C:/Manga/Romance/Love Hina")]
|
||||||
|
[InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"},
|
||||||
|
new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"},
|
||||||
|
"C:/Manga/Dir 1/Love Hina")]
|
||||||
|
[InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"},
|
||||||
|
new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 01.cbz"},
|
||||||
|
null)]
|
||||||
|
[InlineData(new [] {@"C:\mount\drive\Library\Test Library\Comics\"},
|
||||||
|
new [] {@"C:\mount\drive\Library\Test Library\Comics\Bruce Lee (1994)\Bruce Lee #001 (1994).cbz"},
|
||||||
|
@"C:/mount/drive/Library/Test Library/Comics/Bruce Lee (1994)")]
|
||||||
|
[InlineData(new [] {"C:/Manga/"},
|
||||||
|
new [] {"C:/Manga/Love Hina/Vol. 01.cbz", "C:/Manga/Love Hina/Specials/Sp01.cbz"},
|
||||||
|
"C:/Manga/Love Hina")]
|
||||||
|
[InlineData(new [] {"/manga"},
|
||||||
|
new [] {"/manga/Love Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
|
||||||
|
"/manga/Love Hina")]
|
||||||
|
[InlineData(new [] {"/manga"},
|
||||||
|
new [] {"/manga/Love Hina/Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
|
||||||
|
"/manga/Love Hina")]
|
||||||
|
[InlineData(new [] {"/manga"},
|
||||||
|
new [] {"/manga/Dress Up Darling/Dress Up Darling Ch 01.cbz", "/manga/Dress Up Darling/Dress Up Darling/Dress Up Darling Vol 01.cbz"},
|
||||||
|
"/manga/Dress Up Darling")]
|
||||||
|
public void FindLowestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory)
|
||||||
|
{
|
||||||
|
var fileSystem = new MockFileSystem();
|
||||||
|
foreach (var directory in rootDirectories)
|
||||||
|
{
|
||||||
|
fileSystem.AddDirectory(directory);
|
||||||
|
}
|
||||||
|
foreach (var f in files)
|
||||||
|
{
|
||||||
|
fileSystem.AddFile(f, new MockFileData(""));
|
||||||
|
}
|
||||||
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
|
|
||||||
|
var actual = ds.FindLowestDirectoriesFromFiles(rootDirectories, files);
|
||||||
|
Assert.Equal(expectedDirectory, actual);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
#region GetFoldersTillRoot
|
#region GetFoldersTillRoot
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
|
|
@ -851,12 +922,14 @@ public class DirectoryServiceTests
|
||||||
#region GetHumanReadableBytes
|
#region GetHumanReadableBytes
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(1200, "1.17 KB")]
|
[InlineData(1200, 1.17, " KB")]
|
||||||
[InlineData(1, "1 B")]
|
[InlineData(1, 1, " B")]
|
||||||
[InlineData(10000000, "9.54 MB")]
|
[InlineData(10000000, 9.54, " MB")]
|
||||||
[InlineData(10000000000, "9.31 GB")]
|
[InlineData(10000000000, 9.31, " GB")]
|
||||||
public void GetHumanReadableBytesTest(long bytes, string expected)
|
public void GetHumanReadableBytesTest(long bytes, float number, string suffix)
|
||||||
{
|
{
|
||||||
|
// GetHumanReadableBytes is user facing, should be in CultureInfo.CurrentCulture
|
||||||
|
var expected = number.ToString(CultureInfo.CurrentCulture) + suffix;
|
||||||
Assert.Equal(expected, DirectoryService.GetHumanReadableBytes(bytes));
|
Assert.Equal(expected, DirectoryService.GetHumanReadableBytes(bytes));
|
||||||
}
|
}
|
||||||
#endregion
|
#endregion
|
||||||
|
|
@ -878,8 +951,9 @@ public class DirectoryServiceTests
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
|
|
||||||
|
var globMatcher = new GlobMatcher();
|
||||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
globMatcher.AddExclude("*.*");
|
||||||
|
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||||
|
|
||||||
Assert.Empty(allFiles);
|
Assert.Empty(allFiles);
|
||||||
|
|
||||||
|
|
@ -903,7 +977,9 @@ public class DirectoryServiceTests
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
|
|
||||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
var globMatcher = new GlobMatcher();
|
||||||
|
globMatcher.AddExclude("**/Accel World/*");
|
||||||
|
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||||
|
|
||||||
Assert.Single(allFiles); // Ignore files are not counted in files, only valid extensions
|
Assert.Single(allFiles); // Ignore files are not counted in files, only valid extensions
|
||||||
|
|
||||||
|
|
@ -932,7 +1008,10 @@ public class DirectoryServiceTests
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
|
|
||||||
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
|
var globMatcher = new GlobMatcher();
|
||||||
|
globMatcher.AddExclude("**/Accel World/*");
|
||||||
|
globMatcher.AddExclude("**/ArtBooks/*");
|
||||||
|
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
|
||||||
|
|
||||||
Assert.Equal(2, allFiles.Count); // Ignore files are not counted in files, only valid extensions
|
Assert.Equal(2, allFiles.Count); // Ignore files are not counted in files, only valid extensions
|
||||||
|
|
||||||
|
|
@ -986,11 +1065,14 @@ public class DirectoryServiceTests
|
||||||
#region GetParentDirectory
|
#region GetParentDirectory
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(@"C:/file.txt", "C:/")]
|
[InlineData(@"file.txt", "")]
|
||||||
[InlineData(@"C:/folder/file.txt", "C:/folder")]
|
[InlineData(@"folder/file.txt", "folder")]
|
||||||
[InlineData(@"C:/folder/subfolder/file.txt", "C:/folder/subfolder")]
|
[InlineData(@"folder/subfolder/file.txt", "folder/subfolder")]
|
||||||
public void GetParentDirectoryName_ShouldFindParentOfFiles(string path, string expected)
|
public void GetParentDirectoryName_ShouldFindParentOfFiles(string path, string expected)
|
||||||
{
|
{
|
||||||
|
path = Root + path;
|
||||||
|
expected = Root + expected;
|
||||||
|
|
||||||
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
|
||||||
{
|
{
|
||||||
{ path, new MockFileData(string.Empty)}
|
{ path, new MockFileData(string.Empty)}
|
||||||
|
|
@ -1000,11 +1082,14 @@ public class DirectoryServiceTests
|
||||||
Assert.Equal(expected, ds.GetParentDirectoryName(path));
|
Assert.Equal(expected, ds.GetParentDirectoryName(path));
|
||||||
}
|
}
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData(@"C:/folder", "C:/")]
|
[InlineData(@"folder", "")]
|
||||||
[InlineData(@"C:/folder/subfolder", "C:/folder")]
|
[InlineData(@"folder/subfolder", "folder")]
|
||||||
[InlineData(@"C:/folder/subfolder/another", "C:/folder/subfolder")]
|
[InlineData(@"folder/subfolder/another", "folder/subfolder")]
|
||||||
public void GetParentDirectoryName_ShouldFindParentOfDirectories(string path, string expected)
|
public void GetParentDirectoryName_ShouldFindParentOfDirectories(string path, string expected)
|
||||||
{
|
{
|
||||||
|
path = Root + path;
|
||||||
|
expected = Root + expected;
|
||||||
|
|
||||||
var fileSystem = new MockFileSystem();
|
var fileSystem = new MockFileSystem();
|
||||||
fileSystem.AddDirectory(path);
|
fileSystem.AddDirectory(path);
|
||||||
|
|
||||||
|
|
|
||||||
3198
API.Tests/Services/ExternalMetadataServiceTests.cs
Normal file
3198
API.Tests/Services/ExternalMetadataServiceTests.cs
Normal file
File diff suppressed because it is too large
Load diff
221
API.Tests/Services/ImageServiceTests.cs
Normal file
221
API.Tests/Services/ImageServiceTests.cs
Normal file
|
|
@ -0,0 +1,221 @@
|
||||||
|
using System.IO;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Text;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Services;
|
||||||
|
using NetVips;
|
||||||
|
using Xunit;
|
||||||
|
using Image = NetVips.Image;
|
||||||
|
|
||||||
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
|
public class ImageServiceTests
|
||||||
|
{
|
||||||
|
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageService/Covers");
|
||||||
|
private readonly string _testDirectoryColorScapes = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageService/ColorScapes");
|
||||||
|
private const string OutputPattern = "_output";
|
||||||
|
private const string BaselinePattern = "_baseline";
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Run this once to get the baseline generation
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void GenerateBaseline()
|
||||||
|
{
|
||||||
|
GenerateFiles(BaselinePattern);
|
||||||
|
Assert.True(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Change the Scaling/Crop code then run this continuously
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void TestScaling()
|
||||||
|
{
|
||||||
|
GenerateFiles(OutputPattern);
|
||||||
|
GenerateHtmlFile();
|
||||||
|
Assert.True(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void GenerateFiles(string outputExtension)
|
||||||
|
{
|
||||||
|
// Step 1: Delete any images that have _output in the name
|
||||||
|
var outputFiles = Directory.GetFiles(_testDirectory, "*_output.*");
|
||||||
|
foreach (var file in outputFiles)
|
||||||
|
{
|
||||||
|
File.Delete(file);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2: Scan the _testDirectory for images
|
||||||
|
var imageFiles = Directory.GetFiles(_testDirectory, "*.*")
|
||||||
|
.Where(file => !file.EndsWith("html"))
|
||||||
|
.Where(file => !file.Contains(OutputPattern) && !file.Contains(BaselinePattern))
|
||||||
|
.ToList();
|
||||||
|
|
||||||
|
// Step 3: Process each image
|
||||||
|
foreach (var imagePath in imageFiles)
|
||||||
|
{
|
||||||
|
var fileName = Path.GetFileNameWithoutExtension(imagePath);
|
||||||
|
var dims = CoverImageSize.Default.GetDimensions();
|
||||||
|
using var sourceImage = Image.NewFromFile(imagePath, false, Enums.Access.SequentialUnbuffered);
|
||||||
|
|
||||||
|
var size = ImageService.GetSizeForDimensions(sourceImage, dims.Width, dims.Height);
|
||||||
|
var crop = ImageService.GetCropForDimensions(sourceImage, dims.Width, dims.Height);
|
||||||
|
|
||||||
|
using var thumbnail = Image.Thumbnail(imagePath, dims.Width, dims.Height,
|
||||||
|
size: size,
|
||||||
|
crop: crop);
|
||||||
|
|
||||||
|
var outputFileName = fileName + outputExtension + ".png";
|
||||||
|
thumbnail.WriteToFile(Path.Join(_testDirectory, outputFileName));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
/// Writes index.html into <c>_testDirectory</c>, showing each source image next to
/// its baseline and generated output (when those files exist) for visual review.
/// </summary>
private void GenerateHtmlFile()
{
    // Same source-image filter used when generating thumbnails: originals only.
    var sourceImages = Directory.GetFiles(_testDirectory, "*.*")
        .Where(f => !f.EndsWith("html") && !f.Contains(OutputPattern) && !f.Contains(BaselinePattern));

    var sb = new StringBuilder();

    // Static page header and styles.
    sb.AppendLine("<!DOCTYPE html>");
    sb.AppendLine("<html lang=\"en\">");
    sb.AppendLine("<head>");
    sb.AppendLine("<meta charset=\"UTF-8\">");
    sb.AppendLine("<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">");
    sb.AppendLine("<title>Image Comparison</title>");
    sb.AppendLine("<style>");
    sb.AppendLine("body { font-family: Arial, sans-serif; }");
    sb.AppendLine(".container { display: flex; flex-wrap: wrap; }");
    sb.AppendLine(".image-row { display: flex; align-items: center; margin-bottom: 20px; width: 100% }");
    sb.AppendLine(".image-row img { margin-right: 10px; max-width: 200px; height: auto; }");
    sb.AppendLine("</style>");
    sb.AppendLine("</head>");
    sb.AppendLine("<body>");
    sb.AppendLine("<div class=\"container\">");

    // Emits an <img> tag only when the referenced comparison file actually exists.
    void AppendImageIfExists(string path, string alt)
    {
        if (!File.Exists(path)) return;
        sb.AppendLine($"<img src=\"./{Path.GetFileName(path)}\" alt=\"{alt}\">");
    }

    foreach (var imagePath in sourceImages)
    {
        var name = Path.GetFileNameWithoutExtension(imagePath);
        var dims = CoverImageSize.Default.GetDimensions();

        using var source = Image.NewFromFile(imagePath, false, Enums.Access.SequentialUnbuffered);
        var aspectRatio = (double) source.Width / source.Height;

        sb.AppendLine("<div class=\"image-row\">");
        // Row caption: name, aspect ratio, and whether the scaler predicts a good result.
        sb.AppendLine($"<p>{name} ({aspectRatio.ToString("F2")}) - {ImageService.WillScaleWell(source, dims.Width, dims.Height)}</p>");
        sb.AppendLine($"<img src=\"./{Path.GetFileName(imagePath)}\" alt=\"{name}\">");
        AppendImageIfExists(Path.Combine(_testDirectory, name + "_baseline.png"), $"{name} baseline");
        AppendImageIfExists(Path.Combine(_testDirectory, name + "_output.png"), $"{name} output");
        sb.AppendLine("</div>");
    }

    sb.AppendLine("</div>");
    sb.AppendLine("</body>");
    sb.AppendLine("</html>");

    File.WriteAllText(Path.Combine(_testDirectory, "index.html"), sb.ToString());
}
|
||||||
|
|
||||||
|
|
||||||
|
/// <summary>
/// Manual visual test: computes a ColorScape for every sample image, renders the
/// primary and secondary colors as solid PNG swatches, and writes an HTML
/// comparison page for human inspection.
/// </summary>
[Fact]
public void TestColorScapes()
{
    // Remove swatches left over from previous runs so only fresh output remains.
    foreach (var stale in Directory.GetFiles(_testDirectoryColorScapes, "*_output.*"))
    {
        File.Delete(stale);
    }

    // Originals only: skip the html report plus generated output/baseline files.
    var sourceImages = Directory.GetFiles(_testDirectoryColorScapes, "*.*")
        .Where(f => !f.EndsWith("html") && !f.Contains(OutputPattern) && !f.Contains(BaselinePattern));

    foreach (var imagePath in sourceImages)
    {
        var name = Path.GetFileNameWithoutExtension(imagePath);
        var colors = ImageService.CalculateColorScape(imagePath);

        // One swatch per extracted color so the HTML page can show them beside the source.
        GenerateColorImage(colors.Primary, Path.Combine(_testDirectoryColorScapes, $"{name}_primary_output.png"));
        GenerateColorImage(colors.Secondary, Path.Combine(_testDirectoryColorScapes, $"{name}_secondary_output.png"));
    }

    GenerateHtmlFileForColorScape();

    // Results are inspected manually; the assertion only marks the run as completed.
    Assert.True(true);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Writes a 200x100 solid-color PNG for the given hex color, used to visualize
/// a ColorScape result next to its source image.
/// </summary>
/// <param name="hexColor">Color as a hex string, converted via <c>ImageService.HexToRgb</c>.</param>
/// <param name="outputPath">Destination file path for the swatch PNG.</param>
private static void GenerateColorImage(string hexColor, string outputPath)
{
    var (red, green, blue) = ImageService.HexToRgb(hexColor);

    // Start from a black canvas, then recolor every pixel to the target RGB.
    using var canvas = Image.Black(200, 100);
    using var swatch = canvas.NewFromImage(red, green, blue);
    swatch.WriteToFile(outputPath);
}
|
||||||
|
|
||||||
|
/// <summary>
/// Writes colorscape_index.html into <c>_testDirectoryColorScapes</c>, showing each
/// source image next to its generated primary/secondary color swatches.
/// </summary>
private void GenerateHtmlFileForColorScape()
{
    // Originals only: skip the html report and generated output/baseline files.
    var sourceImages = Directory.GetFiles(_testDirectoryColorScapes, "*.*")
        .Where(f => !f.EndsWith("html") && !f.Contains(OutputPattern) && !f.Contains(BaselinePattern));

    var sb = new StringBuilder();

    // Static page header and styles.
    sb.AppendLine("<!DOCTYPE html>");
    sb.AppendLine("<html lang=\"en\">");
    sb.AppendLine("<head>");
    sb.AppendLine("<meta charset=\"UTF-8\">");
    sb.AppendLine("<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">");
    sb.AppendLine("<title>Color Scape Comparison</title>");
    sb.AppendLine("<style>");
    sb.AppendLine("body { font-family: Arial, sans-serif; }");
    sb.AppendLine(".container { display: flex; flex-wrap: wrap; }");
    sb.AppendLine(".image-row { display: flex; align-items: center; margin-bottom: 20px; width: 100% }");
    sb.AppendLine(".image-row img { margin-right: 10px; max-width: 200px; height: auto; }");
    sb.AppendLine(".color-square { width: 100px; height: 100px; margin-right: 10px; }");
    sb.AppendLine("</style>");
    sb.AppendLine("</head>");
    sb.AppendLine("<body>");
    sb.AppendLine("<div class=\"container\">");

    // Emits a swatch <img> tag only when the swatch file was actually generated.
    void AppendSwatch(string path, string alt)
    {
        if (!File.Exists(path)) return;
        sb.AppendLine($"<img class=\"color-square\" src=\"./{Path.GetFileName(path)}\" alt=\"{alt}\">");
    }

    foreach (var imagePath in sourceImages)
    {
        var name = Path.GetFileNameWithoutExtension(imagePath);

        sb.AppendLine("<div class=\"image-row\">");
        sb.AppendLine($"<p>{name}</p>");
        sb.AppendLine($"<img src=\"./{Path.GetFileName(imagePath)}\" alt=\"{name}\">");
        AppendSwatch(Path.Combine(_testDirectoryColorScapes, $"{name}_primary_output.png"), $"{name} primary color");
        AppendSwatch(Path.Combine(_testDirectoryColorScapes, $"{name}_secondary_output.png"), $"{name} secondary color");
        sb.AppendLine("</div>");
    }

    sb.AppendLine("</div>");
    sb.AppendLine("</body>");
    sb.AppendLine("</html>");

    File.WriteAllText(Path.Combine(_testDirectoryColorScapes, "colorscape_index.html"), sb.ToString());
}
|
||||||
|
}
|
||||||
|
|
@ -1,37 +1,41 @@
|
||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Data.Common;
|
using System.IO;
|
||||||
|
using System.IO.Abstractions;
|
||||||
using System.IO.Abstractions.TestingHelpers;
|
using System.IO.Abstractions.TestingHelpers;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using API.Data;
|
|
||||||
using API.Data.Metadata;
|
using API.Data.Metadata;
|
||||||
using API.Data.Repositories;
|
using API.Data.Repositories;
|
||||||
using API.Entities;
|
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Extensions;
|
|
||||||
using API.Helpers.Builders;
|
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using API.Services.Tasks.Scanner;
|
using API.Services.Tasks.Scanner;
|
||||||
using API.Services.Tasks.Scanner.Parser;
|
using API.Services.Tasks.Scanner.Parser;
|
||||||
using API.SignalR;
|
using API.SignalR;
|
||||||
using AutoMapper;
|
using API.Tests.Helpers;
|
||||||
using Microsoft.Data.Sqlite;
|
using Hangfire;
|
||||||
using Microsoft.EntityFrameworkCore;
|
|
||||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
|
||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using NSubstitute;
|
using NSubstitute;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
using Xunit.Abstractions;
|
||||||
|
|
||||||
namespace API.Tests.Services;
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
internal class MockReadingItemService : IReadingItemService
|
public class MockReadingItemService : IReadingItemService
|
||||||
{
|
{
|
||||||
private readonly IDefaultParser _defaultParser;
|
private readonly BasicParser _basicParser;
|
||||||
|
private readonly ComicVineParser _comicVineParser;
|
||||||
|
private readonly ImageParser _imageParser;
|
||||||
|
private readonly BookParser _bookParser;
|
||||||
|
private readonly PdfParser _pdfParser;
|
||||||
|
|
||||||
public MockReadingItemService(IDefaultParser defaultParser)
|
public MockReadingItemService(IDirectoryService directoryService, IBookService bookService)
|
||||||
{
|
{
|
||||||
_defaultParser = defaultParser;
|
_imageParser = new ImageParser(directoryService);
|
||||||
|
_basicParser = new BasicParser(directoryService, _imageParser);
|
||||||
|
_bookParser = new BookParser(directoryService, bookService, _basicParser);
|
||||||
|
_comicVineParser = new ComicVineParser(directoryService);
|
||||||
|
_pdfParser = new PdfParser(directoryService);
|
||||||
}
|
}
|
||||||
|
|
||||||
public ComicInfo GetComicInfo(string filePath)
|
public ComicInfo GetComicInfo(string filePath)
|
||||||
|
|
@ -54,99 +58,57 @@ internal class MockReadingItemService : IReadingItemService
|
||||||
throw new NotImplementedException();
|
throw new NotImplementedException();
|
||||||
}
|
}
|
||||||
|
|
||||||
public ParserInfo Parse(string path, string rootPath, LibraryType type)
|
public ParserInfo Parse(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata)
|
||||||
{
|
{
|
||||||
return _defaultParser.Parse(path, rootPath, type);
|
if (_comicVineParser.IsApplicable(path, type))
|
||||||
|
{
|
||||||
|
return _comicVineParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
|
||||||
|
}
|
||||||
|
if (_imageParser.IsApplicable(path, type))
|
||||||
|
{
|
||||||
|
return _imageParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
|
||||||
|
}
|
||||||
|
if (_bookParser.IsApplicable(path, type))
|
||||||
|
{
|
||||||
|
return _bookParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
|
||||||
|
}
|
||||||
|
if (_pdfParser.IsApplicable(path, type))
|
||||||
|
{
|
||||||
|
return _pdfParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
|
||||||
|
}
|
||||||
|
if (_basicParser.IsApplicable(path, type))
|
||||||
|
{
|
||||||
|
return _basicParser.Parse(path, rootPath, libraryRoot, type, enableMetadata, GetComicInfo(path));
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
|
public ParserInfo ParseFile(string path, string rootPath, string libraryRoot, LibraryType type, bool enableMetadata)
|
||||||
{
|
{
|
||||||
return _defaultParser.Parse(path, rootPath, type);
|
return Parse(path, rootPath, libraryRoot, type, enableMetadata);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public class ParseScannedFilesTests
|
public class ParseScannedFilesTests : AbstractDbTest
|
||||||
{
|
{
|
||||||
private readonly ILogger<ParseScannedFiles> _logger = Substitute.For<ILogger<ParseScannedFiles>>();
|
private readonly ILogger<ParseScannedFiles> _logger = Substitute.For<ILogger<ParseScannedFiles>>();
|
||||||
private readonly IUnitOfWork _unitOfWork;
|
private readonly ScannerHelper _scannerHelper;
|
||||||
|
|
||||||
private readonly DbConnection _connection;
|
public ParseScannedFilesTests(ITestOutputHelper testOutputHelper)
|
||||||
private readonly DataContext _context;
|
|
||||||
|
|
||||||
private const string CacheDirectory = "C:/kavita/config/cache/";
|
|
||||||
private const string CoverImageDirectory = "C:/kavita/config/covers/";
|
|
||||||
private const string BackupDirectory = "C:/kavita/config/backups/";
|
|
||||||
private const string DataDirectory = "C:/data/";
|
|
||||||
|
|
||||||
public ParseScannedFilesTests()
|
|
||||||
{
|
{
|
||||||
var contextOptions = new DbContextOptionsBuilder()
|
|
||||||
.UseSqlite(CreateInMemoryDatabase())
|
|
||||||
.Options;
|
|
||||||
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
|
|
||||||
|
|
||||||
_context = new DataContext(contextOptions);
|
|
||||||
Task.Run(SeedDb).GetAwaiter().GetResult();
|
|
||||||
|
|
||||||
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
|
|
||||||
|
|
||||||
// Since ProcessFile relies on _readingItemService, we can implement our own versions of _readingItemService so we have control over how the calls work
|
// Since ProcessFile relies on _readingItemService, we can implement our own versions of _readingItemService so we have control over how the calls work
|
||||||
|
GlobalConfiguration.Configuration.UseInMemoryStorage();
|
||||||
|
_scannerHelper = new ScannerHelper(UnitOfWork, testOutputHelper);
|
||||||
}
|
}
|
||||||
|
|
||||||
#region Setup
|
protected override async Task ResetDb()
|
||||||
|
|
||||||
private static DbConnection CreateInMemoryDatabase()
|
|
||||||
{
|
{
|
||||||
var connection = new SqliteConnection("Filename=:memory:");
|
Context.Series.RemoveRange(Context.Series.ToList());
|
||||||
|
|
||||||
connection.Open();
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
return connection;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private async Task<bool> SeedDb()
|
|
||||||
{
|
|
||||||
await _context.Database.MigrateAsync();
|
|
||||||
var filesystem = CreateFileSystem();
|
|
||||||
|
|
||||||
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
|
|
||||||
|
|
||||||
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
|
|
||||||
setting.Value = CacheDirectory;
|
|
||||||
|
|
||||||
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
|
|
||||||
setting.Value = BackupDirectory;
|
|
||||||
|
|
||||||
_context.ServerSetting.Update(setting);
|
|
||||||
|
|
||||||
_context.Library.Add(new LibraryBuilder("Manga")
|
|
||||||
.WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
|
|
||||||
.Build());
|
|
||||||
return await _context.SaveChangesAsync() > 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async Task ResetDB()
|
|
||||||
{
|
|
||||||
_context.Series.RemoveRange(_context.Series.ToList());
|
|
||||||
|
|
||||||
await _context.SaveChangesAsync();
|
|
||||||
}
|
|
||||||
|
|
||||||
private static MockFileSystem CreateFileSystem()
|
|
||||||
{
|
|
||||||
var fileSystem = new MockFileSystem();
|
|
||||||
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
|
|
||||||
fileSystem.AddDirectory("C:/kavita/config/");
|
|
||||||
fileSystem.AddDirectory(CacheDirectory);
|
|
||||||
fileSystem.AddDirectory(CoverImageDirectory);
|
|
||||||
fileSystem.AddDirectory(BackupDirectory);
|
|
||||||
fileSystem.AddDirectory(DataDirectory);
|
|
||||||
|
|
||||||
return fileSystem;
|
|
||||||
}
|
|
||||||
|
|
||||||
#endregion
|
|
||||||
|
|
||||||
#region MergeName
|
#region MergeName
|
||||||
|
|
||||||
// NOTE: I don't think I can test MergeName as it relies on Tracking Files, which is more complicated than I need
|
// NOTE: I don't think I can test MergeName as it relies on Tracking Files, which is more complicated than I need
|
||||||
|
|
@ -219,48 +181,45 @@ public class ParseScannedFilesTests
|
||||||
|
|
||||||
#region ScanLibrariesForSeries
|
#region ScanLibrariesForSeries
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Test that when a folder has 2 series with a localizedSeries, they combine into one final series
|
||||||
|
/// </summary>
|
||||||
|
// [Fact]
|
||||||
|
// public async Task ScanLibrariesForSeries_ShouldCombineSeries()
|
||||||
|
// {
|
||||||
|
// // TODO: Implement these unit tests
|
||||||
|
// }
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task ScanLibrariesForSeries_ShouldFindFiles()
|
public async Task ScanLibrariesForSeries_ShouldFindFiles()
|
||||||
{
|
{
|
||||||
var fileSystem = new MockFileSystem();
|
var fileSystem = new MockFileSystem();
|
||||||
fileSystem.AddDirectory("C:/Data/");
|
fileSystem.AddDirectory(Root + "Data/");
|
||||||
fileSystem.AddFile("C:/Data/Accel World v1.cbz", new MockFileData(string.Empty));
|
fileSystem.AddFile(Root + "Data/Accel World v1.cbz", new MockFileData(string.Empty));
|
||||||
fileSystem.AddFile("C:/Data/Accel World v2.cbz", new MockFileData(string.Empty));
|
fileSystem.AddFile(Root + "Data/Accel World v2.cbz", new MockFileData(string.Empty));
|
||||||
fileSystem.AddFile("C:/Data/Accel World v2.pdf", new MockFileData(string.Empty));
|
fileSystem.AddFile(Root + "Data/Accel World v2.pdf", new MockFileData(string.Empty));
|
||||||
fileSystem.AddFile("C:/Data/Nothing.pdf", new MockFileData(string.Empty));
|
fileSystem.AddFile(Root + "Data/Nothing.pdf", new MockFileData(string.Empty));
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
|
|
||||||
|
|
||||||
Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
|
|
||||||
{
|
|
||||||
var skippedScan = parsedInfo.Item1;
|
|
||||||
var parsedFiles = parsedInfo.Item2;
|
|
||||||
if (parsedFiles.Count == 0) return Task.CompletedTask;
|
|
||||||
|
|
||||||
var foundParsedSeries = new ParsedSeries()
|
|
||||||
{
|
|
||||||
Name = parsedFiles.First().Series,
|
|
||||||
NormalizedName = parsedFiles.First().Series.ToNormalized(),
|
|
||||||
Format = parsedFiles.First().Format
|
|
||||||
};
|
|
||||||
|
|
||||||
parsedSeries.Add(foundParsedSeries, parsedFiles);
|
|
||||||
return Task.CompletedTask;
|
|
||||||
}
|
|
||||||
|
|
||||||
var library =
|
var library =
|
||||||
await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||||
|
Assert.NotNull(library);
|
||||||
|
|
||||||
library.Type = LibraryType.Manga;
|
library.Type = LibraryType.Manga;
|
||||||
await psf.ScanLibrariesForSeries(library, new List<string>() {"C:/Data/"}, false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), TrackFiles);
|
var parsedSeries = await psf.ScanLibrariesForSeries(library, new List<string>() {Root + "Data/"}, false,
|
||||||
|
await UnitOfWork.SeriesRepository.GetFolderPathMap(1));
|
||||||
|
|
||||||
|
|
||||||
Assert.Equal(3, parsedSeries.Values.Count);
|
// Assert.Equal(3, parsedSeries.Values.Count);
|
||||||
Assert.NotEmpty(parsedSeries.Keys.Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
|
// Assert.NotEmpty(parsedSeries.Keys.Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
|
||||||
|
|
||||||
|
Assert.Equal(3, parsedSeries.Count);
|
||||||
|
Assert.NotEmpty(parsedSeries.Select(p => p.ParsedSeries).Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
|
||||||
}
|
}
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
@ -289,18 +248,16 @@ public class ParseScannedFilesTests
|
||||||
var fileSystem = CreateTestFilesystem();
|
var fileSystem = CreateTestFilesystem();
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
var directoriesSeen = new HashSet<string>();
|
var directoriesSeen = new HashSet<string>();
|
||||||
var library =
|
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||||
await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
|
||||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||||
await psf.ProcessFiles("C:/Data/", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),
|
var scanResults = await psf.ScanFiles("C:/Data/", true, await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||||
(files, directoryPath) =>
|
foreach (var scanResult in scanResults)
|
||||||
{
|
{
|
||||||
directoriesSeen.Add(directoryPath);
|
directoriesSeen.Add(scanResult.Folder);
|
||||||
return Task.CompletedTask;
|
}
|
||||||
}, library);
|
|
||||||
|
|
||||||
Assert.Equal(2, directoriesSeen.Count);
|
Assert.Equal(2, directoriesSeen.Count);
|
||||||
}
|
}
|
||||||
|
|
@ -311,16 +268,20 @@ public class ParseScannedFilesTests
|
||||||
var fileSystem = CreateTestFilesystem();
|
var fileSystem = CreateTestFilesystem();
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
|
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||||
|
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||||
|
Assert.NotNull(library);
|
||||||
|
|
||||||
var directoriesSeen = new HashSet<string>();
|
var directoriesSeen = new HashSet<string>();
|
||||||
await psf.ProcessFiles("C:/Data/", false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),
|
var scanResults = await psf.ScanFiles("C:/Data/", false,
|
||||||
(files, directoryPath) =>
|
await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||||
|
|
||||||
|
foreach (var scanResult in scanResults)
|
||||||
{
|
{
|
||||||
directoriesSeen.Add(directoryPath);
|
directoriesSeen.Add(scanResult.Folder);
|
||||||
return Task.CompletedTask;
|
}
|
||||||
}, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
|
||||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes));
|
|
||||||
|
|
||||||
Assert.Single(directoriesSeen);
|
Assert.Single(directoriesSeen);
|
||||||
directoriesSeen.TryGetValue("C:/Data/", out var actual);
|
directoriesSeen.TryGetValue("C:/Data/", out var actual);
|
||||||
|
|
@ -342,18 +303,14 @@ public class ParseScannedFilesTests
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
var callCount = 0;
|
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||||
await psf.ProcessFiles("C:/Data", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),(files, folderPath) =>
|
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||||
{
|
Assert.NotNull(library);
|
||||||
callCount++;
|
var scanResults = await psf.ScanFiles("C:/Data", true, await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||||
|
|
||||||
return Task.CompletedTask;
|
Assert.Equal(2, scanResults.Count);
|
||||||
}, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
|
||||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes));
|
|
||||||
|
|
||||||
Assert.Equal(2, callCount);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -375,18 +332,235 @@ public class ParseScannedFilesTests
|
||||||
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
|
||||||
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
new MockReadingItemService(new DefaultParser(ds)), Substitute.For<IEventHub>());
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
var callCount = 0;
|
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
||||||
await psf.ProcessFiles("C:/Data", false, await _unitOfWork.SeriesRepository.GetFolderPathMap(1),(files, folderPath) =>
|
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
|
||||||
{
|
Assert.NotNull(library);
|
||||||
callCount++;
|
var scanResults = await psf.ScanFiles("C:/Data", false,
|
||||||
return Task.CompletedTask;
|
await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
|
||||||
}, await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
|
|
||||||
LibraryIncludes.Folders | LibraryIncludes.FileTypes));
|
|
||||||
|
|
||||||
Assert.Equal(1, callCount);
|
Assert.Single(scanResults);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
|
// TODO: Add back in (removed for Hotfix v0.8.5.x)
|
||||||
|
//[Fact]
|
||||||
|
public async Task HasSeriesFolderNotChangedSinceLastScan_AllSeriesFoldersHaveChanges()
|
||||||
|
{
|
||||||
|
const string testcase = "Subfolders always scanning all series changes - Manga.json";
|
||||||
|
var infos = new Dictionary<string, ComicInfo>();
|
||||||
|
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||||
|
var testDirectoryPath = library.Folders.First().Path;
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Update(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var fs = new FileSystem();
|
||||||
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||||
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
|
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||||
|
await scanner.ScanLibrary(library.Id);
|
||||||
|
|
||||||
|
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||||
|
Assert.NotNull(postLib);
|
||||||
|
Assert.Equal(4, postLib.Series.Count);
|
||||||
|
|
||||||
|
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||||
|
Assert.Equal(2, spiceAndWolf.Volumes.Count);
|
||||||
|
|
||||||
|
var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
|
||||||
|
Assert.Single(frieren.Volumes);
|
||||||
|
|
||||||
|
var executionerAndHerWayOfLife = postLib.Series.First(x => x.Name == "The Executioner and Her Way of Life");
|
||||||
|
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
|
||||||
|
|
||||||
|
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||||
|
|
||||||
|
// Add a new chapter to a volume of the series, and scan. Validate that only, and all directories of this
|
||||||
|
// series are marked as HasChanged
|
||||||
|
var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "The Executioner and Her Way of Life"),
|
||||||
|
"The Executioner and Her Way of Life Vol. 1");
|
||||||
|
File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0001.cbz"),
|
||||||
|
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0002.cbz"));
|
||||||
|
|
||||||
|
// 4 series, of which 2 have volumes as directories
|
||||||
|
var folderMap = await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id);
|
||||||
|
Assert.Equal(6, folderMap.Count);
|
||||||
|
|
||||||
|
var res = await psf.ScanFiles(testDirectoryPath, true, folderMap, postLib);
|
||||||
|
var changes = res.Where(sc => sc.HasChanged).ToList();
|
||||||
|
Assert.Equal(2, changes.Count);
|
||||||
|
// Only volumes of The Executioner and Her Way of Life should be marked as HasChanged (Spice and Wolf also has 2 volumes dirs)
|
||||||
|
Assert.Equal(2, changes.Count(sc => sc.Folder.Contains("The Executioner and Her Way of Life")));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task HasSeriesFolderNotChangedSinceLastScan_PublisherLayout()
|
||||||
|
{
|
||||||
|
const string testcase = "Subfolder always scanning fix publisher layout - Comic.json";
|
||||||
|
var infos = new Dictionary<string, ComicInfo>();
|
||||||
|
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||||
|
var testDirectoryPath = library.Folders.First().Path;
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Update(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var fs = new FileSystem();
|
||||||
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||||
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
|
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||||
|
await scanner.ScanLibrary(library.Id);
|
||||||
|
|
||||||
|
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||||
|
Assert.NotNull(postLib);
|
||||||
|
Assert.Equal(4, postLib.Series.Count);
|
||||||
|
|
||||||
|
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||||
|
Assert.Equal(2, spiceAndWolf.Volumes.Count);
|
||||||
|
|
||||||
|
var frieren = postLib.Series.First(x => x.Name == "Frieren - Beyond Journey's End");
|
||||||
|
Assert.Equal(2, frieren.Volumes.Count);
|
||||||
|
|
||||||
|
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||||
|
|
||||||
|
// Add a volume to a series, and scan. Ensure only this series is marked as HasChanged
|
||||||
|
var executionerCopyDir = Path.Join(Path.Join(testDirectoryPath, "YenPress"), "The Executioner and Her Way of Life");
|
||||||
|
File.Copy(Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1.cbz"),
|
||||||
|
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 2.cbz"));
|
||||||
|
|
||||||
|
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||||
|
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||||
|
var changes = res.Count(sc => sc.HasChanged);
|
||||||
|
Assert.Equal(1, changes);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Add back in (removed for Hotfix v0.8.5.x)
|
||||||
|
//[Fact]
|
||||||
|
public async Task SubFoldersNoSubFolders_SkipAll()
|
||||||
|
{
|
||||||
|
const string testcase = "Subfolders and files at root - Manga.json";
|
||||||
|
var infos = new Dictionary<string, ComicInfo>();
|
||||||
|
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||||
|
var testDirectoryPath = library.Folders.First().Path;
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Update(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var fs = new FileSystem();
|
||||||
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||||
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
|
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||||
|
await scanner.ScanLibrary(library.Id);
|
||||||
|
|
||||||
|
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||||
|
Assert.NotNull(postLib);
|
||||||
|
Assert.Single(postLib.Series);
|
||||||
|
|
||||||
|
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||||
|
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||||
|
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||||
|
|
||||||
|
// Needs to be actual time as the write time is now, so if we set LastFolderChecked in the past
|
||||||
|
// it'll always a scan as it was changed since the last scan.
|
||||||
|
await Task.Delay(1100); // Ensure at least one second has passed since library scan
|
||||||
|
|
||||||
|
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||||
|
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||||
|
Assert.DoesNotContain(res, sc => sc.HasChanged);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInRoot()
|
||||||
|
{
|
||||||
|
const string testcase = "Subfolders and files at root - Manga.json";
|
||||||
|
var infos = new Dictionary<string, ComicInfo>();
|
||||||
|
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||||
|
var testDirectoryPath = library.Folders.First().Path;
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Update(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var fs = new FileSystem();
|
||||||
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||||
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
|
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||||
|
await scanner.ScanLibrary(library.Id);
|
||||||
|
|
||||||
|
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||||
|
Assert.NotNull(postLib);
|
||||||
|
Assert.Single(postLib.Series);
|
||||||
|
|
||||||
|
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||||
|
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||||
|
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||||
|
|
||||||
|
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
|
||||||
|
Context.Series.Update(spiceAndWolf);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
// Add file at series root
|
||||||
|
var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
|
||||||
|
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
|
||||||
|
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
|
||||||
|
|
||||||
|
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||||
|
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||||
|
var changes = res.Count(sc => sc.HasChanged);
|
||||||
|
Assert.Equal(2, changes);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInSubFolder()
|
||||||
|
{
|
||||||
|
const string testcase = "Subfolders and files at root - Manga.json";
|
||||||
|
var infos = new Dictionary<string, ComicInfo>();
|
||||||
|
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
|
||||||
|
var testDirectoryPath = library.Folders.First().Path;
|
||||||
|
|
||||||
|
UnitOfWork.LibraryRepository.Update(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var fs = new FileSystem();
|
||||||
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
|
||||||
|
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
|
||||||
|
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
|
||||||
|
|
||||||
|
var scanner = _scannerHelper.CreateServices(ds, fs);
|
||||||
|
await scanner.ScanLibrary(library.Id);
|
||||||
|
|
||||||
|
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
|
||||||
|
Assert.NotNull(postLib);
|
||||||
|
Assert.Single(postLib.Series);
|
||||||
|
|
||||||
|
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
|
||||||
|
Assert.Equal(3, spiceAndWolf.Volumes.Count);
|
||||||
|
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
|
||||||
|
|
||||||
|
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
|
||||||
|
Context.Series.Update(spiceAndWolf);
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
// Add file in subfolder
|
||||||
|
var spiceAndWolfDir = Path.Join(Path.Join(testDirectoryPath, "Spice and Wolf"), "Spice and Wolf Vol. 3");
|
||||||
|
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0011.cbz"),
|
||||||
|
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
|
||||||
|
|
||||||
|
var res = await psf.ScanFiles(testDirectoryPath, true,
|
||||||
|
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
|
||||||
|
var changes = res.Count(sc => sc.HasChanged);
|
||||||
|
Assert.Equal(2, changes);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
286
API.Tests/Services/PersonServiceTests.cs
Normal file
286
API.Tests/Services/PersonServiceTests.cs
Normal file
|
|
@ -0,0 +1,286 @@
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.Data.Repositories;
|
||||||
|
using API.Entities;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Entities.Person;
|
||||||
|
using API.Extensions;
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using API.Services;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
|
public class PersonServiceTests: AbstractDbTest
|
||||||
|
{
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task PersonMerge_KeepNonEmptyMetadata()
|
||||||
|
{
|
||||||
|
var ps = new PersonService(UnitOfWork);
|
||||||
|
|
||||||
|
var person1 = new Person
|
||||||
|
{
|
||||||
|
Name = "Casey Delores",
|
||||||
|
NormalizedName = "Casey Delores".ToNormalized(),
|
||||||
|
HardcoverId = "ANonEmptyId",
|
||||||
|
MalId = 12,
|
||||||
|
};
|
||||||
|
|
||||||
|
var person2 = new Person
|
||||||
|
{
|
||||||
|
Name= "Delores Casey",
|
||||||
|
NormalizedName = "Delores Casey".ToNormalized(),
|
||||||
|
Description = "Hi, I'm Delores Casey!",
|
||||||
|
Aliases = [new PersonAliasBuilder("Casey, Delores").Build()],
|
||||||
|
AniListId = 27,
|
||||||
|
};
|
||||||
|
|
||||||
|
UnitOfWork.PersonRepository.Attach(person1);
|
||||||
|
UnitOfWork.PersonRepository.Attach(person2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await ps.MergePeopleAsync(person2, person1);
|
||||||
|
|
||||||
|
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.Single(allPeople);
|
||||||
|
|
||||||
|
var person = allPeople[0];
|
||||||
|
Assert.Equal("Casey Delores", person.Name);
|
||||||
|
Assert.NotEmpty(person.Description);
|
||||||
|
Assert.Equal(27, person.AniListId);
|
||||||
|
Assert.NotNull(person.HardcoverId);
|
||||||
|
Assert.NotEmpty(person.HardcoverId);
|
||||||
|
Assert.Contains(person.Aliases, pa => pa.Alias == "Delores Casey");
|
||||||
|
Assert.Contains(person.Aliases, pa => pa.Alias == "Casey, Delores");
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task PersonMerge_MergedPersonDestruction()
|
||||||
|
{
|
||||||
|
var ps = new PersonService(UnitOfWork);
|
||||||
|
|
||||||
|
var person1 = new Person
|
||||||
|
{
|
||||||
|
Name = "Casey Delores",
|
||||||
|
NormalizedName = "Casey Delores".ToNormalized(),
|
||||||
|
};
|
||||||
|
|
||||||
|
var person2 = new Person
|
||||||
|
{
|
||||||
|
Name = "Delores Casey",
|
||||||
|
NormalizedName = "Delores Casey".ToNormalized(),
|
||||||
|
};
|
||||||
|
|
||||||
|
UnitOfWork.PersonRepository.Attach(person1);
|
||||||
|
UnitOfWork.PersonRepository.Attach(person2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await ps.MergePeopleAsync(person2, person1);
|
||||||
|
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.Single(allPeople);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task PersonMerge_RetentionChapters()
|
||||||
|
{
|
||||||
|
var ps = new PersonService(UnitOfWork);
|
||||||
|
|
||||||
|
var library = new LibraryBuilder("My Library").Build();
|
||||||
|
UnitOfWork.LibraryRepository.Add(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var user = new AppUserBuilder("Amelia", "amelia@localhost")
|
||||||
|
.WithLibrary(library).Build();
|
||||||
|
UnitOfWork.UserRepository.Add(user);
|
||||||
|
|
||||||
|
var person = new PersonBuilder("Jillian Cowan").Build();
|
||||||
|
|
||||||
|
var person2 = new PersonBuilder("Cowan Jillian").Build();
|
||||||
|
|
||||||
|
var chapter = new ChapterBuilder("1")
|
||||||
|
.WithPerson(person, PersonRole.Editor)
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var chapter2 = new ChapterBuilder("2")
|
||||||
|
.WithPerson(person2, PersonRole.Editor)
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithLibraryId(library.Id)
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(chapter)
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var series2 = new SeriesBuilder("Test 2")
|
||||||
|
.WithLibraryId(library.Id)
|
||||||
|
.WithVolume(new VolumeBuilder("2")
|
||||||
|
.WithChapter(chapter2)
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.SeriesRepository.Add(series);
|
||||||
|
UnitOfWork.SeriesRepository.Add(series2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await ps.MergePeopleAsync(person2, person);
|
||||||
|
|
||||||
|
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.Single(allPeople);
|
||||||
|
var mergedPerson = allPeople[0];
|
||||||
|
|
||||||
|
Assert.Equal("Jillian Cowan", mergedPerson.Name);
|
||||||
|
|
||||||
|
var chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(1, 1, PersonRole.Editor);
|
||||||
|
Assert.Equal(2, chapters.Count());
|
||||||
|
|
||||||
|
chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(1, ChapterIncludes.People);
|
||||||
|
Assert.NotNull(chapter);
|
||||||
|
Assert.Single(chapter.People);
|
||||||
|
|
||||||
|
chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(2, ChapterIncludes.People);
|
||||||
|
Assert.NotNull(chapter2);
|
||||||
|
Assert.Single(chapter2.People);
|
||||||
|
|
||||||
|
Assert.Equal(chapter.People.First().PersonId, chapter2.People.First().PersonId);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task PersonMerge_NoDuplicateChaptersOrSeries()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
var ps = new PersonService(UnitOfWork);
|
||||||
|
|
||||||
|
var library = new LibraryBuilder("My Library").Build();
|
||||||
|
UnitOfWork.LibraryRepository.Add(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var user = new AppUserBuilder("Amelia", "amelia@localhost")
|
||||||
|
.WithLibrary(library).Build();
|
||||||
|
UnitOfWork.UserRepository.Add(user);
|
||||||
|
|
||||||
|
var person = new PersonBuilder("Jillian Cowan").Build();
|
||||||
|
|
||||||
|
var person2 = new PersonBuilder("Cowan Jillian").Build();
|
||||||
|
|
||||||
|
var chapter = new ChapterBuilder("1")
|
||||||
|
.WithPerson(person, PersonRole.Editor)
|
||||||
|
.WithPerson(person2, PersonRole.Colorist)
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var chapter2 = new ChapterBuilder("2")
|
||||||
|
.WithPerson(person2, PersonRole.Editor)
|
||||||
|
.WithPerson(person, PersonRole.Editor)
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var series = new SeriesBuilder("Test 1")
|
||||||
|
.WithLibraryId(library.Id)
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(chapter)
|
||||||
|
.Build())
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithPerson(person, PersonRole.Editor)
|
||||||
|
.WithPerson(person2, PersonRole.Editor)
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var series2 = new SeriesBuilder("Test 2")
|
||||||
|
.WithLibraryId(library.Id)
|
||||||
|
.WithVolume(new VolumeBuilder("2")
|
||||||
|
.WithChapter(chapter2)
|
||||||
|
.Build())
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder()
|
||||||
|
.WithPerson(person, PersonRole.Editor)
|
||||||
|
.WithPerson(person2, PersonRole.Colorist)
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
UnitOfWork.SeriesRepository.Add(series);
|
||||||
|
UnitOfWork.SeriesRepository.Add(series2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await ps.MergePeopleAsync(person2, person);
|
||||||
|
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
|
||||||
|
Assert.Single(allPeople);
|
||||||
|
|
||||||
|
var mergedPerson = await UnitOfWork.PersonRepository.GetPersonById(person.Id, PersonIncludes.All);
|
||||||
|
Assert.NotNull(mergedPerson);
|
||||||
|
Assert.Equal(3, mergedPerson.ChapterPeople.Count);
|
||||||
|
Assert.Equal(3, mergedPerson.SeriesMetadataPeople.Count);
|
||||||
|
|
||||||
|
chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(chapter.Id, ChapterIncludes.People);
|
||||||
|
Assert.NotNull(chapter);
|
||||||
|
Assert.Equal(2, chapter.People.Count);
|
||||||
|
Assert.Single(chapter.People.Select(p => p.Person.Id).Distinct());
|
||||||
|
Assert.Contains(chapter.People, p => p.Role == PersonRole.Editor);
|
||||||
|
Assert.Contains(chapter.People, p => p.Role == PersonRole.Colorist);
|
||||||
|
|
||||||
|
chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(chapter2.Id, ChapterIncludes.People);
|
||||||
|
Assert.NotNull(chapter2);
|
||||||
|
Assert.Single(chapter2.People);
|
||||||
|
Assert.Contains(chapter2.People, p => p.Role == PersonRole.Editor);
|
||||||
|
Assert.DoesNotContain(chapter2.People, p => p.Role == PersonRole.Colorist);
|
||||||
|
|
||||||
|
series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(series.Id, SeriesIncludes.Metadata);
|
||||||
|
Assert.NotNull(series);
|
||||||
|
Assert.Single(series.Metadata.People);
|
||||||
|
Assert.Contains(series.Metadata.People, p => p.Role == PersonRole.Editor);
|
||||||
|
Assert.DoesNotContain(series.Metadata.People, p => p.Role == PersonRole.Colorist);
|
||||||
|
|
||||||
|
series2 = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(series2.Id, SeriesIncludes.Metadata);
|
||||||
|
Assert.NotNull(series2);
|
||||||
|
Assert.Equal(2, series2.Metadata.People.Count);
|
||||||
|
Assert.Contains(series2.Metadata.People, p => p.Role == PersonRole.Editor);
|
||||||
|
Assert.Contains(series2.Metadata.People, p => p.Role == PersonRole.Colorist);
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task PersonAddAlias_NoOverlap()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
UnitOfWork.PersonRepository.Attach(new PersonBuilder("Jillian Cowan").Build());
|
||||||
|
UnitOfWork.PersonRepository.Attach(new PersonBuilder("Jilly Cowan").WithAlias("Jolly Cowan").Build());
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var ps = new PersonService(UnitOfWork);
|
||||||
|
|
||||||
|
var person1 = await UnitOfWork.PersonRepository.GetPersonByNameOrAliasAsync("Jillian Cowan");
|
||||||
|
var person2 = await UnitOfWork.PersonRepository.GetPersonByNameOrAliasAsync("Jilly Cowan");
|
||||||
|
Assert.NotNull(person1);
|
||||||
|
Assert.NotNull(person2);
|
||||||
|
|
||||||
|
// Overlap on Name
|
||||||
|
var success = await ps.UpdatePersonAliasesAsync(person1, ["Jilly Cowan"]);
|
||||||
|
Assert.False(success);
|
||||||
|
|
||||||
|
// Overlap on alias
|
||||||
|
success = await ps.UpdatePersonAliasesAsync(person1, ["Jolly Cowan"]);
|
||||||
|
Assert.False(success);
|
||||||
|
|
||||||
|
// No overlap
|
||||||
|
success = await ps.UpdatePersonAliasesAsync(person2, ["Jilly Joy Cowan"]);
|
||||||
|
Assert.True(success);
|
||||||
|
|
||||||
|
// Some overlap
|
||||||
|
success = await ps.UpdatePersonAliasesAsync(person1, ["Jolly Cowan", "Jilly Joy Cowan"]);
|
||||||
|
Assert.False(success);
|
||||||
|
|
||||||
|
// Some overlap
|
||||||
|
success = await ps.UpdatePersonAliasesAsync(person1, ["Jolly Cowan", "Jilly Joy Cowan"]);
|
||||||
|
Assert.False(success);
|
||||||
|
|
||||||
|
Assert.Single(person2.Aliases);
|
||||||
|
}
|
||||||
|
|
||||||
|
protected override async Task ResetDb()
|
||||||
|
{
|
||||||
|
Context.Person.RemoveRange(Context.Person.ToList());
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,23 +1,8 @@
|
||||||
using System.IO;
|
namespace API.Tests.Services;
|
||||||
using API.Data;
|
|
||||||
using API.Data.Metadata;
|
|
||||||
using API.Entities;
|
|
||||||
using API.Entities.Enums;
|
|
||||||
using API.Helpers;
|
|
||||||
using API.Helpers.Builders;
|
|
||||||
using API.Services;
|
|
||||||
using API.Services.Tasks.Metadata;
|
|
||||||
using API.Services.Tasks.Scanner;
|
|
||||||
using API.SignalR;
|
|
||||||
using Microsoft.Extensions.Logging;
|
|
||||||
using NSubstitute;
|
|
||||||
using Xunit;
|
|
||||||
|
|
||||||
namespace API.Tests.Services;
|
|
||||||
|
|
||||||
public class ProcessSeriesTests
|
public class ProcessSeriesTests
|
||||||
{
|
{
|
||||||
|
// TODO: Implement
|
||||||
|
|
||||||
#region UpdateSeriesMetadata
|
#region UpdateSeriesMetadata
|
||||||
|
|
||||||
|
|
|
||||||
189
API.Tests/Services/RatingServiceTests.cs
Normal file
189
API.Tests/Services/RatingServiceTests.cs
Normal file
|
|
@ -0,0 +1,189 @@
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.Data.Repositories;
|
||||||
|
using API.DTOs;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Plus;
|
||||||
|
using Hangfire;
|
||||||
|
using Hangfire.InMemory;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
|
public class RatingServiceTests: AbstractDbTest
|
||||||
|
{
|
||||||
|
private readonly RatingService _ratingService;
|
||||||
|
|
||||||
|
public RatingServiceTests()
|
||||||
|
{
|
||||||
|
_ratingService = new RatingService(UnitOfWork, Substitute.For<IScrobblingService>(), Substitute.For<ILogger<RatingService>>());
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateRating_ShouldSetRating()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
Context.Library.Add(new LibraryBuilder("Test LIb")
|
||||||
|
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
|
||||||
|
.WithSeries(new SeriesBuilder("Test")
|
||||||
|
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build());
|
||||||
|
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
|
||||||
|
|
||||||
|
JobStorage.Current = new InMemoryStorage();
|
||||||
|
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||||
|
{
|
||||||
|
SeriesId = 1,
|
||||||
|
UserRating = 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
Assert.True(result);
|
||||||
|
|
||||||
|
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))!
|
||||||
|
.Ratings;
|
||||||
|
Assert.NotEmpty(ratings);
|
||||||
|
Assert.Equal(3, ratings.First().Rating);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateRating_ShouldUpdateExistingRating()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
Context.Library.Add(new LibraryBuilder("Test LIb")
|
||||||
|
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
|
||||||
|
.WithSeries(new SeriesBuilder("Test")
|
||||||
|
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build());
|
||||||
|
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
|
||||||
|
|
||||||
|
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||||
|
{
|
||||||
|
SeriesId = 1,
|
||||||
|
UserRating = 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
Assert.True(result);
|
||||||
|
|
||||||
|
JobStorage.Current = new InMemoryStorage();
|
||||||
|
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))
|
||||||
|
.Ratings;
|
||||||
|
Assert.NotEmpty(ratings);
|
||||||
|
Assert.Equal(3, ratings.First().Rating);
|
||||||
|
|
||||||
|
// Update the DB again
|
||||||
|
|
||||||
|
var result2 = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||||
|
{
|
||||||
|
SeriesId = 1,
|
||||||
|
UserRating = 5,
|
||||||
|
});
|
||||||
|
|
||||||
|
Assert.True(result2);
|
||||||
|
|
||||||
|
var ratings2 = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))
|
||||||
|
.Ratings;
|
||||||
|
Assert.NotEmpty(ratings2);
|
||||||
|
Assert.True(ratings2.Count == 1);
|
||||||
|
Assert.Equal(5, ratings2.First().Rating);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateRating_ShouldClampRatingAt5()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
Context.Library.Add(new LibraryBuilder("Test LIb")
|
||||||
|
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
|
||||||
|
.WithSeries(new SeriesBuilder("Test")
|
||||||
|
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build());
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
|
||||||
|
|
||||||
|
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||||
|
{
|
||||||
|
SeriesId = 1,
|
||||||
|
UserRating = 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
Assert.True(result);
|
||||||
|
|
||||||
|
JobStorage.Current = new InMemoryStorage();
|
||||||
|
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007",
|
||||||
|
AppUserIncludes.Ratings)!)
|
||||||
|
.Ratings;
|
||||||
|
Assert.NotEmpty(ratings);
|
||||||
|
Assert.Equal(5, ratings.First().Rating);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateRating_ShouldReturnFalseWhenSeriesDoesntExist()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
|
||||||
|
Context.Library.Add(new LibraryBuilder("Test LIb", LibraryType.Book)
|
||||||
|
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
|
||||||
|
.WithSeries(new SeriesBuilder("Test")
|
||||||
|
|
||||||
|
.WithVolume(new VolumeBuilder("1")
|
||||||
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
|
.Build())
|
||||||
|
.Build())
|
||||||
|
.Build());
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
|
||||||
|
|
||||||
|
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
|
||||||
|
{
|
||||||
|
SeriesId = 2,
|
||||||
|
UserRating = 5,
|
||||||
|
});
|
||||||
|
|
||||||
|
Assert.False(result);
|
||||||
|
|
||||||
|
var ratings = user.Ratings;
|
||||||
|
Assert.Empty(ratings);
|
||||||
|
}
|
||||||
|
protected override async Task ResetDb()
|
||||||
|
{
|
||||||
|
Context.Series.RemoveRange(Context.Series.ToList());
|
||||||
|
Context.AppUserRating.RemoveRange(Context.AppUserRating.ToList());
|
||||||
|
Context.Genre.RemoveRange(Context.Genre.ToList());
|
||||||
|
Context.CollectionTag.RemoveRange(Context.CollectionTag.ToList());
|
||||||
|
Context.Person.RemoveRange(Context.Person.ToList());
|
||||||
|
Context.Library.RemoveRange(Context.Library.ToList());
|
||||||
|
|
||||||
|
await Context.SaveChangesAsync();
|
||||||
|
}
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load diff
|
|
@ -11,15 +11,11 @@ using API.DTOs.ReadingLists;
|
||||||
using API.DTOs.ReadingLists.CBL;
|
using API.DTOs.ReadingLists.CBL;
|
||||||
using API.Entities;
|
using API.Entities;
|
||||||
using API.Entities.Enums;
|
using API.Entities.Enums;
|
||||||
using API.Entities.Metadata;
|
|
||||||
using API.Extensions;
|
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
using API.Helpers.Builders;
|
using API.Helpers.Builders;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using API.Services.Plus;
|
using API.Services.Plus;
|
||||||
using API.Services.Tasks;
|
|
||||||
using API.SignalR;
|
using API.SignalR;
|
||||||
using API.Tests.Helpers;
|
|
||||||
using AutoMapper;
|
using AutoMapper;
|
||||||
using Microsoft.Data.Sqlite;
|
using Microsoft.Data.Sqlite;
|
||||||
using Microsoft.EntityFrameworkCore;
|
using Microsoft.EntityFrameworkCore;
|
||||||
|
|
@ -52,7 +48,9 @@ public class ReadingListServiceTests
|
||||||
var mapper = config.CreateMapper();
|
var mapper = config.CreateMapper();
|
||||||
_unitOfWork = new UnitOfWork(_context, mapper, null!);
|
_unitOfWork = new UnitOfWork(_context, mapper, null!);
|
||||||
|
|
||||||
_readingListService = new ReadingListService(_unitOfWork, Substitute.For<ILogger<ReadingListService>>(), Substitute.For<IEventHub>());
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
|
||||||
|
_readingListService = new ReadingListService(_unitOfWork, Substitute.For<ILogger<ReadingListService>>(),
|
||||||
|
Substitute.For<IEventHub>(), Substitute.For<IImageService>(), ds);
|
||||||
|
|
||||||
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(),
|
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(),
|
||||||
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
|
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
|
||||||
|
|
@ -128,7 +126,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.WithAgeRating(AgeRating.Everyone)
|
.WithAgeRating(AgeRating.Everyone)
|
||||||
.Build()
|
.Build()
|
||||||
|
|
@ -177,7 +175,7 @@ public class ReadingListServiceTests
|
||||||
.WithSeries(new SeriesBuilder("Test")
|
.WithSeries(new SeriesBuilder("Test")
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.WithAgeRating(AgeRating.Everyone)
|
.WithAgeRating(AgeRating.Everyone)
|
||||||
.Build()
|
.Build()
|
||||||
|
|
@ -236,7 +234,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.WithAgeRating(AgeRating.Everyone)
|
.WithAgeRating(AgeRating.Everyone)
|
||||||
.Build()
|
.Build()
|
||||||
|
|
@ -296,7 +294,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.WithAgeRating(AgeRating.Everyone)
|
.WithAgeRating(AgeRating.Everyone)
|
||||||
.Build()
|
.Build()
|
||||||
|
|
@ -375,7 +373,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.WithAgeRating(AgeRating.Everyone)
|
.WithAgeRating(AgeRating.Everyone)
|
||||||
.Build()
|
.Build()
|
||||||
|
|
@ -432,7 +430,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.WithAgeRating(AgeRating.Everyone)
|
.WithAgeRating(AgeRating.Everyone)
|
||||||
.Build()
|
.Build()
|
||||||
|
|
@ -497,7 +495,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.Build()
|
.Build()
|
||||||
)
|
)
|
||||||
|
|
@ -538,7 +536,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.Build()
|
.Build()
|
||||||
)
|
)
|
||||||
|
|
@ -581,6 +579,93 @@ public class ReadingListServiceTests
|
||||||
Assert.Equal(AgeRating.G, readingList.AgeRating);
|
Assert.Equal(AgeRating.G, readingList.AgeRating);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateReadingListAgeRatingForSeries()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var spiceAndWolf = new SeriesBuilder("Spice and Wolf")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
|
.WithVolumes([
|
||||||
|
new VolumeBuilder("1")
|
||||||
|
.WithChapters([
|
||||||
|
new ChapterBuilder("1").Build(),
|
||||||
|
new ChapterBuilder("2").Build(),
|
||||||
|
]).Build()
|
||||||
|
]).Build();
|
||||||
|
spiceAndWolf.Metadata.AgeRating = AgeRating.Everyone;
|
||||||
|
|
||||||
|
var othersidePicnic = new SeriesBuilder("Otherside Picnic ")
|
||||||
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
|
.WithVolumes([
|
||||||
|
new VolumeBuilder("1")
|
||||||
|
.WithChapters([
|
||||||
|
new ChapterBuilder("1").Build(),
|
||||||
|
new ChapterBuilder("2").Build(),
|
||||||
|
]).Build()
|
||||||
|
]).Build();
|
||||||
|
othersidePicnic.Metadata.AgeRating = AgeRating.Everyone;
|
||||||
|
|
||||||
|
_context.AppUser.Add(new AppUser()
|
||||||
|
{
|
||||||
|
UserName = "Amelia",
|
||||||
|
ReadingLists = new List<ReadingList>(),
|
||||||
|
Libraries = new List<Library>
|
||||||
|
{
|
||||||
|
new LibraryBuilder("Test Library", LibraryType.LightNovel)
|
||||||
|
.WithSeries(spiceAndWolf)
|
||||||
|
.WithSeries(othersidePicnic)
|
||||||
|
.Build(),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await _context.SaveChangesAsync();
|
||||||
|
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("Amelia", AppUserIncludes.ReadingLists);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
var myTestReadingList = new ReadingListBuilder("MyReadingList").Build();
|
||||||
|
var mySecondTestReadingList = new ReadingListBuilder("MySecondReadingList").Build();
|
||||||
|
var myThirdTestReadingList = new ReadingListBuilder("MyThirdReadingList").Build();
|
||||||
|
user.ReadingLists = new List<ReadingList>()
|
||||||
|
{
|
||||||
|
myTestReadingList,
|
||||||
|
mySecondTestReadingList,
|
||||||
|
myThirdTestReadingList,
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
await _readingListService.AddChaptersToReadingList(spiceAndWolf.Id, new List<int> {1, 2}, myTestReadingList);
|
||||||
|
await _readingListService.AddChaptersToReadingList(othersidePicnic.Id, new List<int> {3, 4}, myTestReadingList);
|
||||||
|
await _readingListService.AddChaptersToReadingList(spiceAndWolf.Id, new List<int> {1, 2}, myThirdTestReadingList);
|
||||||
|
await _readingListService.AddChaptersToReadingList(othersidePicnic.Id, new List<int> {3, 4}, mySecondTestReadingList);
|
||||||
|
|
||||||
|
|
||||||
|
_unitOfWork.UserRepository.Update(user);
|
||||||
|
await _unitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await _readingListService.CalculateReadingListAgeRating(myTestReadingList);
|
||||||
|
await _readingListService.CalculateReadingListAgeRating(mySecondTestReadingList);
|
||||||
|
Assert.Equal(AgeRating.Everyone, myTestReadingList.AgeRating);
|
||||||
|
Assert.Equal(AgeRating.Everyone, mySecondTestReadingList.AgeRating);
|
||||||
|
Assert.Equal(AgeRating.Everyone, myThirdTestReadingList.AgeRating);
|
||||||
|
|
||||||
|
await _readingListService.UpdateReadingListAgeRatingForSeries(othersidePicnic.Id, AgeRating.Mature);
|
||||||
|
await _unitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
// Reading lists containing Otherside Picnic are updated
|
||||||
|
myTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(1);
|
||||||
|
Assert.NotNull(myTestReadingList);
|
||||||
|
Assert.Equal(AgeRating.Mature, myTestReadingList.AgeRating);
|
||||||
|
|
||||||
|
mySecondTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(2);
|
||||||
|
Assert.NotNull(mySecondTestReadingList);
|
||||||
|
Assert.Equal(AgeRating.Mature, mySecondTestReadingList.AgeRating);
|
||||||
|
|
||||||
|
// Unrelated reading list is not updated
|
||||||
|
myThirdTestReadingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(3);
|
||||||
|
Assert.NotNull(myThirdTestReadingList);
|
||||||
|
Assert.Equal(AgeRating.Everyone, myThirdTestReadingList.AgeRating);
|
||||||
|
}
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
#region CalculateStartAndEndDates
|
#region CalculateStartAndEndDates
|
||||||
|
|
@ -593,7 +678,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.Build()
|
.Build()
|
||||||
)
|
)
|
||||||
|
|
@ -645,7 +730,7 @@ public class ReadingListServiceTests
|
||||||
.WithMetadata(new SeriesMetadataBuilder().Build())
|
.WithMetadata(new SeriesMetadataBuilder().Build())
|
||||||
.WithVolumes(new List<Volume>()
|
.WithVolumes(new List<Volume>()
|
||||||
{
|
{
|
||||||
new VolumeBuilder("0")
|
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1")
|
.WithChapter(new ChapterBuilder("1")
|
||||||
.WithReleaseDate(new DateTime(2005, 03, 01))
|
.WithReleaseDate(new DateTime(2005, 03, 01))
|
||||||
.Build()
|
.Build()
|
||||||
|
|
@ -711,6 +796,9 @@ public class ReadingListServiceTests
|
||||||
Assert.Equal("Issue #1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", "1", "The Title")));
|
Assert.Equal("Issue #1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", "1", "The Title")));
|
||||||
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", chapterTitleName: "The Title")));
|
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, "1", chapterTitleName: "The Title")));
|
||||||
Assert.Equal("The Title", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterTitleName: "The Title")));
|
Assert.Equal("The Title", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterTitleName: "The Title")));
|
||||||
|
var dto = CreateListItemDto(MangaFormat.Archive, LibraryType.Comic, chapterNumber: "The Special Title");
|
||||||
|
dto.IsSpecial = true;
|
||||||
|
Assert.Equal("The Special Title", ReadingListService.FormatTitle(dto));
|
||||||
|
|
||||||
// Book Library & Archive
|
// Book Library & Archive
|
||||||
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Book, "1")));
|
Assert.Equal("Volume 1", ReadingListService.FormatTitle(CreateListItemDto(MangaFormat.Archive, LibraryType.Book, "1")));
|
||||||
|
|
@ -736,8 +824,8 @@ public class ReadingListServiceTests
|
||||||
}
|
}
|
||||||
|
|
||||||
private static ReadingListItemDto CreateListItemDto(MangaFormat seriesFormat, LibraryType libraryType,
|
private static ReadingListItemDto CreateListItemDto(MangaFormat seriesFormat, LibraryType libraryType,
|
||||||
string volumeNumber = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume,
|
string volumeNumber = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume,
|
||||||
string chapterNumber = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
|
string chapterNumber =API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter,
|
||||||
string chapterTitleName = "")
|
string chapterTitleName = "")
|
||||||
{
|
{
|
||||||
return new ReadingListItemDto()
|
return new ReadingListItemDto()
|
||||||
|
|
@ -759,7 +847,7 @@ public class ReadingListServiceTests
|
||||||
var fablesSeries = new SeriesBuilder("Fables").Build();
|
var fablesSeries = new SeriesBuilder("Fables").Build();
|
||||||
fablesSeries.Volumes.Add(
|
fablesSeries.Volumes.Add(
|
||||||
new VolumeBuilder("1")
|
new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithName("2002")
|
.WithName("2002")
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.Build()
|
.Build()
|
||||||
|
|
@ -937,7 +1025,7 @@ public class ReadingListServiceTests
|
||||||
var fables2Series = new SeriesBuilder("Fables: The Last Castle").Build();
|
var fables2Series = new SeriesBuilder("Fables: The Last Castle").Build();
|
||||||
|
|
||||||
fablesSeries.Volumes.Add(new VolumeBuilder("1")
|
fablesSeries.Volumes.Add(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithName("2002")
|
.WithName("2002")
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
|
|
@ -945,7 +1033,7 @@ public class ReadingListServiceTests
|
||||||
.Build()
|
.Build()
|
||||||
);
|
);
|
||||||
fables2Series.Volumes.Add(new VolumeBuilder("1")
|
fables2Series.Volumes.Add(new VolumeBuilder("1")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithName("2003")
|
.WithName("2003")
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
|
|
@ -980,13 +1068,13 @@ public class ReadingListServiceTests
|
||||||
var fables2Series = new SeriesBuilder("Fablesa: The Last Castle").Build();
|
var fables2Series = new SeriesBuilder("Fablesa: The Last Castle").Build();
|
||||||
|
|
||||||
fablesSeries.Volumes.Add(new VolumeBuilder("2002")
|
fablesSeries.Volumes.Add(new VolumeBuilder("2002")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
.Build());
|
.Build());
|
||||||
fables2Series.Volumes.Add(new VolumeBuilder("2003")
|
fables2Series.Volumes.Add(new VolumeBuilder("2003")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
|
|
@ -1036,7 +1124,7 @@ public class ReadingListServiceTests
|
||||||
// Mock up our series
|
// Mock up our series
|
||||||
var fablesSeries = new SeriesBuilder("Fables")
|
var fablesSeries = new SeriesBuilder("Fables")
|
||||||
.WithVolume(new VolumeBuilder("2002")
|
.WithVolume(new VolumeBuilder("2002")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
|
|
@ -1045,7 +1133,7 @@ public class ReadingListServiceTests
|
||||||
|
|
||||||
var fables2Series = new SeriesBuilder("Fables: The Last Castle")
|
var fables2Series = new SeriesBuilder("Fables: The Last Castle")
|
||||||
.WithVolume(new VolumeBuilder("2003")
|
.WithVolume(new VolumeBuilder("2003")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
|
|
@ -1094,13 +1182,13 @@ public class ReadingListServiceTests
|
||||||
var fables2Series = new SeriesBuilder("Fables: The Last Castle").Build();
|
var fables2Series = new SeriesBuilder("Fables: The Last Castle").Build();
|
||||||
|
|
||||||
fablesSeries.Volumes.Add(new VolumeBuilder("2002")
|
fablesSeries.Volumes.Add(new VolumeBuilder("2002")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
.Build());
|
.Build());
|
||||||
fables2Series.Volumes.Add(new VolumeBuilder("2003")
|
fables2Series.Volumes.Add(new VolumeBuilder("2003")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
|
|
@ -1153,13 +1241,13 @@ public class ReadingListServiceTests
|
||||||
var fables2Series = new SeriesBuilder("Fables: The Last Castle").Build();
|
var fables2Series = new SeriesBuilder("Fables: The Last Castle").Build();
|
||||||
|
|
||||||
fablesSeries.Volumes.Add(new VolumeBuilder("2002")
|
fablesSeries.Volumes.Add(new VolumeBuilder("2002")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
.Build());
|
.Build());
|
||||||
fables2Series.Volumes.Add(new VolumeBuilder("2003")
|
fables2Series.Volumes.Add(new VolumeBuilder("2003")
|
||||||
.WithNumber(1)
|
.WithMinNumber(1)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.WithChapter(new ChapterBuilder("3").Build())
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
|
|
@ -1205,6 +1293,65 @@ public class ReadingListServiceTests
|
||||||
Assert.Equal(2, createdList.Items.First(item => item.Order == 2).ChapterId);
|
Assert.Equal(2, createdList.Items.First(item => item.Order == 2).ChapterId);
|
||||||
Assert.Equal(4, createdList.Items.First(item => item.Order == 3).ChapterId);
|
Assert.Equal(4, createdList.Items.First(item => item.Order == 3).ChapterId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// This test is about ensuring Annuals that are a separate series can be linked up properly (ComicVine)
|
||||||
|
/// </summary>
|
||||||
|
//[Fact]
|
||||||
|
public async Task CreateReadingListFromCBL_ShouldCreateList_WithAnnuals()
|
||||||
|
{
|
||||||
|
// TODO: Implement this correctly
|
||||||
|
await ResetDb();
|
||||||
|
var cblReadingList = LoadCblFromPath("Annual.cbl");
|
||||||
|
|
||||||
|
// Mock up our series
|
||||||
|
var fablesSeries = new SeriesBuilder("Fables")
|
||||||
|
.WithVolume(new VolumeBuilder("2002")
|
||||||
|
.WithMinNumber(1)
|
||||||
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
|
.WithChapter(new ChapterBuilder("3").Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var fables2Series = new SeriesBuilder("Fables Annual")
|
||||||
|
.WithVolume(new VolumeBuilder("2003")
|
||||||
|
.WithMinNumber(1)
|
||||||
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
|
.Build())
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
_context.AppUser.Add(new AppUser()
|
||||||
|
{
|
||||||
|
UserName = "majora2007",
|
||||||
|
ReadingLists = new List<ReadingList>(),
|
||||||
|
Libraries = new List<Library>()
|
||||||
|
{
|
||||||
|
new LibraryBuilder("Test LIb 2", LibraryType.Book)
|
||||||
|
.WithSeries(fablesSeries)
|
||||||
|
.WithSeries(fables2Series)
|
||||||
|
.Build()
|
||||||
|
},
|
||||||
|
});
|
||||||
|
await _unitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var importSummary = await _readingListService.CreateReadingListFromCbl(1, cblReadingList);
|
||||||
|
|
||||||
|
Assert.Equal(CblImportResult.Success, importSummary.Success);
|
||||||
|
Assert.NotEmpty(importSummary.Results);
|
||||||
|
|
||||||
|
var createdList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(1);
|
||||||
|
|
||||||
|
Assert.NotNull(createdList);
|
||||||
|
Assert.Equal("Annual", createdList.Title);
|
||||||
|
|
||||||
|
Assert.Equal(4, createdList.Items.Count);
|
||||||
|
Assert.Equal(1, createdList.Items.First(item => item.Order == 0).ChapterId);
|
||||||
|
Assert.Equal(2, createdList.Items.First(item => item.Order == 1).ChapterId);
|
||||||
|
Assert.Equal(4, createdList.Items.First(item => item.Order == 2).ChapterId);
|
||||||
|
Assert.Equal(3, createdList.Items.First(item => item.Order == 3).ChapterId);
|
||||||
|
}
|
||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
#region CreateReadingListsFromSeries
|
#region CreateReadingListsFromSeries
|
||||||
|
|
@ -1239,7 +1386,7 @@ public class ReadingListServiceTests
|
||||||
|
|
||||||
var series2 = new SeriesBuilder("Series 2")
|
var series2 = new SeriesBuilder("Series 2")
|
||||||
.WithFormat(MangaFormat.Archive)
|
.WithFormat(MangaFormat.Archive)
|
||||||
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume)
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("1").Build())
|
.WithChapter(new ChapterBuilder("1").Build())
|
||||||
.WithChapter(new ChapterBuilder("2").Build())
|
.WithChapter(new ChapterBuilder("2").Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
|
||||||
561
API.Tests/Services/ReadingProfileServiceTest.cs
Normal file
561
API.Tests/Services/ReadingProfileServiceTest.cs
Normal file
|
|
@ -0,0 +1,561 @@
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.Data.Repositories;
|
||||||
|
using API.DTOs;
|
||||||
|
using API.Entities;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using API.Services;
|
||||||
|
using API.Tests.Helpers;
|
||||||
|
using Kavita.Common;
|
||||||
|
using Microsoft.EntityFrameworkCore;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
|
public class ReadingProfileServiceTest: AbstractDbTest
|
||||||
|
{
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Does not add a default reading profile
|
||||||
|
/// </summary>
|
||||||
|
/// <returns></returns>
|
||||||
|
public async Task<(ReadingProfileService, AppUser, Library, Series)> Setup()
|
||||||
|
{
|
||||||
|
var user = new AppUserBuilder("amelia", "amelia@localhost").Build();
|
||||||
|
Context.AppUser.Add(user);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var series = new SeriesBuilder("Spice and Wolf").Build();
|
||||||
|
|
||||||
|
var library = new LibraryBuilder("Manga")
|
||||||
|
.WithSeries(series)
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
user.Libraries.Add(library);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var rps = new ReadingProfileService(UnitOfWork, Substitute.For<ILocalizationService>(), Mapper);
|
||||||
|
user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.UserPreferences);
|
||||||
|
|
||||||
|
return (rps, user, library, series);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ImplicitProfileFirst()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, library, series) = await Setup();
|
||||||
|
|
||||||
|
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithKind(ReadingProfileKind.Implicit)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithName("Implicit Profile")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var profile2 = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithName("Non-implicit Profile")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
user.ReadingProfiles.Add(profile);
|
||||||
|
user.ReadingProfiles.Add(profile2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(seriesProfile);
|
||||||
|
Assert.Equal("Implicit Profile", seriesProfile.Name);
|
||||||
|
|
||||||
|
// Find parent
|
||||||
|
seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id, true);
|
||||||
|
Assert.NotNull(seriesProfile);
|
||||||
|
Assert.Equal("Non-implicit Profile", seriesProfile.Name);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task CantDeleteDefaultReadingProfile()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, _, _) = await Setup();
|
||||||
|
|
||||||
|
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithKind(ReadingProfileKind.Default)
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(profile);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await Assert.ThrowsAsync<KavitaException>(async () =>
|
||||||
|
{
|
||||||
|
await rps.DeleteReadingProfile(user.Id, profile.Id);
|
||||||
|
});
|
||||||
|
|
||||||
|
var profile2 = new AppUserReadingProfileBuilder(user.Id).Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(profile2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await rps.DeleteReadingProfile(user.Id, profile2.Id);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var allProfiles = await Context.AppUserReadingProfiles.ToListAsync();
|
||||||
|
Assert.Single(allProfiles);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task CreateImplicitSeriesReadingProfile()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, _, series) = await Setup();
|
||||||
|
|
||||||
|
var dto = new UserReadingProfileDto
|
||||||
|
{
|
||||||
|
ReaderMode = ReaderMode.Webtoon,
|
||||||
|
ScalingOption = ScalingOption.FitToHeight,
|
||||||
|
WidthOverride = 53,
|
||||||
|
};
|
||||||
|
|
||||||
|
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, dto);
|
||||||
|
|
||||||
|
var profile = await rps.GetReadingProfileForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(profile);
|
||||||
|
Assert.Contains(profile.SeriesIds, s => s == series.Id);
|
||||||
|
Assert.Equal(ReadingProfileKind.Implicit, profile.Kind);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateImplicitReadingProfile_DoesNotCreateNew()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, _, series) = await Setup();
|
||||||
|
|
||||||
|
var dto = new UserReadingProfileDto
|
||||||
|
{
|
||||||
|
ReaderMode = ReaderMode.Webtoon,
|
||||||
|
ScalingOption = ScalingOption.FitToHeight,
|
||||||
|
WidthOverride = 53,
|
||||||
|
};
|
||||||
|
|
||||||
|
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, dto);
|
||||||
|
|
||||||
|
var profile = await rps.GetReadingProfileForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(profile);
|
||||||
|
Assert.Contains(profile.SeriesIds, s => s == series.Id);
|
||||||
|
Assert.Equal(ReadingProfileKind.Implicit, profile.Kind);
|
||||||
|
|
||||||
|
dto = new UserReadingProfileDto
|
||||||
|
{
|
||||||
|
ReaderMode = ReaderMode.LeftRight,
|
||||||
|
};
|
||||||
|
|
||||||
|
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, dto);
|
||||||
|
profile = await rps.GetReadingProfileForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(profile);
|
||||||
|
Assert.Contains(profile.SeriesIds, s => s == series.Id);
|
||||||
|
Assert.Equal(ReadingProfileKind.Implicit, profile.Kind);
|
||||||
|
Assert.Equal(ReaderMode.LeftRight, profile.ReaderMode);
|
||||||
|
|
||||||
|
var implicitCount = await Context.AppUserReadingProfiles
|
||||||
|
.Where(p => p.Kind == ReadingProfileKind.Implicit)
|
||||||
|
.CountAsync();
|
||||||
|
Assert.Equal(1, implicitCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task GetCorrectProfile()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, series) = await Setup();
|
||||||
|
|
||||||
|
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithName("Series Specific")
|
||||||
|
.Build();
|
||||||
|
var profile2 = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithLibrary(lib)
|
||||||
|
.WithName("Library Specific")
|
||||||
|
.Build();
|
||||||
|
var profile3 = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithKind(ReadingProfileKind.Default)
|
||||||
|
.WithName("Global")
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(profile);
|
||||||
|
Context.AppUserReadingProfiles.Add(profile2);
|
||||||
|
Context.AppUserReadingProfiles.Add(profile3);
|
||||||
|
|
||||||
|
var series2 = new SeriesBuilder("Rainbows After Storms").Build();
|
||||||
|
lib.Series.Add(series2);
|
||||||
|
|
||||||
|
var lib2 = new LibraryBuilder("Manga2").Build();
|
||||||
|
var series3 = new SeriesBuilder("A Tropical Fish Yearns for Snow").Build();
|
||||||
|
lib2.Series.Add(series3);
|
||||||
|
|
||||||
|
user.Libraries.Add(lib2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var p = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(p);
|
||||||
|
Assert.Equal("Series Specific", p.Name);
|
||||||
|
|
||||||
|
p = await rps.GetReadingProfileDtoForSeries(user.Id, series2.Id);
|
||||||
|
Assert.NotNull(p);
|
||||||
|
Assert.Equal("Library Specific", p.Name);
|
||||||
|
|
||||||
|
p = await rps.GetReadingProfileDtoForSeries(user.Id, series3.Id);
|
||||||
|
Assert.NotNull(p);
|
||||||
|
Assert.Equal("Global", p.Name);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ReplaceReadingProfile()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, series) = await Setup();
|
||||||
|
|
||||||
|
var profile1 = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithName("Profile 1")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var profile2 = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithName("Profile 2")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
Context.AppUserReadingProfiles.Add(profile1);
|
||||||
|
Context.AppUserReadingProfiles.Add(profile2);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var profile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(profile);
|
||||||
|
Assert.Equal("Profile 1", profile.Name);
|
||||||
|
|
||||||
|
await rps.AddProfileToSeries(user.Id, profile2.Id, series.Id);
|
||||||
|
profile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(profile);
|
||||||
|
Assert.Equal("Profile 2", profile.Name);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task DeleteReadingProfile()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, series) = await Setup();
|
||||||
|
|
||||||
|
var profile1 = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithName("Profile 1")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
Context.AppUserReadingProfiles.Add(profile1);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await rps.ClearSeriesProfile(user.Id, series.Id);
|
||||||
|
var profiles = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
|
||||||
|
Assert.DoesNotContain(profiles, rp => rp.SeriesIds.Contains(series.Id));
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task BulkAddReadingProfiles()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, series) = await Setup();
|
||||||
|
|
||||||
|
for (var i = 0; i < 10; i++)
|
||||||
|
{
|
||||||
|
var generatedSeries = new SeriesBuilder($"Generated Series #{i}").Build();
|
||||||
|
lib.Series.Add(generatedSeries);
|
||||||
|
}
|
||||||
|
|
||||||
|
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithName("Profile")
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(profile);
|
||||||
|
|
||||||
|
var profile2 = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithName("Profile2")
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(profile2);
|
||||||
|
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var someSeriesIds = lib.Series.Take(lib.Series.Count / 2).Select(s => s.Id).ToList();
|
||||||
|
await rps.BulkAddProfileToSeries(user.Id, profile.Id, someSeriesIds);
|
||||||
|
|
||||||
|
foreach (var id in someSeriesIds)
|
||||||
|
{
|
||||||
|
var foundProfile = await rps.GetReadingProfileDtoForSeries(user.Id, id);
|
||||||
|
Assert.NotNull(foundProfile);
|
||||||
|
Assert.Equal(profile.Id, foundProfile.Id);
|
||||||
|
}
|
||||||
|
|
||||||
|
var allIds = lib.Series.Select(s => s.Id).ToList();
|
||||||
|
await rps.BulkAddProfileToSeries(user.Id, profile2.Id, allIds);
|
||||||
|
|
||||||
|
foreach (var id in allIds)
|
||||||
|
{
|
||||||
|
var foundProfile = await rps.GetReadingProfileDtoForSeries(user.Id, id);
|
||||||
|
Assert.NotNull(foundProfile);
|
||||||
|
Assert.Equal(profile2.Id, foundProfile.Id);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task BulkAssignDeletesImplicit()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, series) = await Setup();
|
||||||
|
|
||||||
|
var implicitProfile = Mapper.Map<UserReadingProfileDto>(new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.Build());
|
||||||
|
|
||||||
|
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithName("Profile 1")
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(profile);
|
||||||
|
|
||||||
|
for (var i = 0; i < 10; i++)
|
||||||
|
{
|
||||||
|
var generatedSeries = new SeriesBuilder($"Generated Series #{i}").Build();
|
||||||
|
lib.Series.Add(generatedSeries);
|
||||||
|
}
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var ids = lib.Series.Select(s => s.Id).ToList();
|
||||||
|
|
||||||
|
foreach (var id in ids)
|
||||||
|
{
|
||||||
|
await rps.UpdateImplicitReadingProfile(user.Id, id, implicitProfile);
|
||||||
|
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, id);
|
||||||
|
Assert.NotNull(seriesProfile);
|
||||||
|
Assert.Equal(ReadingProfileKind.Implicit, seriesProfile.Kind);
|
||||||
|
}
|
||||||
|
|
||||||
|
await rps.BulkAddProfileToSeries(user.Id, profile.Id, ids);
|
||||||
|
|
||||||
|
foreach (var id in ids)
|
||||||
|
{
|
||||||
|
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, id);
|
||||||
|
Assert.NotNull(seriesProfile);
|
||||||
|
Assert.Equal(ReadingProfileKind.User, seriesProfile.Kind);
|
||||||
|
}
|
||||||
|
|
||||||
|
var implicitCount = await Context.AppUserReadingProfiles
|
||||||
|
.Where(p => p.Kind == ReadingProfileKind.Implicit)
|
||||||
|
.CountAsync();
|
||||||
|
Assert.Equal(0, implicitCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task AddDeletesImplicit()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, series) = await Setup();
|
||||||
|
|
||||||
|
var implicitProfile = Mapper.Map<UserReadingProfileDto>(new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithKind(ReadingProfileKind.Implicit)
|
||||||
|
.Build());
|
||||||
|
|
||||||
|
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithName("Profile 1")
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(profile);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, implicitProfile);
|
||||||
|
|
||||||
|
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(seriesProfile);
|
||||||
|
Assert.Equal(ReadingProfileKind.Implicit, seriesProfile.Kind);
|
||||||
|
|
||||||
|
await rps.AddProfileToSeries(user.Id, profile.Id, series.Id);
|
||||||
|
|
||||||
|
seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
|
||||||
|
Assert.NotNull(seriesProfile);
|
||||||
|
Assert.Equal(ReadingProfileKind.User, seriesProfile.Kind);
|
||||||
|
|
||||||
|
var implicitCount = await Context.AppUserReadingProfiles
|
||||||
|
.Where(p => p.Kind == ReadingProfileKind.Implicit)
|
||||||
|
.CountAsync();
|
||||||
|
Assert.Equal(0, implicitCount);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task CreateReadingProfile()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, series) = await Setup();
|
||||||
|
|
||||||
|
var dto = new UserReadingProfileDto
|
||||||
|
{
|
||||||
|
Name = "Profile 1",
|
||||||
|
ReaderMode = ReaderMode.LeftRight,
|
||||||
|
EmulateBook = false,
|
||||||
|
};
|
||||||
|
|
||||||
|
await rps.CreateReadingProfile(user.Id, dto);
|
||||||
|
|
||||||
|
var dto2 = new UserReadingProfileDto
|
||||||
|
{
|
||||||
|
Name = "Profile 2",
|
||||||
|
ReaderMode = ReaderMode.LeftRight,
|
||||||
|
EmulateBook = false,
|
||||||
|
};
|
||||||
|
|
||||||
|
await rps.CreateReadingProfile(user.Id, dto2);
|
||||||
|
|
||||||
|
var dto3 = new UserReadingProfileDto
|
||||||
|
{
|
||||||
|
Name = "Profile 1", // Not unique name
|
||||||
|
ReaderMode = ReaderMode.LeftRight,
|
||||||
|
EmulateBook = false,
|
||||||
|
};
|
||||||
|
|
||||||
|
await Assert.ThrowsAsync<KavitaException>(async () =>
|
||||||
|
{
|
||||||
|
await rps.CreateReadingProfile(user.Id, dto3);
|
||||||
|
});
|
||||||
|
|
||||||
|
var allProfiles = Context.AppUserReadingProfiles.ToList();
|
||||||
|
Assert.Equal(2, allProfiles.Count);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ClearSeriesProfile_RemovesImplicitAndUnlinksExplicit()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, _, series) = await Setup();
|
||||||
|
|
||||||
|
var implicitProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithKind(ReadingProfileKind.Implicit)
|
||||||
|
.WithName("Implicit Profile")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
var explicitProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithSeries(series)
|
||||||
|
.WithName("Explicit Profile")
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
Context.AppUserReadingProfiles.Add(implicitProfile);
|
||||||
|
Context.AppUserReadingProfiles.Add(explicitProfile);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var allBefore = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
|
||||||
|
Assert.Equal(2, allBefore.Count(rp => rp.SeriesIds.Contains(series.Id)));
|
||||||
|
|
||||||
|
await rps.ClearSeriesProfile(user.Id, series.Id);
|
||||||
|
|
||||||
|
var remainingProfiles = await Context.AppUserReadingProfiles.ToListAsync();
|
||||||
|
Assert.Single(remainingProfiles);
|
||||||
|
Assert.Equal("Explicit Profile", remainingProfiles[0].Name);
|
||||||
|
Assert.Empty(remainingProfiles[0].SeriesIds);
|
||||||
|
|
||||||
|
var profilesForSeries = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
|
||||||
|
Assert.DoesNotContain(profilesForSeries, rp => rp.SeriesIds.Contains(series.Id));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task AddProfileToLibrary_AddsAndOverridesExisting()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, _) = await Setup();
|
||||||
|
|
||||||
|
var profile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithName("Library Profile")
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(profile);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await rps.AddProfileToLibrary(user.Id, profile.Id, lib.Id);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var linkedProfile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
|
||||||
|
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
|
||||||
|
Assert.NotNull(linkedProfile);
|
||||||
|
Assert.Equal(profile.Id, linkedProfile.Id);
|
||||||
|
|
||||||
|
var newProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithName("New Profile")
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(newProfile);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await rps.AddProfileToLibrary(user.Id, newProfile.Id, lib.Id);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
linkedProfile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
|
||||||
|
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
|
||||||
|
Assert.NotNull(linkedProfile);
|
||||||
|
Assert.Equal(newProfile.Id, linkedProfile.Id);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ClearLibraryProfile_RemovesImplicitOrUnlinksExplicit()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
var (rps, user, lib, _) = await Setup();
|
||||||
|
|
||||||
|
var implicitProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithKind(ReadingProfileKind.Implicit)
|
||||||
|
.WithLibrary(lib)
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(implicitProfile);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await rps.ClearLibraryProfile(user.Id, lib.Id);
|
||||||
|
var profile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
|
||||||
|
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
|
||||||
|
Assert.Null(profile);
|
||||||
|
|
||||||
|
var explicitProfile = new AppUserReadingProfileBuilder(user.Id)
|
||||||
|
.WithLibrary(lib)
|
||||||
|
.Build();
|
||||||
|
Context.AppUserReadingProfiles.Add(explicitProfile);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
await rps.ClearLibraryProfile(user.Id, lib.Id);
|
||||||
|
profile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
|
||||||
|
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
|
||||||
|
Assert.Null(profile);
|
||||||
|
|
||||||
|
var stillExists = await Context.AppUserReadingProfiles.FindAsync(explicitProfile.Id);
|
||||||
|
Assert.NotNull(stillExists);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// As response to #3793, I'm not sure if we want to keep this. It's not the most nice. But I think the idea of this test
|
||||||
|
/// is worth having.
|
||||||
|
/// </summary>
|
||||||
|
[Fact]
|
||||||
|
public void UpdateFields_UpdatesAll()
|
||||||
|
{
|
||||||
|
// Repeat to ensure booleans are flipped and actually tested
|
||||||
|
for (int i = 0; i < 10; i++)
|
||||||
|
{
|
||||||
|
var profile = new AppUserReadingProfile();
|
||||||
|
var dto = new UserReadingProfileDto();
|
||||||
|
|
||||||
|
RandfHelper.SetRandomValues(profile);
|
||||||
|
RandfHelper.SetRandomValues(dto);
|
||||||
|
|
||||||
|
ReadingProfileService.UpdateReaderProfileFields(profile, dto);
|
||||||
|
|
||||||
|
var newDto = Mapper.Map<UserReadingProfileDto>(profile);
|
||||||
|
|
||||||
|
Assert.True(RandfHelper.AreSimpleFieldsEqual(dto, newDto,
|
||||||
|
["<Id>k__BackingField", "<UserId>k__BackingField"]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
protected override async Task ResetDb()
|
||||||
|
{
|
||||||
|
Context.AppUserReadingProfiles.RemoveRange(Context.AppUserReadingProfiles);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
}
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load diff
|
|
@ -1,14 +1,632 @@
|
||||||
using API.Services.Plus;
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.Data.Repositories;
|
||||||
|
using API.DTOs.Scrobbling;
|
||||||
|
using API.Entities;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Entities.Scrobble;
|
||||||
|
using API.Helpers.Builders;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Plus;
|
||||||
|
using API.SignalR;
|
||||||
|
using Kavita.Common;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
||||||
namespace API.Tests.Services;
|
namespace API.Tests.Services;
|
||||||
|
#nullable enable
|
||||||
|
|
||||||
public class ScrobblingServiceTests
|
public class ScrobblingServiceTests : AbstractDbTest
|
||||||
{
|
{
|
||||||
|
    // Page count used for every chapter seeded by SeedData()
    private const int ChapterPages = 100;

    /// <summary>
    /// A syntactically valid JWT whose decoded payload is:
    /// {
    /// "Issuer": "Issuer",
    /// "Issued At": "2025-06-15T21:01:57.615Z",
    /// "Expiration": "2200-06-15T21:01:57.615Z"
    /// }
    /// </summary>
    /// <remarks>Our UnitTests will fail in 2200 :(</remarks>
    private const string ValidJwtToken =
        "eyJhbGciOiJIUzI1NiJ9.eyJJc3N1ZXIiOiJJc3N1ZXIiLCJleHAiOjcyNzI0NTAxMTcsImlhdCI6MTc1MDAyMTMxN30.zADmcGq_BfxbcV8vy4xw5Cbzn4COkmVINxgqpuL17Ng";

    // Service under test plus the NSubstitute fakes wired into it (see ctor)
    private readonly ScrobblingService _service;
    private readonly ILicenseService _licenseService;
    private readonly ILocalizationService _localizationService;
    private readonly ILogger<ScrobblingService> _logger;
    private readonly IEmailService _emailService;
    private readonly IKavitaPlusApiService _kavitaPlusApiService;
    /// <summary>
    /// IReaderService, without the ScrobblingService injected
    /// </summary>
    private readonly IReaderService _readerService;
    /// <summary>
    /// IReaderService, with the _service injected
    /// </summary>
    private readonly IReaderService _hookedUpReaderService;
|
||||||
|
|
||||||
|
    public ScrobblingServiceTests()
    {
        // Every collaborator is an NSubstitute fake; only ScrobblingService is real
        _licenseService = Substitute.For<ILicenseService>();
        _localizationService = Substitute.For<ILocalizationService>();
        _logger = Substitute.For<ILogger<ScrobblingService>>();
        _emailService = Substitute.For<IEmailService>();
        _kavitaPlusApiService = Substitute.For<IKavitaPlusApiService>();

        _service = new ScrobblingService(UnitOfWork, Substitute.For<IEventHub>(), _logger, _licenseService,
            _localizationService, _emailService, _kavitaPlusApiService);

        // Reader service that does NOT feed the real scrobbling service
        _readerService = new ReaderService(UnitOfWork,
            Substitute.For<ILogger<ReaderService>>(),
            Substitute.For<IEventHub>(),
            Substitute.For<IImageService>(),
            Substitute.For<IDirectoryService>(),
            Substitute.For<IScrobblingService>()); // Do not use the actual one

        // Reader service wired to the real _service, for end-to-end paths
        _hookedUpReaderService = new ReaderService(UnitOfWork,
            Substitute.For<ILogger<ReaderService>>(),
            Substitute.For<IEventHub>(),
            Substitute.For<IImageService>(),
            Substitute.For<IDirectoryService>(),
            _service);
    }
|
||||||
|
|
||||||
|
    protected override async Task ResetDb()
    {
        // Clear all tables this suite touches, then commit once
        Context.ScrobbleEvent.RemoveRange(Context.ScrobbleEvent.ToList());
        Context.Series.RemoveRange(Context.Series.ToList());
        Context.Library.RemoveRange(Context.Library.ToList());
        Context.AppUser.RemoveRange(Context.AppUser.ToList());

        await UnitOfWork.CommitAsync();
    }
|
||||||
|
|
||||||
|
    // Seeds one scrobbling-enabled library ("Test Library") holding one series
    // ("Test Series") with 2 volumes x 3 chapters (ChapterPages pages each),
    // plus one user ("testuser") with AniList scrobbling enabled.
    private async Task SeedData()
    {
        var series = new SeriesBuilder("Test Series")
            .WithFormat(MangaFormat.Archive)
            .WithMetadata(new SeriesMetadataBuilder().Build())
            .WithVolume(new VolumeBuilder("Volume 1")
                .WithChapters([
                    new ChapterBuilder("1")
                        .WithPages(ChapterPages)
                        .Build(),
                    new ChapterBuilder("2")
                        .WithPages(ChapterPages)
                        .Build(),
                    new ChapterBuilder("3")
                        .WithPages(ChapterPages)
                        .Build()])
                .Build())
            .WithVolume(new VolumeBuilder("Volume 2")
                .WithChapters([
                    new ChapterBuilder("4")
                        .WithPages(ChapterPages)
                        .Build(),
                    new ChapterBuilder("5")
                        .WithPages(ChapterPages)
                        .Build(),
                    new ChapterBuilder("6")
                        .WithPages(ChapterPages)
                        .Build()])
                .Build())
            .Build();

        var library = new LibraryBuilder("Test Library", LibraryType.Manga)
            .WithAllowScrobbling(true)
            .WithSeries(series)
            .Build();


        Context.Library.Add(library);

        var user = new AppUserBuilder("testuser", "testuser")
            //.WithPreferences(new UserPreferencesBuilder().WithAniListScrobblingEnabled(true).Build())
            .Build();

        // Flag is set directly on the preferences object rather than through
        // the builder (see commented-out line above)
        user.UserPreferences.AniListScrobblingEnabled = true;

        UnitOfWork.UserRepository.Add(user);

        await UnitOfWork.CommitAsync();
    }
|
||||||
|
|
||||||
|
private async Task<ScrobbleEvent> CreateScrobbleEvent(int? seriesId = null)
|
||||||
|
{
|
||||||
|
var evt = new ScrobbleEvent
|
||||||
|
{
|
||||||
|
ScrobbleEventType = ScrobbleEventType.ChapterRead,
|
||||||
|
Format = PlusMediaFormat.Manga,
|
||||||
|
SeriesId = seriesId ?? 0,
|
||||||
|
LibraryId = 0,
|
||||||
|
AppUserId = 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (seriesId != null)
|
||||||
|
{
|
||||||
|
var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId.Value);
|
||||||
|
if (series != null) evt.Series = series;
|
||||||
|
}
|
||||||
|
|
||||||
|
return evt;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#region K+ API Request Tests
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task PostScrobbleUpdate_AuthErrors()
|
||||||
|
{
|
||||||
|
_kavitaPlusApiService.PostScrobbleUpdate(null!, "")
|
||||||
|
.ReturnsForAnyArgs(new ScrobbleResponseDto()
|
||||||
|
{
|
||||||
|
ErrorMessage = "Unauthorized"
|
||||||
|
});
|
||||||
|
|
||||||
|
var evt = await CreateScrobbleEvent();
|
||||||
|
await Assert.ThrowsAsync<KavitaException>(async () =>
|
||||||
|
{
|
||||||
|
await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
|
||||||
|
});
|
||||||
|
Assert.True(evt.IsErrored);
|
||||||
|
Assert.Equal("Kavita+ subscription no longer active", evt.ErrorDetails);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task PostScrobbleUpdate_UnknownSeriesLoggedAsError()
|
||||||
|
{
|
||||||
|
_kavitaPlusApiService.PostScrobbleUpdate(null!, "")
|
||||||
|
.ReturnsForAnyArgs(new ScrobbleResponseDto()
|
||||||
|
{
|
||||||
|
ErrorMessage = "Unknown Series"
|
||||||
|
});
|
||||||
|
|
||||||
|
await SeedData();
|
||||||
|
var evt = await CreateScrobbleEvent(1);
|
||||||
|
|
||||||
|
await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
Assert.True(evt.IsErrored);
|
||||||
|
|
||||||
|
var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
|
||||||
|
Assert.NotNull(series);
|
||||||
|
Assert.True(series.IsBlacklisted);
|
||||||
|
|
||||||
|
var errors = await UnitOfWork.ScrobbleRepository.GetAllScrobbleErrorsForSeries(1);
|
||||||
|
Assert.Single(errors);
|
||||||
|
Assert.Equal("Series cannot be matched for Scrobbling", errors.First().Comment);
|
||||||
|
Assert.Equal(series.Id, errors.First().SeriesId);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task PostScrobbleUpdate_InvalidAccessToken()
|
||||||
|
{
|
||||||
|
_kavitaPlusApiService.PostScrobbleUpdate(null!, "")
|
||||||
|
.ReturnsForAnyArgs(new ScrobbleResponseDto()
|
||||||
|
{
|
||||||
|
ErrorMessage = "Access token is invalid"
|
||||||
|
});
|
||||||
|
|
||||||
|
var evt = await CreateScrobbleEvent();
|
||||||
|
|
||||||
|
await Assert.ThrowsAsync<KavitaException>(async () =>
|
||||||
|
{
|
||||||
|
await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
|
||||||
|
});
|
||||||
|
|
||||||
|
Assert.True(evt.IsErrored);
|
||||||
|
Assert.Equal("Access Token needs to be rotated to continue scrobbling", evt.ErrorDetails);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region K+ API Request data tests
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ProcessReadEvents_CreatesNoEventsWhenNoProgress()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
await SeedData();
|
||||||
|
|
||||||
|
// Set Returns
|
||||||
|
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
|
||||||
|
_kavitaPlusApiService.GetRateLimit(Arg.Any<string>(), Arg.Any<string>())
|
||||||
|
.Returns(100);
|
||||||
|
|
||||||
|
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
|
||||||
|
Assert.NotNull(user);
|
||||||
|
|
||||||
|
// Ensure CanProcessScrobbleEvent returns true
|
||||||
|
user.AniListAccessToken = ValidJwtToken;
|
||||||
|
UnitOfWork.UserRepository.Update(user);
|
||||||
|
await UnitOfWork.CommitAsync();
|
||||||
|
|
||||||
|
var chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(4);
|
||||||
|
Assert.NotNull(chapter);
|
||||||
|
|
||||||
|
var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
|
||||||
|
Assert.NotNull(volume);
|
||||||
|
|
||||||
|
// Call Scrobble without having any progress
|
||||||
|
await _service.ScrobbleReadingUpdate(1, 1);
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
|
||||||
|
Assert.Empty(events);
|
||||||
|
}
|
||||||
|
|
||||||
|
    [Fact]
    public async Task ProcessReadEvents_UpdateVolumeAndChapterData()
    {
        await ResetDb();
        await SeedData();

        // License active and rate limit available
        _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
        _kavitaPlusApiService.GetRateLimit(Arg.Any<string>(), Arg.Any<string>())
            .Returns(100);

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        // Ensure CanProcessScrobbleEvent returns true
        user.AniListAccessToken = ValidJwtToken;
        UnitOfWork.UserRepository.Update(user);
        await UnitOfWork.CommitAsync();

        var chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(4);
        Assert.NotNull(chapter);

        var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
        Assert.NotNull(volume);

        // Mark something as read to trigger event creation
        await _readerService.MarkChaptersAsRead(user, 1, new List<Chapter>() {volume.Chapters[0]});
        await UnitOfWork.CommitAsync();

        // Call Scrobble while having some progress
        await _service.ScrobbleReadingUpdate(user.Id, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events);

        // Give it some (more) read progress
        await _readerService.MarkChaptersAsRead(user, 1, volume.Chapters);
        await _readerService.MarkChaptersAsRead(user, 1, [chapter]);
        await UnitOfWork.CommitAsync();

        await _service.ProcessUpdatesSinceLastSync();

        // The payload sent to K+ must reflect the furthest read chapter/volume
        await _kavitaPlusApiService.Received(1).PostScrobbleUpdate(
            Arg.Is<ScrobbleDto>(data =>
                data.ChapterNumber == (int)chapter.MaxNumber &&
                data.VolumeNumber == (int)volume.MaxNumber
            ),
            Arg.Any<string>());
    }
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region Scrobble Reading Update Tests
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ScrobbleReadingUpdate_IgnoreNoLicense()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
await SeedData();
|
||||||
|
|
||||||
|
_licenseService.HasActiveLicense().Returns(false);
|
||||||
|
|
||||||
|
await _service.ScrobbleReadingUpdate(1, 1);
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
|
||||||
|
Assert.Empty(events);
|
||||||
|
}
|
||||||
|
|
||||||
|
    [Fact]
    public async Task ScrobbleReadingUpdate_RemoveWhenNoProgress()
    {
        await ResetDb();
        await SeedData();

        _licenseService.HasActiveLicense().Returns(true);

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
        Assert.NotNull(volume);

        // Create read progress so an unprocessed scrobble event gets generated
        await _readerService.MarkChaptersAsRead(user, 1, new List<Chapter>() {volume.Chapters[0]});
        await UnitOfWork.CommitAsync();

        await _service.ScrobbleReadingUpdate(1, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events);

        var readEvent = events.First();
        Assert.False(readEvent.IsProcessed);

        // Wipe all progress for the series
        await _hookedUpReaderService.MarkSeriesAsUnread(user, 1);
        await UnitOfWork.CommitAsync();

        // Existing event is deleted
        await _service.ScrobbleReadingUpdate(1, 1);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);

        await _hookedUpReaderService.MarkSeriesAsUnread(user, 1);
        await UnitOfWork.CommitAsync();

        // No new events are added
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);
    }
|
||||||
|
|
||||||
|
    [Fact]
    public async Task ScrobbleReadingUpdate_UpdateExistingNotIsProcessed()
    {
        await ResetDb();
        await SeedData();

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        var chapter1 = await UnitOfWork.ChapterRepository.GetChapterAsync(1);
        var chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(2);
        var chapter3 = await UnitOfWork.ChapterRepository.GetChapterAsync(3);
        Assert.NotNull(chapter1);
        Assert.NotNull(chapter2);
        Assert.NotNull(chapter3);

        _licenseService.HasActiveLicense().Returns(true);

        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Empty(events);


        await _readerService.MarkChaptersAsRead(user, 1, [chapter1]);
        await UnitOfWork.CommitAsync();

        // Scrobble update: first progress yields a single unprocessed event
        await _service.ScrobbleReadingUpdate(1, 1);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events);

        var readEvent = events[0];
        Assert.False(readEvent.IsProcessed);
        Assert.Equal(1, readEvent.ChapterNumber);

        // Mark as processed
        readEvent.IsProcessed = true;
        await UnitOfWork.CommitAsync();

        await _readerService.MarkChaptersAsRead(user, 1, [chapter2]);
        await UnitOfWork.CommitAsync();

        // Scrobble update: the processed event is kept, a fresh one is created
        await _service.ScrobbleReadingUpdate(1, 1);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Equal(2, events.Count);
        Assert.Single(events.Where(e => e.IsProcessed).ToList());
        Assert.Single(events.Where(e => !e.IsProcessed).ToList());

        // Should update the existing non processed event
        await _readerService.MarkChaptersAsRead(user, 1, [chapter3]);
        await UnitOfWork.CommitAsync();

        // Scrobble update: still two events, no third one added
        await _service.ScrobbleReadingUpdate(1, 1);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Equal(2, events.Count);
        Assert.Single(events.Where(e => e.IsProcessed).ToList());
        Assert.Single(events.Where(e => !e.IsProcessed).ToList());
    }
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region ScrobbleWantToReadUpdate Tests
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ScrobbleWantToReadUpdate_NoExistingEvents_WantToRead_ShouldCreateNewEvent()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedData();
|
||||||
|
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
|
||||||
|
|
||||||
|
const int userId = 1;
|
||||||
|
const int seriesId = 1;
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
|
||||||
|
Assert.Single(events);
|
||||||
|
Assert.Equal(ScrobbleEventType.AddWantToRead, events[0].ScrobbleEventType);
|
||||||
|
Assert.Equal(userId, events[0].AppUserId);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ScrobbleWantToReadUpdate_NoExistingEvents_RemoveWantToRead_ShouldCreateNewEvent()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedData();
|
||||||
|
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
|
||||||
|
|
||||||
|
const int userId = 1;
|
||||||
|
const int seriesId = 1;
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
|
||||||
|
Assert.Single(events);
|
||||||
|
Assert.Equal(ScrobbleEventType.RemoveWantToRead, events[0].ScrobbleEventType);
|
||||||
|
Assert.Equal(userId, events[0].AppUserId);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_WantToRead_ShouldNotCreateNewEvent()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedData();
|
||||||
|
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
|
||||||
|
|
||||||
|
const int userId = 1;
|
||||||
|
const int seriesId = 1;
|
||||||
|
|
||||||
|
// First, let's create an event through the service
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
|
||||||
|
|
||||||
|
// Act - Try to create the same event again
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
|
||||||
|
|
||||||
|
Assert.Single(events);
|
||||||
|
Assert.All(events, e => Assert.Equal(ScrobbleEventType.AddWantToRead, e.ScrobbleEventType));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_RemoveWantToRead_ShouldAddRemoveEvent()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedData();
|
||||||
|
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
|
||||||
|
|
||||||
|
const int userId = 1;
|
||||||
|
const int seriesId = 1;
|
||||||
|
|
||||||
|
// First, let's create a want-to-read event through the service
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
|
||||||
|
|
||||||
|
// Act - Now remove from want-to-read
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
|
||||||
|
|
||||||
|
Assert.Single(events);
|
||||||
|
Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.RemoveWantToRead);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_RemoveWantToRead_ShouldNotCreateNewEvent()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedData();
|
||||||
|
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
|
||||||
|
|
||||||
|
const int userId = 1;
|
||||||
|
const int seriesId = 1;
|
||||||
|
|
||||||
|
// First, let's create a remove-from-want-to-read event through the service
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
|
||||||
|
|
||||||
|
// Act - Try to create the same event again
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
|
||||||
|
|
||||||
|
Assert.Single(events);
|
||||||
|
Assert.All(events, e => Assert.Equal(ScrobbleEventType.RemoveWantToRead, e.ScrobbleEventType));
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_WantToRead_ShouldAddWantToReadEvent()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
await SeedData();
|
||||||
|
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
|
||||||
|
|
||||||
|
const int userId = 1;
|
||||||
|
const int seriesId = 1;
|
||||||
|
|
||||||
|
// First, let's create a remove-from-want-to-read event through the service
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
|
||||||
|
|
||||||
|
// Act - Now add to want-to-read
|
||||||
|
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
|
||||||
|
|
||||||
|
Assert.Single(events);
|
||||||
|
Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.AddWantToRead);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
|
#region Scrobble Rating Update Test
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task ScrobbleRatingUpdate_IgnoreNoLicense()
|
||||||
|
{
|
||||||
|
await ResetDb();
|
||||||
|
await SeedData();
|
||||||
|
|
||||||
|
_licenseService.HasActiveLicense().Returns(false);
|
||||||
|
|
||||||
|
await _service.ScrobbleRatingUpdate(1, 1, 1);
|
||||||
|
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
|
||||||
|
Assert.Empty(events);
|
||||||
|
}
|
||||||
|
|
||||||
|
    [Fact]
    public async Task ScrobbleRatingUpdate_UpdateExistingNotIsProcessed()
    {
        await ResetDb();
        await SeedData();

        _licenseService.HasActiveLicense().Returns(true);

        var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
        Assert.NotNull(user);

        var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
        Assert.NotNull(series);

        // First rating creates a single unprocessed event
        await _service.ScrobbleRatingUpdate(user.Id, series.Id, 1);
        var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events);
        Assert.Equal(1, events.First().Rating);

        // Mark as processed
        events.First().IsProcessed = true;
        await UnitOfWork.CommitAsync();

        // Rating again after processing creates a second, unprocessed event
        await _service.ScrobbleRatingUpdate(user.Id, series.Id, 5);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Equal(2, events.Count);
        Assert.Single(events, evt => evt.IsProcessed);
        Assert.Single(events, evt => !evt.IsProcessed);

        // A further rating updates the pending event in place instead of adding a third
        await _service.ScrobbleRatingUpdate(user.Id, series.Id, 5);
        events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
        Assert.Single(events, evt => !evt.IsProcessed);
        Assert.Equal(5, events.First(evt => !evt.IsProcessed).Rating);

    }
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
|
||||||
[Theory]
|
[Theory]
|
||||||
[InlineData("https://anilist.co/manga/35851/Byeontaega-Doeja/", 35851)]
|
[InlineData("https://anilist.co/manga/35851/Byeontaega-Doeja/", 35851)]
|
||||||
public void CanParseWeblink(string link, long expectedId)
|
[InlineData("https://anilist.co/manga/30105", 30105)]
|
||||||
|
[InlineData("https://anilist.co/manga/30105/Kekkaishi/", 30105)]
|
||||||
|
public void CanParseWeblink_AniList(string link, int? expectedId)
|
||||||
{
|
{
|
||||||
Assert.Equal(ScrobblingService.ExtractId<long>(link, ScrobblingService.AniListWeblinkWebsite), expectedId);
|
Assert.Equal(ScrobblingService.ExtractId<int?>(link, ScrobblingService.AniListWeblinkWebsite), expectedId);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData("https://mangadex.org/title/316d3d09-bb83-49da-9d90-11dc7ce40967/honzuki-no-gekokujou-shisho-ni-naru-tame-ni-wa-shudan-wo-erandeiraremasen-dai-3-bu-ryouchi-ni-hon-o", "316d3d09-bb83-49da-9d90-11dc7ce40967")]
|
||||||
|
public void CanParseWeblink_MangaDex(string link, string expectedId)
|
||||||
|
{
|
||||||
|
Assert.Equal(ScrobblingService.ExtractId<string?>(link, ScrobblingService.MangaDexWeblinkWebsite), expectedId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load diff
292
API.Tests/Services/SettingsServiceTests.cs
Normal file
292
API.Tests/Services/SettingsServiceTests.cs
Normal file
|
|
@ -0,0 +1,292 @@
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.IO.Abstractions;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using API.Data;
|
||||||
|
using API.Data.Repositories;
|
||||||
|
using API.DTOs.KavitaPlus.Metadata;
|
||||||
|
using API.Entities;
|
||||||
|
using API.Entities.Enums;
|
||||||
|
using API.Entities.MetadataMatching;
|
||||||
|
using API.Services;
|
||||||
|
using API.Services.Tasks.Scanner;
|
||||||
|
using Microsoft.Extensions.Logging;
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
namespace API.Tests.Services;
|
||||||
|
|
||||||
|
public class SettingsServiceTests
|
||||||
|
{
|
||||||
|
private readonly ISettingsService _settingsService;
|
||||||
|
private readonly IUnitOfWork _mockUnitOfWork;
|
||||||
|
|
||||||
|
public SettingsServiceTests()
|
||||||
|
{
|
||||||
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
|
||||||
|
|
||||||
|
_mockUnitOfWork = Substitute.For<IUnitOfWork>();
|
||||||
|
_settingsService = new SettingsService(_mockUnitOfWork, ds,
|
||||||
|
Substitute.For<ILibraryWatcher>(), Substitute.For<ITaskScheduler>(),
|
||||||
|
Substitute.For<ILogger<SettingsService>>());
|
||||||
|
}
|
||||||
|
|
||||||
|
#region UpdateMetadataSettings
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateMetadataSettings_ShouldUpdateExistingSettings()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var existingSettings = new MetadataSettings
|
||||||
|
{
|
||||||
|
Id = 1,
|
||||||
|
Enabled = false,
|
||||||
|
EnableSummary = false,
|
||||||
|
EnableLocalizedName = false,
|
||||||
|
EnablePublicationStatus = false,
|
||||||
|
EnableRelationships = false,
|
||||||
|
EnablePeople = false,
|
||||||
|
EnableStartDate = false,
|
||||||
|
EnableGenres = false,
|
||||||
|
EnableTags = false,
|
||||||
|
FirstLastPeopleNaming = false,
|
||||||
|
EnableCoverImage = false,
|
||||||
|
AgeRatingMappings = new Dictionary<string, AgeRating>(),
|
||||||
|
Blacklist = [],
|
||||||
|
Whitelist = [],
|
||||||
|
Overrides = [],
|
||||||
|
PersonRoles = [],
|
||||||
|
FieldMappings = []
|
||||||
|
};
|
||||||
|
|
||||||
|
var settingsRepo = Substitute.For<ISettingsRepository>();
|
||||||
|
settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
|
||||||
|
settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
|
||||||
|
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
|
||||||
|
|
||||||
|
var updateDto = new MetadataSettingsDto
|
||||||
|
{
|
||||||
|
Enabled = true,
|
||||||
|
EnableSummary = true,
|
||||||
|
EnableLocalizedName = true,
|
||||||
|
EnablePublicationStatus = true,
|
||||||
|
EnableRelationships = true,
|
||||||
|
EnablePeople = true,
|
||||||
|
EnableStartDate = true,
|
||||||
|
EnableGenres = true,
|
||||||
|
EnableTags = true,
|
||||||
|
FirstLastPeopleNaming = true,
|
||||||
|
EnableCoverImage = true,
|
||||||
|
AgeRatingMappings = new Dictionary<string, AgeRating> { { "Adult", AgeRating.R18Plus } },
|
||||||
|
Blacklist = ["blacklisted-tag"],
|
||||||
|
Whitelist = ["whitelisted-tag"],
|
||||||
|
Overrides = [MetadataSettingField.Summary],
|
||||||
|
PersonRoles = [PersonRole.Writer],
|
||||||
|
FieldMappings =
|
||||||
|
[
|
||||||
|
new MetadataFieldMappingDto
|
||||||
|
{
|
||||||
|
SourceType = MetadataFieldType.Genre,
|
||||||
|
DestinationType = MetadataFieldType.Tag,
|
||||||
|
SourceValue = "Action",
|
||||||
|
DestinationValue = "Fight",
|
||||||
|
ExcludeFromSource = true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await _settingsService.UpdateMetadataSettings(updateDto);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
await _mockUnitOfWork.Received(1).CommitAsync();
|
||||||
|
|
||||||
|
// Verify properties were updated
|
||||||
|
Assert.True(existingSettings.Enabled);
|
||||||
|
Assert.True(existingSettings.EnableSummary);
|
||||||
|
Assert.True(existingSettings.EnableLocalizedName);
|
||||||
|
Assert.True(existingSettings.EnablePublicationStatus);
|
||||||
|
Assert.True(existingSettings.EnableRelationships);
|
||||||
|
Assert.True(existingSettings.EnablePeople);
|
||||||
|
Assert.True(existingSettings.EnableStartDate);
|
||||||
|
Assert.True(existingSettings.EnableGenres);
|
||||||
|
Assert.True(existingSettings.EnableTags);
|
||||||
|
Assert.True(existingSettings.FirstLastPeopleNaming);
|
||||||
|
Assert.True(existingSettings.EnableCoverImage);
|
||||||
|
|
||||||
|
// Verify collections were updated
|
||||||
|
Assert.Single(existingSettings.AgeRatingMappings);
|
||||||
|
Assert.Equal(AgeRating.R18Plus, existingSettings.AgeRatingMappings["Adult"]);
|
||||||
|
|
||||||
|
Assert.Single(existingSettings.Blacklist);
|
||||||
|
Assert.Equal("blacklisted-tag", existingSettings.Blacklist[0]);
|
||||||
|
|
||||||
|
Assert.Single(existingSettings.Whitelist);
|
||||||
|
Assert.Equal("whitelisted-tag", existingSettings.Whitelist[0]);
|
||||||
|
|
||||||
|
Assert.Single(existingSettings.Overrides);
|
||||||
|
Assert.Equal(MetadataSettingField.Summary, existingSettings.Overrides[0]);
|
||||||
|
|
||||||
|
Assert.Single(existingSettings.PersonRoles);
|
||||||
|
Assert.Equal(PersonRole.Writer, existingSettings.PersonRoles[0]);
|
||||||
|
|
||||||
|
Assert.Single(existingSettings.FieldMappings);
|
||||||
|
Assert.Equal(MetadataFieldType.Genre, existingSettings.FieldMappings[0].SourceType);
|
||||||
|
Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[0].DestinationType);
|
||||||
|
Assert.Equal("Action", existingSettings.FieldMappings[0].SourceValue);
|
||||||
|
Assert.Equal("Fight", existingSettings.FieldMappings[0].DestinationValue);
|
||||||
|
Assert.True(existingSettings.FieldMappings[0].ExcludeFromSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateMetadataSettings_WithNullCollections_ShouldUseEmptyCollections()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var existingSettings = new MetadataSettings
|
||||||
|
{
|
||||||
|
Id = 1,
|
||||||
|
FieldMappings = [new MetadataFieldMapping {Id = 1, SourceValue = "OldValue"}]
|
||||||
|
};
|
||||||
|
|
||||||
|
var settingsRepo = Substitute.For<ISettingsRepository>();
|
||||||
|
settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
|
||||||
|
settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
|
||||||
|
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
|
||||||
|
|
||||||
|
var updateDto = new MetadataSettingsDto
|
||||||
|
{
|
||||||
|
AgeRatingMappings = null,
|
||||||
|
Blacklist = null,
|
||||||
|
Whitelist = null,
|
||||||
|
Overrides = null,
|
||||||
|
PersonRoles = null,
|
||||||
|
FieldMappings = null
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await _settingsService.UpdateMetadataSettings(updateDto);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
await _mockUnitOfWork.Received(1).CommitAsync();
|
||||||
|
|
||||||
|
Assert.Empty(existingSettings.AgeRatingMappings);
|
||||||
|
Assert.Empty(existingSettings.Blacklist);
|
||||||
|
Assert.Empty(existingSettings.Whitelist);
|
||||||
|
Assert.Empty(existingSettings.Overrides);
|
||||||
|
Assert.Empty(existingSettings.PersonRoles);
|
||||||
|
|
||||||
|
// Verify existing field mappings were cleared
|
||||||
|
settingsRepo.Received(1).RemoveRange(Arg.Any<List<MetadataFieldMapping>>());
|
||||||
|
Assert.Empty(existingSettings.FieldMappings);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateMetadataSettings_WithFieldMappings_ShouldReplaceExistingMappings()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var existingSettings = new MetadataSettings
|
||||||
|
{
|
||||||
|
Id = 1,
|
||||||
|
FieldMappings =
|
||||||
|
[
|
||||||
|
new MetadataFieldMapping
|
||||||
|
{
|
||||||
|
Id = 1,
|
||||||
|
SourceType = MetadataFieldType.Genre,
|
||||||
|
DestinationType = MetadataFieldType.Genre,
|
||||||
|
SourceValue = "OldValue",
|
||||||
|
DestinationValue = "OldDestination",
|
||||||
|
ExcludeFromSource = false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
var settingsRepo = Substitute.For<ISettingsRepository>();
|
||||||
|
settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
|
||||||
|
settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
|
||||||
|
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
|
||||||
|
|
||||||
|
var updateDto = new MetadataSettingsDto
|
||||||
|
{
|
||||||
|
FieldMappings =
|
||||||
|
[
|
||||||
|
new MetadataFieldMappingDto
|
||||||
|
{
|
||||||
|
SourceType = MetadataFieldType.Tag,
|
||||||
|
DestinationType = MetadataFieldType.Genre,
|
||||||
|
SourceValue = "NewValue",
|
||||||
|
DestinationValue = "NewDestination",
|
||||||
|
ExcludeFromSource = true
|
||||||
|
},
|
||||||
|
|
||||||
|
new MetadataFieldMappingDto
|
||||||
|
{
|
||||||
|
SourceType = MetadataFieldType.Tag,
|
||||||
|
DestinationType = MetadataFieldType.Tag,
|
||||||
|
SourceValue = "AnotherValue",
|
||||||
|
DestinationValue = "AnotherDestination",
|
||||||
|
ExcludeFromSource = false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await _settingsService.UpdateMetadataSettings(updateDto);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
await _mockUnitOfWork.Received(1).CommitAsync();
|
||||||
|
|
||||||
|
// Verify existing field mappings were cleared and new ones added
|
||||||
|
settingsRepo.Received(1).RemoveRange(Arg.Any<List<MetadataFieldMapping>>());
|
||||||
|
Assert.Equal(2, existingSettings.FieldMappings.Count);
|
||||||
|
|
||||||
|
// Verify first mapping
|
||||||
|
Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[0].SourceType);
|
||||||
|
Assert.Equal(MetadataFieldType.Genre, existingSettings.FieldMappings[0].DestinationType);
|
||||||
|
Assert.Equal("NewValue", existingSettings.FieldMappings[0].SourceValue);
|
||||||
|
Assert.Equal("NewDestination", existingSettings.FieldMappings[0].DestinationValue);
|
||||||
|
Assert.True(existingSettings.FieldMappings[0].ExcludeFromSource);
|
||||||
|
|
||||||
|
// Verify second mapping
|
||||||
|
Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[1].SourceType);
|
||||||
|
Assert.Equal(MetadataFieldType.Tag, existingSettings.FieldMappings[1].DestinationType);
|
||||||
|
Assert.Equal("AnotherValue", existingSettings.FieldMappings[1].SourceValue);
|
||||||
|
Assert.Equal("AnotherDestination", existingSettings.FieldMappings[1].DestinationValue);
|
||||||
|
Assert.False(existingSettings.FieldMappings[1].ExcludeFromSource);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public async Task UpdateMetadataSettings_WithBlacklistWhitelist_ShouldNormalizeAndDeduplicateEntries()
|
||||||
|
{
|
||||||
|
// Arrange
|
||||||
|
var existingSettings = new MetadataSettings
|
||||||
|
{
|
||||||
|
Id = 1,
|
||||||
|
Blacklist = [],
|
||||||
|
Whitelist = []
|
||||||
|
};
|
||||||
|
|
||||||
|
// We need to mock the repository and provide a custom implementation for ToNormalized
|
||||||
|
var settingsRepo = Substitute.For<ISettingsRepository>();
|
||||||
|
settingsRepo.GetMetadataSettings().Returns(Task.FromResult(existingSettings));
|
||||||
|
settingsRepo.GetMetadataSettingDto().Returns(Task.FromResult(new MetadataSettingsDto()));
|
||||||
|
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
|
||||||
|
|
||||||
|
var updateDto = new MetadataSettingsDto
|
||||||
|
{
|
||||||
|
// Include duplicates with different casing and whitespace
|
||||||
|
Blacklist = ["tag1", "Tag1", " tag2 ", "", " ", "tag3"],
|
||||||
|
Whitelist = ["allowed1", "Allowed1", " allowed2 ", "", "allowed3"]
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await _settingsService.UpdateMetadataSettings(updateDto);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
await _mockUnitOfWork.Received(1).CommitAsync();
|
||||||
|
|
||||||
|
Assert.Equal(3, existingSettings.Blacklist.Count);
|
||||||
|
Assert.Equal(3, existingSettings.Whitelist.Count);
|
||||||
|
}
|
||||||
|
|
||||||
|
#endregion
|
||||||
|
}
|
||||||
|
|
@ -9,6 +9,7 @@ using API.Services;
|
||||||
using API.Services.Tasks;
|
using API.Services.Tasks;
|
||||||
using API.SignalR;
|
using API.SignalR;
|
||||||
using Kavita.Common;
|
using Kavita.Common;
|
||||||
|
using Microsoft.Extensions.Caching.Memory;
|
||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
using NSubstitute;
|
using NSubstitute;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
|
@ -30,116 +31,61 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
|
||||||
|
|
||||||
protected override async Task ResetDb()
|
protected override async Task ResetDb()
|
||||||
{
|
{
|
||||||
_context.SiteTheme.RemoveRange(_context.SiteTheme);
|
Context.SiteTheme.RemoveRange(Context.SiteTheme);
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
// Recreate defaults
|
// Recreate defaults
|
||||||
await Seed.SeedThemes(_context);
|
await Seed.SeedThemes(Context);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task UpdateDefault_ShouldThrowOnInvalidId()
|
public async Task UpdateDefault_ShouldThrowOnInvalidId()
|
||||||
{
|
{
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
_testOutputHelper.WriteLine($"[UpdateDefault_ShouldThrowOnInvalidId] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
_testOutputHelper.WriteLine($"[UpdateDefault_ShouldThrowOnInvalidId] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
||||||
var filesystem = CreateFileSystem();
|
var filesystem = CreateFileSystem();
|
||||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
|
||||||
|
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
|
||||||
|
|
||||||
_context.SiteTheme.Add(new SiteTheme()
|
Context.SiteTheme.Add(new SiteTheme()
|
||||||
{
|
{
|
||||||
Name = "Custom",
|
Name = "Custom",
|
||||||
NormalizedName = "Custom".ToNormalized(),
|
NormalizedName = "Custom".ToNormalized(),
|
||||||
Provider = ThemeProvider.User,
|
Provider = ThemeProvider.Custom,
|
||||||
FileName = "custom.css",
|
FileName = "custom.css",
|
||||||
IsDefault = false
|
IsDefault = false
|
||||||
});
|
});
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
var ex = await Assert.ThrowsAsync<KavitaException>(() => siteThemeService.UpdateDefault(10));
|
var ex = await Assert.ThrowsAsync<KavitaException>(() => siteThemeService.UpdateDefault(10));
|
||||||
Assert.Equal("Theme file missing or invalid", ex.Message);
|
Assert.Equal("Theme file missing or invalid", ex.Message);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task Scan_ShouldFindCustomFile()
|
|
||||||
{
|
|
||||||
await ResetDb();
|
|
||||||
_testOutputHelper.WriteLine($"[Scan_ShouldOnlyInsertOnceOnSecondScan] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
|
||||||
var filesystem = CreateFileSystem();
|
|
||||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData(""));
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
|
||||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
|
||||||
await siteThemeService.Scan();
|
|
||||||
|
|
||||||
Assert.NotNull(await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("custom"));
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task Scan_ShouldOnlyInsertOnceOnSecondScan()
|
|
||||||
{
|
|
||||||
await ResetDb();
|
|
||||||
_testOutputHelper.WriteLine(
|
|
||||||
$"[Scan_ShouldOnlyInsertOnceOnSecondScan] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
|
||||||
var filesystem = CreateFileSystem();
|
|
||||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData(""));
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
|
||||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
|
||||||
await siteThemeService.Scan();
|
|
||||||
|
|
||||||
Assert.NotNull(await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("custom"));
|
|
||||||
|
|
||||||
await siteThemeService.Scan();
|
|
||||||
|
|
||||||
var customThemes = (await _unitOfWork.SiteThemeRepository.GetThemeDtos()).Where(t =>
|
|
||||||
t.Name.ToNormalized().Equals("custom".ToNormalized()));
|
|
||||||
|
|
||||||
Assert.Single(customThemes);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
public async Task Scan_ShouldDeleteWhenFileDoesntExistOnSecondScan()
|
|
||||||
{
|
|
||||||
await ResetDb();
|
|
||||||
_testOutputHelper.WriteLine($"[Scan_ShouldDeleteWhenFileDoesntExistOnSecondScan] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
|
||||||
var filesystem = CreateFileSystem();
|
|
||||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData(""));
|
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
|
||||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
|
||||||
await siteThemeService.Scan();
|
|
||||||
|
|
||||||
Assert.NotNull(await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("custom"));
|
|
||||||
|
|
||||||
filesystem.RemoveFile($"{SiteThemeDirectory}custom.css");
|
|
||||||
await siteThemeService.Scan();
|
|
||||||
|
|
||||||
var themes = (await _unitOfWork.SiteThemeRepository.GetThemeDtos());
|
|
||||||
|
|
||||||
Assert.Equal(0, themes.Count(t =>
|
|
||||||
t.Name.ToNormalized().Equals("custom".ToNormalized())));
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task GetContent_ShouldReturnContent()
|
public async Task GetContent_ShouldReturnContent()
|
||||||
{
|
{
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
_testOutputHelper.WriteLine($"[GetContent_ShouldReturnContent] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
_testOutputHelper.WriteLine($"[GetContent_ShouldReturnContent] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
||||||
var filesystem = CreateFileSystem();
|
var filesystem = CreateFileSystem();
|
||||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
|
||||||
|
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
|
||||||
|
|
||||||
_context.SiteTheme.Add(new SiteTheme()
|
Context.SiteTheme.Add(new SiteTheme()
|
||||||
{
|
{
|
||||||
Name = "Custom",
|
Name = "Custom",
|
||||||
NormalizedName = "Custom".ToNormalized(),
|
NormalizedName = "Custom".ToNormalized(),
|
||||||
Provider = ThemeProvider.User,
|
Provider = ThemeProvider.Custom,
|
||||||
FileName = "custom.css",
|
FileName = "custom.css",
|
||||||
IsDefault = false
|
IsDefault = false
|
||||||
});
|
});
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
var content = await siteThemeService.GetContent((await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom")).Id);
|
var content = await siteThemeService.GetContent((await UnitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom")).Id);
|
||||||
Assert.NotNull(content);
|
Assert.NotNull(content);
|
||||||
Assert.NotEmpty(content);
|
Assert.NotEmpty(content);
|
||||||
Assert.Equal("123", content);
|
Assert.Equal("123", content);
|
||||||
|
|
@ -149,30 +95,31 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
|
||||||
public async Task UpdateDefault_ShouldHaveOneDefault()
|
public async Task UpdateDefault_ShouldHaveOneDefault()
|
||||||
{
|
{
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
_testOutputHelper.WriteLine($"[UpdateDefault_ShouldHaveOneDefault] All Themes: {(await _unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
_testOutputHelper.WriteLine($"[UpdateDefault_ShouldHaveOneDefault] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
|
||||||
var filesystem = CreateFileSystem();
|
var filesystem = CreateFileSystem();
|
||||||
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
|
||||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||||
var siteThemeService = new ThemeService(ds, _unitOfWork, _messageHub);
|
var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
|
||||||
|
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
|
||||||
|
|
||||||
_context.SiteTheme.Add(new SiteTheme()
|
Context.SiteTheme.Add(new SiteTheme()
|
||||||
{
|
{
|
||||||
Name = "Custom",
|
Name = "Custom",
|
||||||
NormalizedName = "Custom".ToNormalized(),
|
NormalizedName = "Custom".ToNormalized(),
|
||||||
Provider = ThemeProvider.User,
|
Provider = ThemeProvider.Custom,
|
||||||
FileName = "custom.css",
|
FileName = "custom.css",
|
||||||
IsDefault = false
|
IsDefault = false
|
||||||
});
|
});
|
||||||
await _context.SaveChangesAsync();
|
await Context.SaveChangesAsync();
|
||||||
|
|
||||||
var customTheme = (await _unitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom"));
|
var customTheme = (await UnitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom"));
|
||||||
|
|
||||||
Assert.NotNull(customTheme);
|
Assert.NotNull(customTheme);
|
||||||
await siteThemeService.UpdateDefault(customTheme.Id);
|
await siteThemeService.UpdateDefault(customTheme.Id);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Assert.Equal(customTheme.Id, (await _unitOfWork.SiteThemeRepository.GetDefaultTheme()).Id);
|
Assert.Equal(customTheme.Id, (await UnitOfWork.SiteThemeRepository.GetDefaultTheme()).Id);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,5 @@
|
||||||
using API.Extensions;
|
using API.Helpers.Builders;
|
||||||
using API.Helpers.Builders;
|
|
||||||
using API.Services.Plus;
|
using API.Services.Plus;
|
||||||
using API.Services.Tasks;
|
|
||||||
|
|
||||||
namespace API.Tests.Services;
|
namespace API.Tests.Services;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
|
|
@ -16,7 +14,6 @@ using API.Entities.Enums;
|
||||||
using API.Helpers;
|
using API.Helpers;
|
||||||
using API.Services;
|
using API.Services;
|
||||||
using SignalR;
|
using SignalR;
|
||||||
using Helpers;
|
|
||||||
using AutoMapper;
|
using AutoMapper;
|
||||||
using Microsoft.Data.Sqlite;
|
using Microsoft.Data.Sqlite;
|
||||||
using Microsoft.EntityFrameworkCore;
|
using Microsoft.EntityFrameworkCore;
|
||||||
|
|
@ -52,7 +49,7 @@ public class TachiyomiServiceTests
|
||||||
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
|
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
|
||||||
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()),
|
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()),
|
||||||
Substitute.For<IScrobblingService>());
|
Substitute.For<IScrobblingService>());
|
||||||
_tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For<ILogger<ReaderService>>(), _readerService);
|
_tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For<ILogger<TachiyomiService>>(), _readerService);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -125,12 +122,12 @@ public class TachiyomiServiceTests
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
||||||
|
|
@ -170,12 +167,12 @@ public class TachiyomiServiceTests
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
||||||
|
|
@ -221,7 +218,7 @@ public class TachiyomiServiceTests
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -265,18 +262,19 @@ public class TachiyomiServiceTests
|
||||||
|
|
||||||
Assert.Equal("21", latestChapter.Number);
|
Assert.Equal("21", latestChapter.Number);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
public async Task GetLatestChapter_ShouldReturnEncodedVolume_Progress()
|
public async Task GetLatestChapter_ShouldReturnEncodedVolume_Progress()
|
||||||
{
|
{
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithChapter(new ChapterBuilder("21").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("21").WithPages(1).Build())
|
||||||
|
|
@ -323,13 +321,16 @@ public class TachiyomiServiceTests
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("0").WithPages(199).Build())
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithPages(199).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithChapter(new ChapterBuilder("0").WithPages(192).Build())
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithPages(192).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("3")
|
.WithVolume(new VolumeBuilder("3")
|
||||||
.WithChapter(new ChapterBuilder("0").WithPages(255).Build())
|
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
|
||||||
|
.WithPages(255).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithPages(646)
|
.WithPages(646)
|
||||||
.Build();
|
.Build();
|
||||||
|
|
@ -368,7 +369,7 @@ public class TachiyomiServiceTests
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -421,12 +422,12 @@ public class TachiyomiServiceTests
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
||||||
|
|
@ -464,12 +465,12 @@ public class TachiyomiServiceTests
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("3").WithPages(1).Build())
|
||||||
|
|
@ -514,7 +515,7 @@ public class TachiyomiServiceTests
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
|
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
|
|
@ -562,12 +563,12 @@ public class TachiyomiServiceTests
|
||||||
{
|
{
|
||||||
await ResetDb();
|
await ResetDb();
|
||||||
var series = new SeriesBuilder("Test")
|
var series = new SeriesBuilder("Test")
|
||||||
.WithVolume(new VolumeBuilder("0")
|
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
|
||||||
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
|
||||||
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("96").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("1")
|
.WithVolume(new VolumeBuilder("1")
|
||||||
.WithChapter(new ChapterBuilder("1").WithIsSpecial(true).WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("1").WithPages(1).Build())
|
||||||
.Build())
|
.Build())
|
||||||
.WithVolume(new VolumeBuilder("2")
|
.WithVolume(new VolumeBuilder("2")
|
||||||
.WithChapter(new ChapterBuilder("21").WithPages(1).Build())
|
.WithChapter(new ChapterBuilder("21").WithPages(1).Build())
|
||||||
|
|
|
||||||
Binary file not shown.
BIN
API.Tests/Services/Test Data/BookService/Rollo at Work SP01.pdf
Normal file
BIN
API.Tests/Services/Test Data/BookService/Rollo at Work SP01.pdf
Normal file
Binary file not shown.
BIN
API.Tests/Services/Test Data/BookService/encrypted.pdf
Normal file
BIN
API.Tests/Services/Test Data/BookService/encrypted.pdf
Normal file
Binary file not shown.
BIN
API.Tests/Services/Test Data/BookService/indirect.pdf
Normal file
BIN
API.Tests/Services/Test Data/BookService/indirect.pdf
Normal file
Binary file not shown.
BIN
API.Tests/Services/Test Data/CoverDbService/Existing/01.webp
Normal file
BIN
API.Tests/Services/Test Data/CoverDbService/Existing/01.webp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 17 KiB |
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue