121 Commits

Author SHA1 Message Date
Antoine Aflalo
a6ad1dada3 fix(release): fix missing archive in the release 2025-09-10 08:49:50 -04:00
Antoine Aflalo
17fe01f27c fix(docker): remove unnecessary volume declaration and clean up bashrc setup 2025-09-09 22:58:24 -04:00
Antoine Aflalo
4fa3014d80 fix(docker): adjust when the volume is set
Ensures the cache is already downloaded.
2025-09-09 22:55:27 -04:00
Antoine Aflalo
a47af5a7a8 ci: fix cosign 2025-09-09 22:32:09 -04:00
Antoine Aflalo
d7f13132f4 ci: fix test workflow 2025-09-09 22:23:22 -04:00
Antoine Aflalo
a8587f3f1f fix(docker): ship the converter in the docker image 2025-09-09 22:21:50 -04:00
Antoine Aflalo
12817b1bff ci: full upgrade 2025-09-09 21:48:26 -04:00
Antoine Aflalo
19dcf9d40b fix(docker): missing ca-certificate 2025-09-09 21:47:47 -04:00
Antoine Aflalo
a7fa5bd0c7 ci: no need for interactive for docker image 2025-09-09 21:44:30 -04:00
Antoine Aflalo
9bde56d6c1 ci: have buildx setup 2025-09-09 21:38:11 -04:00
Antoine Aflalo
9c28923c35 ci: add verbosity 2025-09-09 21:25:28 -04:00
Antoine Aflalo
b878390b46 ci: goreleaser fixes 2025-09-09 21:17:27 -04:00
Antoine Aflalo
41ff843a80 ci(docker-sign): remove signing docker image 2025-09-09 20:53:49 -04:00
Antoine Aflalo
221945cb66 fix(docker): fix docker image 2025-09-09 20:47:46 -04:00
Antoine Aflalo
35bba7c088 ci: improve goreleaser config for docker image 2025-09-09 20:46:08 -04:00
Antoine Aflalo
b5a894deba fix(docker): fix issue with docker image not downloading the right webp converter 2025-09-09 20:36:42 -04:00
Antoine Aflalo
7ad0256b46 chore: cleanup deps 2025-09-09 20:15:35 -04:00
Antoine Aflalo
f08e8dad7b fix: releasing app 2025-09-09 19:36:05 -04:00
Antoine Aflalo
54de9bcdeb perf: use default for unlocking the mutex 2025-09-09 16:59:11 -04:00
Antoine Aflalo
0a7cc506fd fix: possible race condition 2025-09-09 16:58:42 -04:00
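These two commits replace the optimize command's buffered error channel with a slice of errors guarded by a mutex (the resulting errs/errMutex code is visible in the optimize.go diff further down). A minimal Go sketch of that worker-pool error-collection pattern, with illustrative names:

```go
package main

import (
	"errors"
	"fmt"
	"sync"
)

// collectErrors fans items out to a fixed number of workers and gathers
// failures into a slice guarded by a mutex, mirroring the errs/errMutex
// pattern shown in the optimize command diff below.
func collectErrors(items []string, parallelism int, work func(string) error) []error {
	var (
		errs     []error
		errMutex sync.Mutex
		wg       sync.WaitGroup
	)
	fileChan := make(chan string)
	for i := 0; i < parallelism; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for item := range fileChan {
				if err := work(item); err != nil {
					errMutex.Lock()
					errs = append(errs, fmt.Errorf("error processing %s: %w", item, err))
					errMutex.Unlock()
				}
			}
		}()
	}
	for _, item := range items {
		fileChan <- item
	}
	close(fileChan)
	wg.Wait()
	return errs
}

func main() {
	failures := collectErrors([]string{"a.cbz", "b.cbz"}, 2, func(name string) error {
		if name == "b.cbz" {
			return errors.New("simulated failure")
		}
		return nil
	})
	fmt.Println(failures)
}
```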
Antoine Aflalo
fe8c5606fc chore: update deps 2025-09-09 16:49:24 -04:00
Antoine Aflalo
9a8a9693fb fix(webp): fix installing newer version of webp 2025-09-09 11:37:13 -04:00
Antoine Aflalo
7047710fdd test: no need for timeout for integration test 2025-09-09 10:13:56 -04:00
Antoine Aflalo
88786d4e53 fix(webp): fix using the right version 2025-09-09 10:12:38 -04:00
Antoine Aflalo
e0c8bf340b ci: add test preparation 2025-09-09 09:58:50 -04:00
Antoine Aflalo
36b9ddc80f fix(webp): fix issue with concurrent preparation of encoder. 2025-09-09 09:31:31 -04:00
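The commit message alone doesn't show how the concurrent-preparation issue was solved; a common way in Go to make one-time setup safe when several goroutines trigger it is sync.Once. A minimal sketch under that assumption (prepareEncoder and doPrepare are stand-ins, not the project's actual functions):

```go
package main

import (
	"fmt"
	"sync"
)

var (
	prepareOnce sync.Once
	prepareErr  error
)

// doPrepare stands in for the one-time encoder download/extraction step.
func doPrepare() error {
	fmt.Println("preparing encoder once")
	return nil
}

// prepareEncoder is safe to call from many goroutines: the setup body runs
// exactly once, and every caller observes the same error value.
func prepareEncoder() error {
	prepareOnce.Do(func() {
		prepareErr = doPrepare()
	})
	return prepareErr
}

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			if err := prepareEncoder(); err != nil {
				fmt.Println("prepare failed:", err)
			}
		}()
	}
	wg.Wait()
}
```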
Antoine Aflalo
a380de3fe5 ci: improve testing workflow 2025-09-09 09:19:07 -04:00
Antoine Aflalo
e47e21386f tests: update testing suite with more use cases
Actually try to convert existing chapters.
2025-09-09 09:14:40 -04:00
renovate[bot]
1b1be3a83a fix(deps): update module github.com/spf13/viper to v1.21.0 (#112)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-08 21:54:12 +00:00
Antoine Aflalo
44a919e4f3 test: add test with webp 2025-09-08 17:21:41 -04:00
renovate[bot]
1b9d83d2ff fix(deps): update module golang.org/x/image to v0.31.0 (#111)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-08 16:27:05 +00:00
Antoine Aflalo
ddc5121216 Merge pull request #110 from Belphemur/renovate/actions-setup-go-6.x
chore(deps): update actions/setup-go action to v6
2025-09-04 08:12:50 -04:00
renovate[bot]
a361f22951 chore(deps): update actions/setup-go action to v6 2025-09-04 05:07:02 +00:00
Antoine Aflalo
d245b80c65 fix: naming issue 2025-09-03 22:21:37 -04:00
Antoine Aflalo
011f7a7a7f Merge pull request #109 from Belphemur/feat/gif
Feat/gif
2025-09-03 22:19:07 -04:00
Antoine Aflalo
f159d3d0d0 fix: keep the error 2025-09-03 22:17:41 -04:00
Antoine Aflalo
ede8d62572 fix: keep pages as they are if we can't decode them, and disable conversion 2025-09-03 22:15:10 -04:00
Antoine Aflalo
a151a1d4f8 tests(corruption): add test for corrupt pages 2025-09-03 21:38:21 -04:00
Antoine Aflalo
30ea3d4583 test: add test for page type 2025-09-03 21:34:39 -04:00
Antoine Aflalo
6205e3ea28 feat(gif): support GIF files
Adds .gif file extension support and more exception handling.
Fixes #105
2025-09-03 21:04:51 -04:00
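For context on what GIF support usually involves in Go: image decoders register themselves with the image package through blank imports, after which image.Decode recognizes .gif pages alongside PNG and JPEG. A minimal standard-library sketch (page.gif is a hypothetical input; this is not the project's page-loading code):

```go
package main

import (
	"fmt"
	"image"
	_ "image/gif"  // register the GIF decoder with image.Decode
	_ "image/jpeg" // register the JPEG decoder
	_ "image/png"  // register the PNG decoder
	"os"
)

func main() {
	f, err := os.Open("page.gif") // hypothetical input page
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	img, format, err := image.Decode(f)
	if err != nil {
		// Commit ede8d62572 keeps undecodable pages as-is instead of failing;
		// this sketch only reports the error.
		fmt.Fprintln(os.Stderr, "cannot decode page:", err)
		os.Exit(1)
	}
	fmt.Printf("decoded %s image, bounds %v\n", format, img.Bounds())
}
```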
Antoine Aflalo
f6bdc3cd86 Merge pull request #106 from Belphemur/dependabot/go_modules/go_modules-004c5295e3 2025-09-03 11:01:04 -04:00
Antoine Aflalo
70257a0439 Merge pull request #107 from Belphemur/renovate/actions-attest-build-provenance-3.x
chore(deps): update actions/attest-build-provenance action to v3
2025-09-03 08:40:01 -04:00
dependabot[bot]
41108514d9 chore(deps): bump github.com/ulikunitz/xz
Bumps the go_modules group with 1 update in the / directory: [github.com/ulikunitz/xz](https://github.com/ulikunitz/xz).


Updates `github.com/ulikunitz/xz` from 0.5.12 to 0.5.14
- [Commits](https://github.com/ulikunitz/xz/compare/v0.5.12...v0.5.14)

---
updated-dependencies:
- dependency-name: github.com/ulikunitz/xz
  dependency-version: 0.5.14
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 23:13:21 +00:00
renovate[bot]
7e2bb7cf90 fix(deps): update module github.com/spf13/cobra to v1.10.1 (#108)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-01 23:10:00 +00:00
renovate[bot]
8ab75421b1 chore(deps): update actions/attest-build-provenance action to v3 2025-08-31 08:55:43 +00:00
renovate[bot]
4894b14b90 fix(deps): update module github.com/stretchr/testify to v1.11.1 (#104)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-27 13:34:53 +00:00
Antoine Aflalo
9a29b6b45c fix: rollback dockerfile changes 2025-08-27 09:27:11 -04:00
Antoine Aflalo
fcc4ac57ca fix(ci): rollback to docker config instead of docker_v2 2025-08-26 23:08:08 -04:00
Antoine Aflalo
4cc33db553 fix(goreleaser): fix ci 2025-08-26 23:03:47 -04:00
Antoine Aflalo
d36c5cf0f1 fix: ci issue with goreleaser 2025-08-26 23:00:53 -04:00
Antoine Aflalo
ed70eb81cd ci: update to new setup for docker images 2025-08-26 22:59:13 -04:00
Antoine Aflalo
419edbce7b fix: ci config for goreleaser 2025-08-26 22:50:14 -04:00
Antoine Aflalo
4524e94b17 ci: fix goreleaser 2025-08-26 22:47:36 -04:00
Antoine Aflalo
c6823168af fix: add attestations 2025-08-26 22:45:35 -04:00
Antoine Aflalo
9bca0ceaf4 fix: add autocomplete definition for log level 2025-08-26 22:39:23 -04:00
Antoine Aflalo
c2a6220fde fix(logging): fix logging parameter not taken into account 2025-08-26 22:36:23 -04:00
Antoine Aflalo
e26cf7a26a fix: test 2025-08-26 21:37:51 -04:00
Antoine Aflalo
4e5180f658 feat: add timeout option for chapter conversion to prevent hanging on problematic files
fixes #102
2025-08-26 21:34:52 -04:00
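The diffs below add a --timeout duration flag (0 meaning no timeout) and thread a context.Context through ConvertChapter. A minimal sketch of how such a duration typically maps onto context.WithTimeout; convertFunc and the function names are illustrative, not the project's actual API:

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// convertFunc stands in for a chapter conversion that honours ctx cancellation.
type convertFunc func(ctx context.Context) error

// convertWithTimeout wraps a conversion in context.WithTimeout; a zero
// timeout means "no limit", matching the flag's documented default.
func convertWithTimeout(timeout time.Duration, convert convertFunc) error {
	ctx := context.Background()
	if timeout > 0 {
		var cancel context.CancelFunc
		ctx, cancel = context.WithTimeout(ctx, timeout)
		defer cancel()
	}
	err := convert(ctx)
	if errors.Is(err, context.DeadlineExceeded) {
		return fmt.Errorf("chapter conversion exceeded %s: %w", timeout, err)
	}
	return err
}

func main() {
	slowChapter := func(ctx context.Context) error {
		select {
		case <-time.After(2 * time.Second): // simulated slow conversion
			return nil
		case <-ctx.Done():
			return ctx.Err()
		}
	}
	fmt.Println(convertWithTimeout(500*time.Millisecond, slowChapter))
}
```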
Antoine Aflalo
e7bbae1c25 chore: bump webp 2025-08-26 21:20:56 -04:00
Antoine Aflalo
32c009ed9b feat: integrate zerolog for enhanced logging across multiple components 2025-08-26 21:16:54 -04:00
Antoine Aflalo
94fb60c5c6 feat: enhance logging capabilities with zerolog integration and command-line support 2025-08-26 21:07:48 -04:00
Antoine Aflalo
dfee46812d feat: use Zerolog for logging. 2025-08-26 20:55:34 -04:00
Antoine Aflalo
d0e4037e15 Merge pull request #101 from Belphemur/dependabot/go_modules/go_modules-e1b2e84e8b 2025-08-26 20:38:53 -04:00
renovate[bot]
8539abe99e fix(deps): update module github.com/stretchr/testify to v1.11.0 (#103)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-24 17:46:08 +00:00
dependabot[bot]
f1151435e1 chore(deps): bump github.com/go-viper/mapstructure/v2
Bumps the go_modules group with 1 update in the / directory: [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure).


Updates `github.com/go-viper/mapstructure/v2` from 2.3.0 to 2.4.0
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.3.0...v2.4.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.4.0
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-08-21 15:25:11 +00:00
renovate[bot]
c6e00fda5d fix(deps): update golang.org/x/exp digest to 8b4c13b (#100)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-19 23:02:35 +00:00
renovate[bot]
2f37936a72 chore(deps): update anchore/sbom-action action to v0.20.5 (#99)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-14 18:05:44 +00:00
renovate[bot]
f0d5c254a6 fix(deps): update golang.org/x/exp digest to 42675ad (#98)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-13 16:28:31 +00:00
renovate[bot]
e35b7b3ae8 chore(deps): update dependency go to v1.25.0 (#97)
* chore(deps): update dependency go to v1.25.0

* chore: move ci/cd to 1.25

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Antoine Aflalo <197810+Belphemur@users.noreply.github.com>
2025-08-13 01:04:05 +00:00
Antoine Aflalo
43d9550e6e Merge pull request #95 from Belphemur/renovate/actions-checkout-5.x 2025-08-11 20:33:18 -04:00
renovate[bot]
e7fa06f4d3 fix(deps): update golang.org/x/exp digest to 51f8813 (#96)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-11 22:47:50 +00:00
renovate[bot]
8b48da1b25 chore(deps): update actions/checkout action to v5 2025-08-11 16:24:11 +00:00
renovate[bot]
fdcc9bf076 fix(deps): update golang.org/x/exp digest to a408d31 (#94)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-08 17:00:38 +00:00
renovate[bot]
38b9d4f1bd fix(deps): update module golang.org/x/image to v0.30.0 (#93)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-07 21:44:51 +00:00
renovate[bot]
fbc1ec7d75 chore(deps): update dependency go to v1.24.6 (#92)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-06 20:48:02 +00:00
renovate[bot]
e7b566ff63 chore(deps): update anchore/sbom-action action to v0.20.4 (#91)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-22 03:11:33 +00:00
renovate[bot]
d73d0347b1 fix(deps): update golang.org/x/exp digest to 645b1fa (#90)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-18 20:32:01 +00:00
renovate[bot]
04b9dbb2dd chore(deps): update sigstore/cosign-installer action to v3.9.2 (#89)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-17 22:49:46 +00:00
renovate[bot]
5d767470a8 fix(deps): update golang.org/x/exp digest to 542afb5 (#88)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-17 22:49:33 +00:00
renovate[bot]
473c6f40e8 fix(deps): update golang.org/x/exp digest to 6ae5c78 (#87)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-11 23:47:39 +00:00
renovate[bot]
403f43a417 fix(deps): update module golang.org/x/image to v0.29.0 (#86)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-10 03:11:22 +00:00
renovate[bot]
1bfe755dd9 chore(deps): update dependency go to v1.24.5 (#85)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-08 22:53:26 +00:00
renovate[bot]
3cd6a4ab1f chore(deps): update anchore/sbom-action action to v0.20.2 (#84)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-02 22:27:30 +00:00
Antoine Aflalo
117206e0ee Merge pull request #83 from Belphemur/dependabot/go_modules/go_modules-3464edad9a 2025-06-27 15:48:52 -04:00
dependabot[bot]
1e43f9d8a0 chore(deps): bump github.com/go-viper/mapstructure/v2
Bumps the go_modules group with 1 update in the / directory: [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure).


Updates `github.com/go-viper/mapstructure/v2` from 2.2.1 to 2.3.0
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.2.1...v2.3.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.3.0
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-27 16:49:44 +00:00
renovate[bot]
6f8b525a96 fix(deps): update module github.com/mholt/archives to v0.1.3 (#82)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-26 22:52:14 +00:00
renovate[bot]
9480cc0e36 chore(deps): update sigstore/cosign-installer action to v3.9.1 (#81)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-23 17:36:00 +00:00
renovate[bot]
a72cd3f84f fix(deps): update golang.org/x/exp digest to b7579e2 (#80)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-20 07:16:04 +00:00
renovate[bot]
a3424494cc chore(deps): update sigstore/cosign-installer action to v3.9.0 (#79)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-17 13:39:58 +00:00
renovate[bot]
85d0b8bbca chore(deps): update anchore/sbom-action action to v0.20.1 (#78)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-13 21:55:26 +00:00
Antoine Aflalo
29f7fbbc0d Merge pull request #77 from Belphemur/Belphemur/issue75 2025-06-12 09:26:34 -04:00
Antoine Aflalo
1258b06210 docs: update README to include CBR file support and clarify features 2025-06-12 09:24:02 -04:00
Antoine Aflalo
8a6ddc668e feat: enhance optimization logic for CBR/CBZ file handling and add tests 2025-06-12 09:23:00 -04:00
Antoine Aflalo
989ca2450d feat: support CBR files in optimize and watch commands
Fixes #75
2025-06-12 09:18:06 -04:00
Antoine Aflalo
970b9019df feat: load CBR files 2025-06-12 09:11:22 -04:00
renovate[bot]
a5f88fe0e9 fix(deps): update module github.com/samber/lo to v1.51.0 (#76)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-11 08:58:04 +00:00
renovate[bot]
c46700d0e5 fix(deps): update module golang.org/x/image to v0.28.0 (#74)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-07 00:59:04 +00:00
renovate[bot]
3d98fe036b chore(deps): update dependency go to v1.24.4 (#73)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-06 19:09:31 +00:00
renovate[bot]
00d7ec0ba9 fix(deps): update golang.org/x/exp digest to dcc06ee (#72)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-06 19:08:57 +00:00
renovate[bot]
8c09db9a9e fix(deps): update golang.org/x/exp digest to b6e5de4 (#71)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-31 02:07:12 +00:00
renovate[bot]
0390f1119f fix(deps): update golang.org/x/exp digest to 65e9200 (#70)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-30 18:45:36 +00:00
renovate[bot]
b62485de3b chore(deps): update anchore/sbom-action action to v0.20.0 (#69)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-14 20:21:27 +00:00
renovate[bot]
8e11eca719 chore(deps): update dependency go to v1.24.3 (#68)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-06 19:38:28 +00:00
renovate[bot]
841bdce097 fix(deps): update golang.org/x/exp digest to ce4c2cf (#67)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-06 03:55:49 +00:00
renovate[bot]
74c0954118 fix(deps): update module golang.org/x/image to v0.27.0 (#66)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-05 22:49:16 +00:00
renovate[bot]
7478f0b71c fix(deps): update module github.com/samber/lo to v1.50.0 (#65)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-26 18:28:12 +00:00
renovate[bot]
a03eba5400 chore(deps): update anchore/sbom-action action to v0.19.0 (#64)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-24 23:25:23 +00:00
renovate[bot]
7546e516cd chore(deps): update sigstore/cosign-installer action to v3.8.2 (#63)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-22 21:53:05 +00:00
renovate[bot]
bef7052163 fix(deps): update golang.org/x/exp digest to 7e4ce0a (#62)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-08 18:03:02 +00:00
renovate[bot]
e04b213fa4 fix(deps): update module golang.org/x/image to v0.26.0 (#61)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-06 17:53:06 +00:00
renovate[bot]
92fa3a54e7 chore(deps): update dependency go to v1.24.2 (#60)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-01 17:06:22 +00:00
renovate[bot]
bc92d36df2 fix(deps): update module github.com/spf13/viper to v1.20.1 (#59)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-26 19:06:44 +00:00
renovate[bot]
9863dd5d98 fix(deps): update module github.com/spf13/viper to v1.20.0 (#58)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-15 17:12:41 +00:00
renovate[bot]
ddd19292d5 fix(deps): update golang.org/x/exp digest to 054e65f (#57)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-06 02:41:33 +00:00
renovate[bot]
6a7914bd83 fix(deps): update module golang.org/x/image to v0.25.0 (#56)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-05 19:37:10 +00:00
renovate[bot]
005d2d35c3 chore(deps): update dependency go to v1.24.1 (#55)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-04 22:46:43 +00:00
renovate[bot]
abcce332e5 fix(deps): update golang.org/x/exp digest to dead583 (#54)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-28 22:01:17 +00:00
renovate[bot]
376656ba2c chore(deps): update sigstore/cosign-installer action to v3.8.1 (#53)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-20 15:55:10 +00:00
renovate[bot]
34288e6bbe fix(deps): update golang.org/x/exp digest to aa4b98e (#52)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-18 18:49:14 +00:00
renovate[bot]
d32ea3e8a9 fix(deps): update module github.com/spf13/cobra to v1.9.1 (#51)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-17 02:47:58 +00:00
renovate[bot]
23256013f5 fix(deps): update golang.org/x/exp digest to eff6e97 (#50)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-15 21:41:43 +00:00
30 changed files with 2503 additions and 504 deletions


@@ -13,7 +13,7 @@ jobs:
pull-requests: write
checks: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
ref: ${{ github.event.pull_request.head.sha }} # to check out the actual pull request commit, not the merge commit
fetch-depth: 0 # a full history is required for pull request analysis


@@ -8,36 +8,48 @@ name: release
on:
push:
tags:
- 'v*'
- "v*"
permissions:
contents: write # needed to write releases
id-token: write # needed for keyless signing
packages: write # needed for ghcr access
attestations: write # needed for attestations
jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0 # this is important, otherwise it won't checkout the full tree (i.e. no previous tags)
- uses: actions/setup-go@v5
- name: Set up Go
uses: actions/setup-go@v6
with:
go-version: 1.24
go-version-file: go.mod
cache: true
- uses: sigstore/cosign-installer@v3.8.0 # installs cosign
- uses: anchore/sbom-action/download-syft@v0.18.0 # installs syft
- uses: sigstore/cosign-installer@v3.9.2 # installs cosign
- uses: anchore/sbom-action/download-syft@v0.20.5 # installs syft
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- uses: docker/login-action@v3 # login to ghcr
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3 # login to ghcr
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: goreleaser/goreleaser-action@v6 # run goreleaser
- uses: goreleaser/goreleaser-action@v6 # run goreleaser
with:
version: latest
args: release --clean
version: nightly
args: release --clean --verbose
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# After GoReleaser runs, attest all the files in ./dist/checksums.txt:
- uses: actions/attest-build-provenance@v3
with:
subject-checksums: ./dist/checksums.txt
# After GoReleaser runs, attest all the images in ./dist/digests.txt:
- uses: actions/attest-build-provenance@v3
with:
subject-checksums: ./dist/digests.txt


@@ -6,49 +6,46 @@ on:
jobs:
test:
name: Test
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Set up Go
uses: actions/setup-go@v5
uses: actions/setup-go@v6
with:
go-version: '1.24'
go-version-file: go.mod
cache: true
- name: Install gotestsum
run: go install gotest.tools/gotestsum@latest
- name: Install dependencies
run: go mod tidy
- name: Install Junit reporter
- name: Setup test environment
run: |
wget https://github.com/jstemmer/go-junit-report/releases/download/v2.1.0/go-junit-report-v2.1.0-linux-amd64.tar.gz && \
tar -xzf go-junit-report-v2.1.0-linux-amd64.tar.gz && \
chmod +x go-junit-report && \
mv go-junit-report /usr/local/bin/
go build -tags encoder_setup -o encoder-setup ./cmd/encoder-setup
./encoder-setup
- name: Run tests
run: |
set -o pipefail
go test -v 2>&1 ./... -coverprofile=coverage.txt | tee test-results.txt
- name: Analyse test results
if: ${{ !cancelled() }}
run: go-junit-report < test-results.txt > junit.xml
mkdir -p test-results
gotestsum --junitfile test-results/junit.xml --format testname -- -race -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload test result artifact
if: ${{ !cancelled() }}
uses: actions/upload-artifact@v4
with:
name: test-results
path: |
test-results.txt
junit.xml
path: |
test-results/junit.xml
test-results/coverage.txt
retention-days: 7
- name: Upload results to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload test results to Codecov
if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
token: ${{ secrets.CODECOV_TOKEN }}
files: test-results/junit.xml
- name: Upload coverage reports to Codecov
if: ${{ !cancelled() }}
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}

.gitignore

@@ -102,4 +102,5 @@ fabric.properties
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
.idea/caches/build_file_checksums.ser
*__debug_bin*


@@ -5,13 +5,22 @@ release:
github:
owner: belphemur
name: CBZOptimizer
archives:
- ids:
- cbzoptimizer
formats: ["tar.zst"]
format_overrides:
- # Which GOOS to override the format for.
goos: windows
formats: ["zip"] # Plural form, multiple formats. Since: v2.6
changelog:
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
- '^chore:'
- "^docs:"
- "^test:"
- "^chore:"
groups:
- title: Features
regexp: '^.*?feat(\([[:word:]]+\))??!?:.+$'
@@ -44,10 +53,32 @@ builds:
- -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{ .CommitDate }}
env:
- CGO_ENABLED=0
- id: encoder-setup
main: cmd/encoder-setup/main.go
binary: encoder-setup
goos:
- linux
goarch:
- amd64
- arm64
# ensures mod timestamp to be the commit timestamp
mod_timestamp: "{{ .CommitTimestamp }}"
flags:
# trims path
- -trimpath
tags:
- encoder_setup
ldflags:
- -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{ .CommitDate }}
env:
- CGO_ENABLED=0
# config the checksum filename
# https://goreleaser.com/customization/checksum
checksum:
name_template: "checksums.txt"
# Change the digests filename:
docker_digest:
name_template: "digests.txt"
# create a source tarball
# https://goreleaser.com/customization/source/
source:
@@ -64,32 +95,26 @@ sboms:
artifacts: source
# create a docker image
# https://goreleaser.com/customization/docker
dockers:
- image_templates:
- "ghcr.io/belphemur/cbzoptimizer:latest-amd64"
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-amd64"
use: buildx
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.name={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source={{.GitURL}}"
- image_templates:
- "ghcr.io/belphemur/cbzoptimizer:latest-arm64"
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-arm64"
use: buildx
goarch: arm64
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.name={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source={{.GitURL}}"
dockers_v2:
- id: cbzoptimizer-image
ids:
- cbzoptimizer
- encoder-setup
platforms:
- linux/amd64
- linux/arm64
images:
- "ghcr.io/belphemur/cbzoptimizer"
tags:
- "{{ .Version }}"
- latest
labels:
"org.opencontainers.image.created": "{{.Date}}"
"org.opencontainers.image.name": "{{.ProjectName}}"
"org.opencontainers.image.revision": "{{.FullCommit}}"
"org.opencontainers.image.version": "{{.Version}}"
"org.opencontainers.image.source": "{{.GitURL}}"
"org.opencontainers.image.description": "CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) and CBR (Comic Book RAR) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality."
# signs the checksum file
# all files (including the sboms) are included in the checksum, so we don't need to sign each one if we don't want to
# https://goreleaser.com/customization/sign
@@ -112,18 +137,8 @@ docker_signs:
- cmd: cosign
env:
- COSIGN_EXPERIMENTAL=1
artifacts: images
output: true
args:
- "sign"
- "${artifact}"
- "--yes" # needed on cosign 2.0.0+
docker_manifests:
- name_template: "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}"
image_templates:
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-amd64"
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-arm64"
- name_template: "ghcr.io/belphemur/cbzoptimizer:latest"
image_templates:
- "ghcr.io/belphemur/cbzoptimizer:latest-amd64"
- "ghcr.io/belphemur/cbzoptimizer:latest-arm64"

.vscode/launch.json

@@ -1,16 +1,24 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch Package",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${fileDirname}"
}
]
}
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch file",
"type": "go",
"request": "launch",
"mode": "debug",
"program": "${file}"
},
{
"name": "Optimize Testdata",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/cbzoptimizer",
"args": ["optimize", "${workspaceFolder}/testdata", "-l", "debug"],
"cwd": "${workspaceFolder}"
}
]
}


@@ -1,29 +1,41 @@
FROM alpine:latest
FROM debian:trixie-slim
LABEL authors="Belphemur"
ARG TARGETPLATFORM
ARG APP_PATH=/usr/local/bin/CBZOptimizer
ENV USER=abc
ENV CONFIG_FOLDER=/config
ENV PUID=99
ENV DEBIAN_FRONTEND=noninteractive
RUN mkdir -p "${CONFIG_FOLDER}" && \
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update && apt-get install -y --no-install-recommends adduser && \
addgroup --system users && \
adduser \
-S \
-H \
-h "${CONFIG_FOLDER}" \
-G "users" \
-u "${PUID}" \
"${USER}" && \
chown ${PUID}:users "${CONFIG_FOLDER}"
--system \
--home "${CONFIG_FOLDER}" \
--uid "${PUID}" \
--ingroup users \
--disabled-password \
"${USER}" && \
apt-get purge -y --auto-remove adduser
COPY CBZOptimizer ${APP_PATH}
COPY ${TARGETPLATFORM}/CBZOptimizer ${APP_PATH}
RUN apk add --no-cache \
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
--mount=type=bind,source=${TARGETPLATFORM},target=/tmp/target \
apt-get update && \
apt-get full-upgrade -y && \
apt-get install -y --no-install-recommends \
inotify-tools \
bash \
ca-certificates \
bash-completion && \
/tmp/target/encoder-setup && \
chmod +x ${APP_PATH} && \
${APP_PATH} completion bash > /etc/bash_completion.d/CBZOptimizer
${APP_PATH} completion bash > /etc/bash_completion.d/CBZOptimizer.bash
VOLUME ${CONFIG_FOLDER}
USER ${USER}
ENTRYPOINT ["/usr/local/bin/CBZOptimizer"]
ENTRYPOINT ["/usr/local/bin/CBZOptimizer"]

README.md

@@ -1,85 +1,166 @@
# CBZOptimizer
CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality.
CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) and CBR (Comic Book RAR) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality.
**Note**: CBR files are supported as input but are always converted to CBZ format for output.
## Features
- Convert images within CBZ files to different formats (e.g., WebP).
- Convert images within CBZ and CBR files to different formats (e.g., WebP).
- Support for multiple archive formats including CBZ and CBR (CBR files are converted to CBZ format).
- Adjust the quality of the converted images.
- Process multiple chapters in parallel.
- Option to override the original CBZ files.
- Watch a folder for new CBZ files and optimize them automatically.
- Option to override the original files (CBR files are converted to CBZ and original CBR is deleted).
- Watch a folder for new CBZ/CBR files and optimize them automatically.
- Set time limits for chapter conversion to avoid hanging on problematic files.
## Installation
1. Clone the repository:
```sh
git clone https://github.com/belphemur/CBZOptimizer.git
cd CBZOptimizer
```
### Download Binary
2. Install dependencies:
```sh
go mod tidy
```
Download the latest release from [GitHub Releases](https://github.com/belphemur/CBZOptimizer/releases).
### Docker
Pull the Docker image:
```sh
docker pull ghcr.io/belphemur/cbzoptimizer:latest
```
## Usage
### Command Line Interface
The tool provides CLI commands to optimize and watch CBZ files. Below are examples of how to use them:
The tool provides CLI commands to optimize and watch CBZ/CBR files. Below are examples of how to use them:
#### Optimize Command
Optimize all CBZ files in a folder recursively:
Optimize all CBZ/CBR files in a folder recursively:
```sh
go run main.go optimize [folder] --quality 85 --parallelism 2 --override --format webp --split
cbzconverter optimize [folder] --quality 85 --parallelism 2 --override --format webp --split
```
With timeout to avoid hanging on problematic chapters:
```sh
cbzconverter optimize [folder] --timeout 10m --quality 85
```
Or with Docker:
```sh
docker run -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics --quality 85 --parallelism 2 --override --format webp --split
```
#### Watch Command
Watch a folder for new CBZ files and optimize them automatically:
Watch a folder for new CBZ/CBR files and optimize them automatically:
```sh
go run main.go watch [folder] --quality 85 --override --format webp --split
cbzconverter watch [folder] --quality 85 --override --format webp --split
```
Or with Docker:
```sh
docker run -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest watch /comics --quality 85 --override --format webp --split
```
### Flags
- `--quality`, `-q`: Quality for conversion (0-100). Default is 85.
- `--parallelism`, `-n`: Number of chapters to convert in parallel. Default is 2.
- `--override`, `-o`: Override the original CBZ files. Default is false.
- `--override`, `-o`: Override the original files. For CBZ files, overwrites the original. For CBR files, deletes the original CBR and creates a new CBZ. Default is false.
- `--split`, `-s`: Split long pages into smaller chunks. Default is false.
- `--format`, `-f`: Format to convert the images to (e.g., webp). Default is webp.
- `--timeout`, `-t`: Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout. Default is 0.
- `--log`, `-l`: Set log level; can be 'panic', 'fatal', 'error', 'warn', 'info', 'debug', or 'trace'. Default is info.
## Testing
## Logging
To run the tests, use the following command:
CBZOptimizer uses structured logging with [zerolog](https://github.com/rs/zerolog) for consistent and performant logging output.
### Log Levels
You can control the verbosity of logging using either command-line flags or environment variables:
**Command Line:**
```sh
go test ./... -v
# Set log level to debug for detailed output
cbzconverter --log debug optimize [folder]
# Set log level to error for minimal output
cbzconverter --log error optimize [folder]
```
## Requirement
Needs to have libwep installed on the machine if you're not using the docker image
**Environment Variable:**
## Docker
`ghcr.io/belphemur/cbzoptimizer:latest`
```sh
# Set log level via environment variable
LOG_LEVEL=debug cbzconverter optimize [folder]
```
## GitHub Actions
**Docker:**
The project includes a GitHub Actions workflow to run tests on every push and pull request to the `main` branch. The workflow is defined in `.github/workflows/go.yml`.
```sh
# Set log level via environment variable in Docker
docker run -e LOG_LEVEL=debug -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics
```
## Contributing
### Available Log Levels
1. Fork the repository.
2. Create a new branch (`git checkout -b feature-branch`).
3. Commit your changes (`git commit -am 'Add new feature'`).
4. Push to the branch (`git push origin feature-branch`).
5. Create a new Pull Request.
- `panic`: Logs panic level messages and above
- `fatal`: Logs fatal level messages and above
- `error`: Logs error level messages and above
- `warn`: Logs warning level messages and above
- `info`: Logs info level messages and above (default)
- `debug`: Logs debug level messages and above
- `trace`: Logs all messages including trace level
### Examples
```sh
# Default info level logging
cbzconverter optimize comics/
# Debug level for troubleshooting
cbzconverter --log debug optimize comics/
# Quiet operation (only errors and above)
cbzconverter --log error optimize comics/
# Using environment variable
LOG_LEVEL=warn cbzconverter optimize comics/
# Docker with debug logging
docker run -e LOG_LEVEL=debug -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics
```
## Requirements
- For Docker usage: No additional requirements needed
- For binary usage: Needs `libwebp` installed on the system for WebP conversion
## Docker Image
The official Docker image is available at: `ghcr.io/belphemur/cbzoptimizer:latest`
## Troubleshooting
If you encounter issues:
1. Use `--log debug` for detailed logging output
2. Check that all required dependencies are installed
3. Ensure proper file permissions for input/output directories
4. For Docker usage, verify volume mounts are correct
## Support
For issues and questions, please use [GitHub Issues](https://github.com/belphemur/CBZOptimizer/issues).
## License
This project is licensed under the MIT License. See the `LICENSE` file for details.
This project is licensed under the MIT License. See the `LICENSE` file for details.


@@ -2,15 +2,17 @@ package commands
import (
"fmt"
utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/spf13/cobra"
"github.com/thediveo/enumflag/v2"
"os"
"path/filepath"
"strings"
"sync"
utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/thediveo/enumflag/v2"
)
var converterType constant.ConversionFormat
@@ -18,8 +20,8 @@ var converterType constant.ConversionFormat
func init() {
command := &cobra.Command{
Use: "optimize [folder]",
Short: "Optimize all CBZ files in a folder recursively",
Long: "Optimize all CBZ files in a folder recursively.\nIt will take all the different pages in the CBZ files and convert them to the given format.\nThe original CBZ files will be kept intact depending if you choose to override or not.",
Short: "Optimize all CBZ/CBR files in a folder recursively",
Long: "Optimize all CBZ/CBR files in a folder recursively.\nIt will take all the different pages in the CBZ/CBR files and convert them to the given format.\nThe original CBZ/CBR files will be kept intact depending if you choose to override or not.",
RunE: ConvertCbzCommand,
Args: cobra.ExactArgs(1),
}
@@ -28,8 +30,9 @@ func init() {
command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
command.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
command.Flags().BoolP("override", "o", false, "Override the original CBZ files")
command.Flags().BoolP("override", "o", false, "Override the original CBZ/CBR files")
command.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
command.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout")
command.PersistentFlags().VarP(
formatFlag,
"format", "f",
@@ -40,101 +43,149 @@ func init() {
}
func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
log.Info().Str("command", "optimize").Msg("Starting optimize command")
path := args[0]
if path == "" {
log.Error().Msg("Path argument is required but empty")
return fmt.Errorf("path is required")
}
log.Debug().Str("input_path", path).Msg("Validating input path")
if !utils2.IsValidFolder(path) {
log.Error().Str("input_path", path).Msg("Path validation failed - not a valid folder")
return fmt.Errorf("the path needs to be a folder")
}
log.Debug().Str("input_path", path).Msg("Input path validated successfully")
log.Debug().Msg("Parsing command-line flags")
quality, err := cmd.Flags().GetUint8("quality")
if err != nil || quality <= 0 || quality > 100 {
log.Error().Err(err).Uint8("quality", quality).Msg("Invalid quality value")
return fmt.Errorf("invalid quality value")
}
log.Debug().Uint8("quality", quality).Msg("Quality parameter validated")
override, err := cmd.Flags().GetBool("override")
if err != nil {
log.Error().Err(err).Msg("Failed to parse override flag")
return fmt.Errorf("invalid quality value")
}
log.Debug().Bool("override", override).Msg("Override parameter parsed")
split, err := cmd.Flags().GetBool("split")
if err != nil {
log.Error().Err(err).Msg("Failed to parse split flag")
return fmt.Errorf("invalid split value")
}
log.Debug().Bool("split", split).Msg("Split parameter parsed")
timeout, err := cmd.Flags().GetDuration("timeout")
if err != nil {
log.Error().Err(err).Msg("Failed to parse timeout flag")
return fmt.Errorf("invalid timeout value")
}
log.Debug().Dur("timeout", timeout).Msg("Timeout parameter parsed")
parallelism, err := cmd.Flags().GetInt("parallelism")
if err != nil || parallelism < 1 {
log.Error().Err(err).Int("parallelism", parallelism).Msg("Invalid parallelism value")
return fmt.Errorf("invalid parallelism value")
}
log.Debug().Int("parallelism", parallelism).Msg("Parallelism parameter validated")
log.Debug().Str("converter_format", converterType.String()).Msg("Initializing converter")
chapterConverter, err := converter.Get(converterType)
if err != nil {
log.Error().Str("converter_format", converterType.String()).Err(err).Msg("Failed to get chapter converter")
return fmt.Errorf("failed to get chapterConverter: %v", err)
}
log.Debug().Str("converter_format", converterType.String()).Msg("Converter initialized successfully")
log.Debug().Msg("Preparing converter")
err = chapterConverter.PrepareConverter()
if err != nil {
log.Error().Err(err).Msg("Failed to prepare converter")
return fmt.Errorf("failed to prepare converter: %v", err)
}
log.Debug().Msg("Converter prepared successfully")
// Channel to manage the files to process
fileChan := make(chan string)
// Channel to collect errors
errorChan := make(chan error, parallelism)
// Slice to collect errors with mutex for thread safety
var errs []error
var errMutex sync.Mutex
// WaitGroup to wait for all goroutines to finish
var wg sync.WaitGroup
// Start worker goroutines
log.Debug().Int("worker_count", parallelism).Msg("Starting worker goroutines")
for i := 0; i < parallelism; i++ {
wg.Add(1)
go func() {
go func(workerID int) {
defer wg.Done()
log.Debug().Int("worker_id", workerID).Msg("Worker started")
for path := range fileChan {
log.Debug().Int("worker_id", workerID).Str("file_path", path).Msg("Worker processing file")
err := utils2.Optimize(&utils2.OptimizeOptions{
ChapterConverter: chapterConverter,
Path: path,
Quality: quality,
Override: override,
Split: split,
Timeout: timeout,
})
if err != nil {
errorChan <- fmt.Errorf("error processing file %s: %w", path, err)
log.Error().Int("worker_id", workerID).Str("file_path", path).Err(err).Msg("Worker encountered error")
errMutex.Lock()
errs = append(errs, fmt.Errorf("error processing file %s: %w", path, err))
errMutex.Unlock()
} else {
log.Debug().Int("worker_id", workerID).Str("file_path", path).Msg("Worker completed file successfully")
}
}
}()
log.Debug().Int("worker_id", workerID).Msg("Worker finished")
}(i)
}
log.Debug().Int("worker_count", parallelism).Msg("All worker goroutines started")
// Walk the path and send files to the channel
err = filepath.WalkDir(path, func(path string, info os.DirEntry, err error) error {
log.Debug().Str("search_path", path).Msg("Starting filesystem walk for CBZ/CBR files")
err = filepath.WalkDir(path, func(filePath string, info os.DirEntry, err error) error {
if err != nil {
log.Error().Str("file_path", filePath).Err(err).Msg("Error during filesystem walk")
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") {
fileChan <- path
if !info.IsDir() {
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
log.Debug().Str("file_path", filePath).Str("file_name", fileName).Msg("Found CBZ/CBR file")
fileChan <- filePath
}
}
return nil
})
if err != nil {
log.Error().Str("search_path", path).Err(err).Msg("Filesystem walk failed")
return fmt.Errorf("error walking the path: %w", err)
}
log.Debug().Str("search_path", path).Msg("Filesystem walk completed")
close(fileChan) // Close the channel to signal workers to stop
wg.Wait() // Wait for all workers to finish
close(errorChan) // Close the error channel
var errs []error
for err := range errorChan {
errs = append(errs, err)
}
close(fileChan) // Close the channel to signal workers to stop
log.Debug().Msg("File channel closed, waiting for workers to complete")
wg.Wait() // Wait for all workers to finish
log.Debug().Msg("All workers completed")
if len(errs) > 0 {
log.Error().Int("error_count", len(errs)).Msg("Command completed with errors")
return fmt.Errorf("encountered errors: %v", errs)
}
log.Info().Str("search_path", path).Msg("Optimize command completed successfully")
return nil
}


@@ -1,24 +1,26 @@
package commands
import (
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/spf13/cobra"
"context"
"log"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/spf13/cobra"
)
// MockConverter is a mock implementation of the Converter interface
type MockConverter struct{}
func (m *MockConverter) ConvertChapter(chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
func (m *MockConverter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
chapter.IsConverted = true
chapter.ConvertedTime = time.Now()
return chapter, nil
@@ -46,18 +48,21 @@ func TestConvertCbzCommand(t *testing.T) {
t.Fatalf("testdata directory not found")
}
// Copy sample CBZ files from testdata to the temporary directory
// Copy sample CBZ/CBR files from testdata to the temporary directory
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
if !info.IsDir() {
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
}
return os.WriteFile(destPath, data, info.Mode())
}
return os.WriteFile(destPath, data, info.Mode())
}
return nil
})
@@ -78,8 +83,9 @@ func TestConvertCbzCommand(t *testing.T) {
}
cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
cmd.Flags().BoolP("override", "o", false, "Override the original CBZ files")
cmd.Flags().BoolP("override", "o", false, "Override the original CBZ/CBR files")
cmd.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
cmd.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout")
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
@@ -87,36 +93,82 @@ func TestConvertCbzCommand(t *testing.T) {
t.Fatalf("Command execution failed: %v", err)
}
// Verify the results
// Track expected converted files for verification
expectedFiles := make(map[string]bool)
convertedFiles := make(map[string]bool)
// First pass: identify original files and expected converted filenames
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() || !strings.HasSuffix(info.Name(), "_converted.cbz") {
if info.IsDir() {
return nil
}
t.Logf("CBZ file found: %s", path)
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
if !strings.Contains(fileName, "_converted") {
// This is an original file, determine expected converted filename
baseName := strings.TrimSuffix(info.Name(), filepath.Ext(info.Name()))
expectedConverted := baseName + "_converted.cbz"
expectedFiles[expectedConverted] = false // false means not yet found
}
}
return nil
})
if err != nil {
t.Fatalf("Error identifying original files: %v", err)
}
// Load the converted chapter
chapter, err := cbz.LoadChapter(path)
// Second pass: verify converted files exist and are properly converted
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
// Check if the chapter is marked as converted
if !chapter.IsConverted {
t.Errorf("Chapter is not marked as converted: %s", path)
if info.IsDir() {
return nil
}
fileName := info.Name()
// Check if the ConvertedTime is set
if chapter.ConvertedTime.IsZero() {
t.Errorf("ConvertedTime is not set for chapter: %s", path)
// Check if this is a converted file (should only be .cbz, never .cbr)
if strings.HasSuffix(fileName, "_converted.cbz") {
convertedFiles[fileName] = true
expectedFiles[fileName] = true // Mark as found
t.Logf("Archive file found: %s", path)
// Load the converted chapter
chapter, err := cbz.LoadChapter(path)
if err != nil {
return err
}
// Check if the chapter is marked as converted
if !chapter.IsConverted {
t.Errorf("Chapter is not marked as converted: %s", path)
}
// Check if the ConvertedTime is set
if chapter.ConvertedTime.IsZero() {
t.Errorf("ConvertedTime is not set for chapter: %s", path)
}
t.Logf("Archive file [%s] is converted: %s", path, chapter.ConvertedTime)
} else if strings.HasSuffix(fileName, "_converted.cbr") {
t.Errorf("Found incorrectly named converted file: %s (should be .cbz, not .cbr)", fileName)
}
t.Logf("CBZ file [%s] is converted: %s", path, chapter.ConvertedTime)
return nil
})
if err != nil {
t.Fatalf("Error verifying converted files: %v", err)
}
// Verify all expected files were found
for expectedFile, found := range expectedFiles {
if !found {
t.Errorf("Expected converted file not found: %s", expectedFile)
}
}
// Log summary
t.Logf("Found %d converted files", len(convertedFiles))
}


@@ -2,13 +2,31 @@ package commands
import (
"fmt"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"os"
"path/filepath"
"runtime"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/thediveo/enumflag/v2"
)
// Map zerolog levels to their textual representations
var LogLevelIds = map[zerolog.Level][]string{
zerolog.PanicLevel: {"panic"},
zerolog.FatalLevel: {"fatal"},
zerolog.ErrorLevel: {"error"},
zerolog.WarnLevel: {"warn", "warning"},
zerolog.InfoLevel: {"info"},
zerolog.DebugLevel: {"debug"},
zerolog.TraceLevel: {"trace"},
}
// Global log level variable with default
var logLevel zerolog.Level = zerolog.InfoLevel
var rootCmd = &cobra.Command{
Use: "cbzconverter",
Short: "Convert CBZ files using a specified converter",
@@ -34,6 +52,33 @@ func init() {
viper.AddConfigPath(configFolder)
viper.SetEnvPrefix("CBZ")
viper.AutomaticEnv()
// Add log level flag (accepts zerolog levels: panic, fatal, error, warn, info, debug, trace)
ef := enumflag.New(&logLevel, "log", LogLevelIds, enumflag.EnumCaseInsensitive)
rootCmd.PersistentFlags().VarP(
ef,
"log", "l",
"Set log level; can be 'panic', 'fatal', 'error', 'warn', 'info', 'debug', or 'trace'")
ef.RegisterCompletion(rootCmd, "log", enumflag.Help[zerolog.Level]{
zerolog.PanicLevel: "Only log panic messages",
zerolog.FatalLevel: "Log fatal and panic messages",
zerolog.ErrorLevel: "Log error, fatal, and panic messages",
zerolog.WarnLevel: "Log warn, error, fatal, and panic messages",
zerolog.InfoLevel: "Log info, warn, error, fatal, and panic messages",
zerolog.DebugLevel: "Log debug, info, warn, error, fatal, and panic messages",
zerolog.TraceLevel: "Log all messages including trace",
})
// Add log level environment variable support
viper.BindEnv("log", "LOG_LEVEL")
viper.BindPFlag("log", rootCmd.PersistentFlags().Lookup("log"))
rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
ConfigureLogging()
}
// Ensure the configuration directory exists
err := os.MkdirAll(configFolder, os.ModePerm)
if err != nil {
panic(fmt.Errorf("fatal error config file: %w", err))
@@ -53,10 +98,38 @@ func init() {
// Execute executes the root command.
func Execute() {
if err := rootCmd.Execute(); err != nil {
fmt.Println(err)
os.Exit(1)
log.Fatal().Err(err).Msg("Command execution failed")
}
}
func AddCommand(cmd *cobra.Command) {
rootCmd.AddCommand(cmd)
}
// ConfigureLogging sets up zerolog based on command-line flags and environment variables
func ConfigureLogging() {
// Start with default log level (info)
level := zerolog.InfoLevel
// Check LOG_LEVEL environment variable first
envLogLevel := viper.GetString("log")
if envLogLevel != "" {
if parsedLevel, err := zerolog.ParseLevel(envLogLevel); err == nil {
level = parsedLevel
}
}
// Command-line log flag takes precedence over environment variable
// The logLevel variable will be set by the flag parsing, so if it's different from default, use it
if logLevel != zerolog.InfoLevel {
level = logLevel
}
// Set the global log level
zerolog.SetGlobalLevel(level)
// Configure console writer for readable output
log.Logger = log.Output(zerolog.ConsoleWriter{
Out: os.Stderr,
NoColor: false,
})
}


@@ -2,17 +2,18 @@ package commands
import (
"fmt"
"runtime"
"strings"
"sync"
utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/pablodz/inotifywaitgo/inotifywaitgo"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/thediveo/enumflag/v2"
"log"
"runtime"
"strings"
"sync"
)
func init() {
@@ -21,8 +22,8 @@ func init() {
}
command := &cobra.Command{
Use: "watch [folder]",
Short: "Watch a folder for new CBZ files",
Long: "Watch a folder for new CBZ files.\nIt will watch a folder for new CBZ files and optimize them.",
Short: "Watch a folder for new CBZ/CBR files",
Long: "Watch a folder for new CBZ/CBR files.\nIt will watch a folder for new CBZ/CBR files and optimize them.",
RunE: WatchCommand,
Args: cobra.ExactArgs(1),
}
@@ -32,12 +33,15 @@ func init() {
command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
_ = viper.BindPFlag("quality", command.Flags().Lookup("quality"))
command.Flags().BoolP("override", "o", true, "Override the original CBZ files")
command.Flags().BoolP("override", "o", true, "Override the original CBZ/CBR files")
_ = viper.BindPFlag("override", command.Flags().Lookup("override"))
command.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
_ = viper.BindPFlag("split", command.Flags().Lookup("split"))
command.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout")
_ = viper.BindPFlag("timeout", command.Flags().Lookup("timeout"))
command.PersistentFlags().VarP(
formatFlag,
"format", "f",
@@ -66,6 +70,8 @@ func WatchCommand(_ *cobra.Command, args []string) error {
split := viper.GetBool("split")
timeout := viper.GetDuration("timeout")
converterType := constant.FindConversionFormat(viper.GetString("format"))
chapterConverter, err := converter.Get(converterType)
if err != nil {
@@ -76,7 +82,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
if err != nil {
return fmt.Errorf("failed to prepare converter: %v", err)
}
log.Printf("Watching [%s] with [override: %t, quality: %d, format: %s, split: %t]", path, override, quality, converterType.String(), split)
log.Info().Str("path", path).Bool("override", override).Uint8("quality", quality).Str("format", converterType.String()).Bool("split", split).Msg("Watching directory")
events := make(chan inotifywaitgo.FileEvent)
errors := make(chan error)
@@ -105,9 +111,10 @@ func WatchCommand(_ *cobra.Command, args []string) error {
go func() {
defer wg.Done()
for event := range events {
log.Printf("[Event]%s, %v\n", event.Filename, event.Events)
log.Debug().Str("file", event.Filename).Interface("events", event.Events).Msg("File event")
if !strings.HasSuffix(strings.ToLower(event.Filename), ".cbz") {
filename := strings.ToLower(event.Filename)
if !strings.HasSuffix(filename, ".cbz") && !strings.HasSuffix(filename, ".cbr") {
continue
}
@@ -120,6 +127,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
Quality: quality,
Override: override,
Split: split,
Timeout: timeout,
})
if err != nil {
errors <- fmt.Errorf("error processing file %s: %w", event.Filename, err)
@@ -135,7 +143,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
go func() {
defer wg.Done()
for err := range errors {
log.Printf("Error: %v\n", err)
log.Error().Err(err).Msg("Watch error")
}
}()
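
A hedged usage sketch of the resulting command; the binary name cbzoptimizer and the webp format value are assumptions, while the flags themselves come from the definitions above:

	cbzoptimizer watch /path/to/manga --quality 85 --split --timeout 5m --format webp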


@@ -12,5 +12,6 @@ var (
func main() {
commands.SetVersionInfo(version, commit, date)
commands.Execute()
}

cmd/encoder-setup/main.go (new file)

@@ -0,0 +1,19 @@
//go:build encoder_setup
// +build encoder_setup
package main
import (
"fmt"
"log"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/webp"
)
func main() {
fmt.Println("Setting up WebP encoder for tests...")
if err := webp.PrepareEncoder(); err != nil {
log.Fatalf("Failed to prepare WebP encoder: %v", err)
}
fmt.Println("WebP encoder setup complete.")
}
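
The encoder_setup build tag keeps this helper out of normal builds; it can be run on demand before the test suite with the standard toolchain, for example:

	go run -tags encoder_setup ./cmd/encoder-setup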

go.mod

@@ -1,54 +1,57 @@
module github.com/belphemur/CBZOptimizer/v2
go 1.24
toolchain go1.24.0
go 1.25
require (
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de
github.com/belphemur/go-webpbin/v2 v2.0.0
github.com/belphemur/go-webpbin/v2 v2.1.0
github.com/mholt/archives v0.1.3
github.com/oliamb/cutter v0.2.2
github.com/pablodz/inotifywaitgo v0.0.9
github.com/samber/lo v1.49.1
github.com/spf13/cobra v1.8.1
github.com/spf13/viper v1.19.0
github.com/stretchr/testify v1.10.0
github.com/rs/zerolog v1.34.0
github.com/samber/lo v1.51.0
github.com/spf13/cobra v1.10.1
github.com/spf13/viper v1.21.0
github.com/stretchr/testify v1.11.1
github.com/thediveo/enumflag/v2 v2.0.7
golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac
golang.org/x/image v0.24.0
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b
golang.org/x/image v0.31.0
)
require (
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0 // indirect
github.com/STARRY-S/zip v0.2.3 // indirect
github.com/andybalholm/brotli v1.2.0 // indirect
github.com/belphemur/go-binwrapper v1.0.0 // indirect
github.com/bodgit/plumbing v1.3.0 // indirect
github.com/bodgit/sevenzip v1.6.1 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dsnet/compress v0.0.1 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jfrog/archiver/v3 v3.6.1 // indirect
github.com/klauspost/compress v1.17.9 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mikelolasagasti/xz v1.0.1 // indirect
github.com/minio/minlz v1.0.1 // indirect
github.com/nwaples/rardecode/v2 v2.1.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/sagikazarmark/locafero v0.4.0 // indirect
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.11.0 // indirect
github.com/spf13/cast v1.6.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/sagikazarmark/locafero v0.11.0 // indirect
github.com/sorairolake/lzip-go v0.3.8 // indirect
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.10.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/ulikunitz/xz v0.5.12 // indirect
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/text v0.22.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
github.com/ulikunitz/xz v0.5.15 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/text v0.29.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

go.sum

@@ -1,59 +1,137 @@
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/STARRY-S/zip v0.2.3 h1:luE4dMvRPDOWQdeDdUxUoZkzUIpTccdKdhHHsQJ1fm4=
github.com/STARRY-S/zip v0.2.3/go.mod h1:lqJ9JdeRipyOQJrYSOtpNAiaesFO6zVDsE8GIGFaoSk=
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0 h1:EzKgPYK90TyAOmytK7bvapqlkG/m7KWKK28mOAdQEaM=
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0/go.mod h1:s2Dv+CfgVbNM9ucqvE5qCCC0AkI1PE2OZb7N8PPlOh4=
github.com/belphemur/go-webpbin/v2 v2.0.0 h1:Do0TTTJ6cS6lgi+R67De+jXRYe+ZOwxFqTiFggyX5p8=
github.com/belphemur/go-webpbin/v2 v2.0.0/go.mod h1:VIHXZQaIwaIYDn08w0qeJFPj1MuYt5pyJnkQALPYc5g=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/belphemur/go-binwrapper v1.0.0 h1:kXNRqO3vrqex4O0Q1pfD9w5kKwrQT1Mg9CJOd/IWbtI=
github.com/belphemur/go-binwrapper v1.0.0/go.mod h1:PNID1xFdXpkAwjr7gCidIiC/JA8tpYl3zzNSIK9lCjc=
github.com/belphemur/go-webpbin/v2 v2.1.0 h1:SvdjLz/9wb7kqD7jYDjlbTA2xRwwQRo3L/a5Ee+Br5E=
github.com/belphemur/go-webpbin/v2 v2.1.0/go.mod h1:jRdjIZYdSkW6DM9pfiH2fjSYgX/jshRooDI03f6o658=
github.com/bodgit/plumbing v1.3.0 h1:pf9Itz1JOQgn7vEOE7v7nlEfBykYqvUYioC61TwWCFU=
github.com/bodgit/plumbing v1.3.0/go.mod h1:JOTb4XiRu5xfnmdnDJo6GmSbSbtSyufrsyZFByMtKEs=
github.com/bodgit/sevenzip v1.6.1 h1:kikg2pUMYC9ljU7W9SaqHXhym5HyKm8/M/jd31fYan4=
github.com/bodgit/sevenzip v1.6.1/go.mod h1:GVoYQbEVbOGT8n2pfqCIMRUaRjQ8F9oSqoBEqZh5fQ8=
github.com/bodgit/windows v1.0.1 h1:tF7K6KOluPYygXa3Z2594zxlkbKPAOvqr97etrGNIz4=
github.com/bodgit/windows v1.0.1/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 h1:2tV76y6Q9BB+NEBasnqvs7e49aEBFI8ejC89PSnWH+4=
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad h1:a6HEuzUHeKH6hwfN/ZoQgRgVIWFJljSWa/zetS2WTvg=
github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jfrog/archiver/v3 v3.6.1 h1:LOxnkw9pOn45DzCbZNFV6K0+6dCsQ0L8mR3ZcujO5eI=
github.com/jfrog/archiver/v3 v3.6.1/go.mod h1:VgR+3WZS4N+i9FaDwLZbq+jeU4B4zctXL+gL4EMzfLw=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/mholt/archives v0.1.3 h1:aEAaOtNra78G+TvV5ohmXrJOAzf++dIlYeDW3N9q458=
github.com/mholt/archives v0.1.3/go.mod h1:LUCGp++/IbV/I0Xq4SzcIR6uwgeh2yjnQWamjRQfLTU=
github.com/mikelolasagasti/xz v1.0.1 h1:Q2F2jX0RYJUG3+WsM+FJknv+6eVjsjXNDV0KJXZzkD0=
github.com/mikelolasagasti/xz v1.0.1/go.mod h1:muAirjiOUxPRXwm9HdDtB3uoRPrGnL85XHtokL9Hcgc=
github.com/minio/minlz v1.0.1 h1:OUZUzXcib8diiX+JYxyRLIdomyZYzHct6EShOKtQY2A=
github.com/minio/minlz v1.0.1/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec=
github.com/nwaples/rardecode/v2 v2.1.1 h1:OJaYalXdliBUXPmC8CZGQ7oZDxzX1/5mQmgn0/GASew=
github.com/nwaples/rardecode/v2 v2.1.1/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
@@ -62,82 +140,258 @@ github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
github.com/pablodz/inotifywaitgo v0.0.9 h1:njquRbBU7fuwIe5rEvtaniVBjwWzcpdUVptSgzFqZsw=
github.com/pablodz/inotifywaitgo v0.0.9/go.mod h1:hAfx2oN+WKg8miwUKPs52trySpPignlRBRxWcXVHku0=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
github.com/sagikazarmark/locafero v0.11.0 h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc=
github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik=
github.com/samber/lo v1.51.0 h1:kysRYLbHy/MB7kQZf5DSN50JHmMsNEdeY24VzJFu7wI=
github.com/samber/lo v1.51.0/go.mod h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0=
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
github.com/sorairolake/lzip-go v0.3.8 h1:j5Q2313INdTA80ureWYRhX+1K78mUXfMoPZCw/ivWik=
github.com/sorairolake/lzip-go v0.3.8/go.mod h1:JcBqGMV0frlxwrsE9sMWXDjqn3EeVf0/54YPsw66qkU=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U=
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU=
github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/thediveo/enumflag/v2 v2.0.7 h1:uxXDU+rTel7Hg4X0xdqICpG9rzuI/mzLAEYXWLflOfs=
github.com/thediveo/enumflag/v2 v2.0.7/go.mod h1:bWlnNvTJuUK+huyzf3WECFLy557Ttlc+yk3o+BPs0EA=
github.com/thediveo/success v1.0.2 h1:w+r3RbSjLmd7oiNnlCblfGqItcsaShcuAorRVh/+0xk=
github.com/thediveo/success v1.0.2/go.mod h1:hdPJB77k70w764lh8uLUZgNhgeTl3DYeZ4d4bwMO2CU=
github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac h1:l5+whBCLH3iH2ZNHYLbAe58bo7yrN4mVcnkHDYz5vvs=
golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac/go.mod h1:hH+7mtFmImwwcMvScyxUhjuVHR3HGaDPMn9rMSUUbxo=
golang.org/x/image v0.24.0 h1:AN7zRgVsbvmTfNyqIbbOraYL8mSwcKncEj8ofjgzcMQ=
golang.org/x/image v0.24.0/go.mod h1:4b/ITuLfqYq1hqZcjofwctIhi7sZh2WaCjvsBNjjya8=
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ulikunitz/xz v0.5.15 h1:9DNdB5s+SgV3bQ2ApL10xRc35ck0DuIX/isZvIk+ubY=
github.com/ulikunitz/xz v0.5.15/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc=
go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEbFGHD4hhz6S1ZYeaU=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b h1:DXr+pvt3nC887026GRP39Ej11UATqWDmWuS99x26cD0=
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.31.0 h1:mLChjE2MV6g1S7oqbXC0/UcKijjm5fnJLUYKIYrLESA=
golang.org/x/image v0.31.0/go.mod h1:R9ec5Lcp96v9FTF+ajwaH3uGxPH4fKfHHAVbUILxghA=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=


@@ -3,28 +3,42 @@ package cbz
import (
"archive/zip"
"fmt"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"os"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/rs/zerolog/log"
)
func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
log.Debug().
Str("chapter_file", chapter.FilePath).
Str("output_path", outputFilePath).
Int("page_count", len(chapter.Pages)).
Bool("is_converted", chapter.IsConverted).
Msg("Starting CBZ file creation")
// Create a new ZIP file
log.Debug().Str("output_path", outputFilePath).Msg("Creating output CBZ file")
zipFile, err := os.Create(outputFilePath)
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to create CBZ file")
return fmt.Errorf("failed to create .cbz file: %w", err)
}
defer errs.Capture(&err, zipFile.Close, "failed to close .cbz file")
// Create a new ZIP writer
log.Debug().Str("output_path", outputFilePath).Msg("Creating ZIP writer")
zipWriter := zip.NewWriter(zipFile)
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to create ZIP writer")
return err
}
defer errs.Capture(&err, zipWriter.Close, "failed to close .cbz writer")
// Write each page to the ZIP archive
log.Debug().Str("output_path", outputFilePath).Int("pages_to_write", len(chapter.Pages)).Msg("Writing pages to CBZ archive")
for _, page := range chapter.Pages {
// Construct the file name for the page
var fileName string
@@ -36,6 +50,15 @@ func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
fileName = fmt.Sprintf("%04d%s", page.Index, page.Extension)
}
log.Debug().
Str("output_path", outputFilePath).
Uint16("page_index", page.Index).
Bool("is_splitted", page.IsSplitted).
Uint16("split_part", page.SplitPartIndex).
Str("filename", fileName).
Int("size", len(page.Contents.Bytes())).
Msg("Writing page to CBZ archive")
// Create a new file in the ZIP archive
fileWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
Name: fileName,
@@ -43,41 +66,58 @@ func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
Modified: time.Now(),
})
if err != nil {
log.Error().Str("output_path", outputFilePath).Str("filename", fileName).Err(err).Msg("Failed to create file in CBZ archive")
return fmt.Errorf("failed to create file in .cbz: %w", err)
}
// Write the page contents to the file
_, err = fileWriter.Write(page.Contents.Bytes())
bytesWritten, err := fileWriter.Write(page.Contents.Bytes())
if err != nil {
log.Error().Str("output_path", outputFilePath).Str("filename", fileName).Err(err).Msg("Failed to write page contents")
return fmt.Errorf("failed to write page contents: %w", err)
}
log.Debug().
Str("output_path", outputFilePath).
Str("filename", fileName).
Int("bytes_written", bytesWritten).
Msg("Page written successfully")
}
// Optionally, write the ComicInfo.xml file if present
if chapter.ComicInfoXml != "" {
log.Debug().Str("output_path", outputFilePath).Int("xml_size", len(chapter.ComicInfoXml)).Msg("Writing ComicInfo.xml to CBZ archive")
comicInfoWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
Name: "ComicInfo.xml",
Method: zip.Deflate,
Modified: time.Now(),
})
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to create ComicInfo.xml in CBZ archive")
return fmt.Errorf("failed to create ComicInfo.xml in .cbz: %w", err)
}
_, err = comicInfoWriter.Write([]byte(chapter.ComicInfoXml))
bytesWritten, err := comicInfoWriter.Write([]byte(chapter.ComicInfoXml))
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to write ComicInfo.xml contents")
return fmt.Errorf("failed to write ComicInfo.xml contents: %w", err)
}
log.Debug().Str("output_path", outputFilePath).Int("bytes_written", bytesWritten).Msg("ComicInfo.xml written successfully")
} else {
log.Debug().Str("output_path", outputFilePath).Msg("No ComicInfo.xml to write")
}
if chapter.IsConverted {
convertedString := fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime)
log.Debug().Str("output_path", outputFilePath).Str("comment", convertedString).Msg("Setting CBZ comment for converted chapter")
err = zipWriter.SetComment(convertedString)
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to write CBZ comment")
return fmt.Errorf("failed to write comment: %w", err)
}
log.Debug().Str("output_path", outputFilePath).Msg("CBZ comment set successfully")
}
log.Debug().Str("output_path", outputFilePath).Msg("CBZ file creation completed successfully")
return nil
}
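
A minimal round-trip sketch combining this writer with LoadChapter from the loader shown below; the import path of the cbz package is an assumption and error handling is abbreviated:

package main

import (
	"github.com/belphemur/CBZOptimizer/v2/internal/cbz" // assumed package location
	"github.com/rs/zerolog/log"
)

func main() {
	// Load an existing archive into a manga.Chapter.
	chapter, err := cbz.LoadChapter("input.cbz")
	if err != nil {
		log.Fatal().Err(err).Msg("failed to load chapter")
	}
	// Write it back out as a CBZ, including ComicInfo.xml and the
	// conversion comment when chapter.IsConverted is set.
	if err := cbz.WriteChapterToCBZ(chapter, "output.cbz"); err != nil {
		log.Fatal().Err(err).Msg("failed to write chapter")
	}
}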


@@ -4,80 +4,127 @@ import (
"archive/zip"
"bufio"
"bytes"
"context"
"fmt"
"io"
"io/fs"
"path/filepath"
"strings"
"github.com/araddon/dateparse"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"io"
"path/filepath"
"strings"
"github.com/mholt/archives"
"github.com/rs/zerolog/log"
)
func LoadChapter(filePath string) (*manga.Chapter, error) {
// Open the .cbz file
r, err := zip.OpenReader(filePath)
if err != nil {
return nil, fmt.Errorf("failed to open .cbz file: %w", err)
}
defer errs.Capture(&err, r.Close, "failed to close opened .cbz file")
log.Debug().Str("file_path", filePath).Msg("Starting chapter loading")
ctx := context.Background()
chapter := &manga.Chapter{
FilePath: filePath,
}
// Check for comment
if r.Comment != "" {
scanner := bufio.NewScanner(strings.NewReader(r.Comment))
if scanner.Scan() {
convertedTime := scanner.Text()
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err == nil {
chapter.IsConverted = true
// First, try to read the comment using zip.OpenReader for CBZ files
if strings.ToLower(filepath.Ext(filePath)) == ".cbz" {
log.Debug().Str("file_path", filePath).Msg("Checking CBZ comment for conversion status")
r, err := zip.OpenReader(filePath)
if err == nil {
defer errs.Capture(&err, r.Close, "failed to close zip reader for comment")
// Check for comment
if r.Comment != "" {
log.Debug().Str("file_path", filePath).Str("comment", r.Comment).Msg("Found CBZ comment")
scanner := bufio.NewScanner(strings.NewReader(r.Comment))
if scanner.Scan() {
convertedTime := scanner.Text()
log.Debug().Str("file_path", filePath).Str("converted_time", convertedTime).Msg("Parsing conversion timestamp")
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err == nil {
chapter.IsConverted = true
log.Debug().Str("file_path", filePath).Time("converted_time", chapter.ConvertedTime).Msg("Chapter marked as previously converted")
} else {
log.Debug().Str("file_path", filePath).Err(err).Msg("Failed to parse conversion timestamp")
}
}
} else {
log.Debug().Str("file_path", filePath).Msg("No CBZ comment found")
}
} else {
log.Debug().Str("file_path", filePath).Err(err).Msg("Failed to open CBZ file for comment reading")
}
// Continue even if comment reading fails
}
// Open the archive using archives library for file operations
log.Debug().Str("file_path", filePath).Msg("Opening archive file system")
fsys, err := archives.FileSystem(ctx, filePath, nil)
if err != nil {
log.Error().Str("file_path", filePath).Err(err).Msg("Failed to open archive file system")
return nil, fmt.Errorf("failed to open archive file: %w", err)
}
// Walk through all files in the filesystem
log.Debug().Str("file_path", filePath).Msg("Starting filesystem walk")
err = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
return func() error {
// Open the file
file, err := fsys.Open(path)
if err != nil {
return fmt.Errorf("failed to open file %s: %w", path, err)
}
defer errs.Capture(&err, file.Close, fmt.Sprintf("failed to close file %s", path))
// Determine the file extension
ext := strings.ToLower(filepath.Ext(path))
fileName := strings.ToLower(filepath.Base(path))
if ext == ".xml" && fileName == "comicinfo.xml" {
log.Debug().Str("file_path", filePath).Str("archive_file", path).Msg("Found ComicInfo.xml")
// Read the ComicInfo.xml file content
xmlContent, err := io.ReadAll(file)
if err != nil {
log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read ComicInfo.xml")
return fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
}
chapter.ComicInfoXml = string(xmlContent)
log.Debug().Str("file_path", filePath).Int("xml_size", len(xmlContent)).Msg("ComicInfo.xml loaded")
} else if !chapter.IsConverted && ext == ".txt" && fileName == "converted.txt" {
log.Debug().Str("file_path", filePath).Str("archive_file", path).Msg("Found converted.txt")
textContent, err := io.ReadAll(file)
if err != nil {
log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read converted.txt")
return fmt.Errorf("failed to read converted.txt content: %w", err)
}
scanner := bufio.NewScanner(bytes.NewReader(textContent))
if scanner.Scan() {
convertedTime := scanner.Text()
log.Debug().Str("file_path", filePath).Str("converted_time", convertedTime).Msg("Parsing converted.txt timestamp")
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err != nil {
log.Error().Str("file_path", filePath).Err(err).Msg("Failed to parse converted time from converted.txt")
return fmt.Errorf("failed to parse converted time: %w", err)
}
chapter.IsConverted = true
log.Debug().Str("file_path", filePath).Time("converted_time", chapter.ConvertedTime).Msg("Chapter marked as converted from converted.txt")
}
} else {
// Read the file contents for page
log.Debug().Str("file_path", filePath).Str("archive_file", path).Str("extension", ext).Msg("Processing page file")
buf := new(bytes.Buffer)
bytesCopied, err := io.Copy(buf, file)
if err != nil {
log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read page file contents")
return fmt.Errorf("failed to read file contents: %w", err)
}
@@ -92,13 +139,28 @@ func LoadChapter(filePath string) (*manga.Chapter, error) {
// Add the page to the chapter
chapter.Pages = append(chapter.Pages, page)
log.Debug().
Str("file_path", filePath).
Str("archive_file", path).
Uint16("page_index", page.Index).
Int64("bytes_read", bytesCopied).
Msg("Page loaded successfully")
}
return nil
}()
})
if err != nil {
log.Error().Str("file_path", filePath).Err(err).Msg("Failed during filesystem walk")
return nil, err
}
log.Debug().
Str("file_path", filePath).
Int("pages_loaded", len(chapter.Pages)).
Bool("is_converted", chapter.IsConverted).
Bool("has_comic_info", chapter.ComicInfoXml != "").
Msg("Chapter loading completed successfully")
return chapter, nil
}
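A minimal caller sketch for the reworked loader, assuming the zerolog logger used throughout this diff; the fields read back are the ones LoadChapter populates above, and the path is just an example:

// Hypothetical usage sketch: load a chapter and inspect what the loader filled in.
chapter, err := cbz.LoadChapter("testdata/Chapter 128.cbz")
if err != nil {
	log.Fatal().Err(err).Msg("could not load chapter")
}
log.Info().
	Int("pages", len(chapter.Pages)).
	Bool("is_converted", chapter.IsConverted).
	Bool("has_comic_info", chapter.ComicInfoXml != "").
	Msg("chapter loaded")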

View File

@@ -16,8 +16,15 @@ func TestLoadChapter(t *testing.T) {
testCases := []testCase{
{
name: "Original Chapter",
filePath: "../../testdata/Chapter 1.cbz",
name: "Original Chapter CBZ",
filePath: "../../testdata/Chapter 128.cbz",
expectedPages: 14,
expectedSeries: "<Series>The Knight King Who Returned with a God</Series>",
expectedConversion: false,
},
{
name: "Original Chapter CBR",
filePath: "../../testdata/Chapter 1.cbr",
expectedPages: 16,
expectedSeries: "<Series>Boundless Necromancer</Series>",
expectedConversion: false,

View File

@@ -1,13 +1,18 @@
package utils
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
errors2 "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
"log"
"strings"
"github.com/rs/zerolog/log"
)
type OptimizeOptions struct {
@@ -16,52 +21,150 @@ type OptimizeOptions struct {
Quality uint8
Override bool
Split bool
Timeout time.Duration
}
// Optimize optimizes a CBZ/CBR file using the specified converter.
func Optimize(options *OptimizeOptions) error {
log.Info().Str("file", options.Path).Msg("Processing file")
log.Debug().
Str("file", options.Path).
Uint8("quality", options.Quality).
Bool("override", options.Override).
Bool("split", options.Split).
Msg("Optimization parameters")
// Load the chapter
log.Debug().Str("file", options.Path).Msg("Loading chapter")
chapter, err := cbz.LoadChapter(options.Path)
if err != nil {
log.Error().Str("file", options.Path).Err(err).Msg("Failed to load chapter")
return fmt.Errorf("failed to load chapter: %v", err)
}
log.Debug().
Str("file", options.Path).
Int("pages", len(chapter.Pages)).
Bool("converted", chapter.IsConverted).
Msg("Chapter loaded successfully")
if chapter.IsConverted {
log.Printf("Chapter already converted: %s", options.Path)
log.Info().Str("file", options.Path).Msg("Chapter already converted")
return nil
}
// Convert the chapter
log.Debug().
Str("file", chapter.FilePath).
Int("pages", len(chapter.Pages)).
Uint8("quality", options.Quality).
Bool("split", options.Split).
Msg("Starting chapter conversion")
var ctx context.Context
if options.Timeout > 0 {
var cancel context.CancelFunc
ctx, cancel = context.WithTimeout(context.Background(), options.Timeout)
defer cancel()
log.Debug().Str("file", chapter.FilePath).Dur("timeout", options.Timeout).Msg("Applying timeout to chapter conversion")
} else {
ctx = context.Background()
}
convertedChapter, err := options.ChapterConverter.ConvertChapter(ctx, chapter, options.Quality, options.Split, func(msg string, current uint32, total uint32) {
if current%10 == 0 || current == total {
log.Printf("[%s] Converting: %d/%d", chapter.FilePath, current, total)
log.Info().Str("file", chapter.FilePath).Uint32("current", current).Uint32("total", total).Msg("Converting")
} else {
log.Debug().Str("file", chapter.FilePath).Uint32("current", current).Uint32("total", total).Msg("Converting page")
}
})
if err != nil {
var pageIgnoredError *errors2.PageIgnoredError
if errors.As(err, &pageIgnoredError) {
log.Debug().Str("file", chapter.FilePath).Err(err).Msg("Page conversion error (non-fatal)")
} else {
log.Error().Str("file", chapter.FilePath).Err(err).Msg("Chapter conversion failed")
return fmt.Errorf("failed to convert chapter: %v", err)
}
}
if convertedChapter == nil {
log.Error().Str("file", chapter.FilePath).Msg("Conversion returned nil chapter")
return fmt.Errorf("failed to convert chapter")
}
log.Debug().
Str("file", chapter.FilePath).
Int("original_pages", len(chapter.Pages)).
Int("converted_pages", len(convertedChapter.Pages)).
Msg("Chapter conversion completed")
convertedChapter.SetConverted()
// Determine output path and handle CBR override logic
log.Debug().
Str("input_path", options.Path).
Bool("override", options.Override).
Msg("Determining output path")
outputPath := options.Path
originalPath := options.Path
isCbrOverride := false
if options.Override {
// For override mode, check if it's a CBR file that needs to be converted to CBZ
pathLower := strings.ToLower(options.Path)
if strings.HasSuffix(pathLower, ".cbr") {
// Convert CBR to CBZ: change extension and mark for deletion
outputPath = strings.TrimSuffix(options.Path, filepath.Ext(options.Path)) + ".cbz"
isCbrOverride = true
log.Debug().
Str("original_path", originalPath).
Str("output_path", outputPath).
Msg("CBR to CBZ conversion: will delete original after conversion")
} else {
log.Debug().
Str("original_path", originalPath).
Str("output_path", outputPath).
Msg("CBZ override mode: will overwrite original file")
}
} else {
// Handle both .cbz and .cbr files - strip the extension and add _converted.cbz
pathLower := strings.ToLower(options.Path)
if strings.HasSuffix(pathLower, ".cbz") {
outputPath = strings.TrimSuffix(options.Path, ".cbz") + "_converted.cbz"
} else if strings.HasSuffix(pathLower, ".cbr") {
outputPath = strings.TrimSuffix(options.Path, ".cbr") + "_converted.cbz"
} else {
// Fallback for other extensions - just add _converted.cbz
outputPath = options.Path + "_converted.cbz"
}
log.Debug().
Str("original_path", originalPath).
Str("output_path", outputPath).
Msg("Non-override mode: creating converted file alongside original")
}
log.Printf("Converted file written to: %s\n", outputPath)
// Write the converted chapter to CBZ file
log.Debug().Str("output_path", outputPath).Msg("Writing converted chapter to CBZ file")
err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
if err != nil {
log.Error().Str("output_path", outputPath).Err(err).Msg("Failed to write converted chapter")
return fmt.Errorf("failed to write converted chapter: %v", err)
}
log.Debug().Str("output_path", outputPath).Msg("Successfully wrote converted chapter")
// If we're overriding a CBR file, delete the original CBR after successful write
if isCbrOverride {
log.Debug().Str("file", originalPath).Msg("Attempting to delete original CBR file")
err = os.Remove(originalPath)
if err != nil {
// Log the error but don't fail the operation since conversion succeeded
log.Warn().Str("file", originalPath).Err(err).Msg("Failed to delete original CBR file")
} else {
log.Info().Str("file", originalPath).Msg("Deleted original CBR file")
}
}
log.Info().Str("output", outputPath).Msg("Converted file written")
return nil
}
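A minimal sketch of driving Optimize with the real WebP converter, as the tests below do; the path and timeout values are placeholders, not taken from the repo:

// Hypothetical usage sketch for the new Optimize options.
chapterConverter, err := converter.Get(constant.WebP)
if err != nil {
	log.Fatal().Err(err).Msg("WebP converter not available")
}
if err := Optimize(&OptimizeOptions{
	ChapterConverter: chapterConverter,
	Path:             "library/Chapter 128.cbr", // a CBR input is rewritten to .cbz in override mode
	Quality:          85,
	Override:         true,
	Split:            true,
	Timeout:          5 * time.Minute,
}); err != nil {
	log.Fatal().Err(err).Msg("optimization failed")
}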

View File

@@ -0,0 +1,402 @@
package utils
import (
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
)
func TestOptimizeIntegration(t *testing.T) {
// Skip integration tests if no libwebp is available or testdata doesn't exist
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Check if testdata directory exists
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping integration tests")
}
// Create temporary directory for tests
tempDir, err := os.MkdirTemp("", "test_optimize_integration")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Get the real webp converter
converterInstance, err := converter.Get(constant.WebP)
if err != nil {
t.Skip("WebP converter not available, skipping integration tests")
}
// Prepare the converter
err = converterInstance.PrepareConverter()
if err != nil {
t.Skip("Failed to prepare WebP converter, skipping integration tests")
}
// Collect all test files (CBZ/CBR, excluding converted ones)
var testFiles []string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() {
fileName := strings.ToLower(info.Name())
if (strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr")) && !strings.Contains(fileName, "converted") {
testFiles = append(testFiles, path)
}
}
return nil
})
if err != nil {
t.Fatal(err)
}
if len(testFiles) == 0 {
t.Skip("No test files found")
}
tests := []struct {
name string
inputFile string
override bool
expectedOutput string
shouldDelete bool
expectError bool
}{}
// Generate test cases for each available test file
for _, testFile := range testFiles {
baseName := strings.TrimSuffix(filepath.Base(testFile), filepath.Ext(testFile))
isCBR := strings.HasSuffix(strings.ToLower(testFile), ".cbr")
// Test without override
tests = append(tests, struct {
name string
inputFile string
override bool
expectedOutput string
shouldDelete bool
expectError bool
}{
name: fmt.Sprintf("%s file without override", strings.ToUpper(filepath.Ext(testFile)[1:])),
inputFile: testFile,
override: false,
expectedOutput: filepath.Join(filepath.Dir(testFile), baseName+"_converted.cbz"),
shouldDelete: false,
expectError: false,
})
// Test with override
if isCBR {
tests = append(tests, struct {
name string
inputFile string
override bool
expectedOutput string
shouldDelete bool
expectError bool
}{
name: fmt.Sprintf("%s file with override", strings.ToUpper(filepath.Ext(testFile)[1:])),
inputFile: testFile,
override: true,
expectedOutput: filepath.Join(filepath.Dir(testFile), baseName+".cbz"),
shouldDelete: true,
expectError: false,
})
}
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Create a copy of the input file for this test
testFile := filepath.Join(tempDir, tt.name+"_"+filepath.Base(tt.inputFile))
data, err := os.ReadFile(tt.inputFile)
if err != nil {
t.Fatal(err)
}
err = os.WriteFile(testFile, data, 0644)
if err != nil {
t.Fatal(err)
}
// Setup options with real converter
options := &OptimizeOptions{
ChapterConverter: converterInstance,
Path: testFile,
Quality: 85,
Override: tt.override,
Split: false,
Timeout: 0,
}
// Run optimization
err = Optimize(options)
if tt.expectError {
if err == nil {
t.Error("Expected error but got none")
}
return
}
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
// Determine expected output path for this test
expectedOutput := tt.expectedOutput
if tt.override && strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
expectedOutput = strings.TrimSuffix(testFile, filepath.Ext(testFile)) + ".cbz"
} else if !tt.override {
if strings.HasSuffix(strings.ToLower(testFile), ".cbz") {
expectedOutput = strings.TrimSuffix(testFile, ".cbz") + "_converted.cbz"
} else if strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
expectedOutput = strings.TrimSuffix(testFile, ".cbr") + "_converted.cbz"
}
} else {
expectedOutput = testFile
}
// Verify output file exists
if _, err := os.Stat(expectedOutput); os.IsNotExist(err) {
t.Errorf("Expected output file not found: %s", expectedOutput)
}
// Verify output is a valid CBZ with converted content
chapter, err := cbz.LoadChapter(expectedOutput)
if err != nil {
t.Errorf("Failed to load converted chapter: %v", err)
}
if !chapter.IsConverted {
t.Error("Chapter is not marked as converted")
}
// Verify all pages are in WebP format (real conversion indicator)
for i, page := range chapter.Pages {
if page.Extension != ".webp" {
t.Errorf("Page %d is not converted to WebP format (got: %s)", i, page.Extension)
}
}
// Verify original file deletion for CBR override
if tt.shouldDelete {
if _, err := os.Stat(testFile); !os.IsNotExist(err) {
t.Error("Original CBR file should have been deleted but still exists")
}
} else {
// Verify original file still exists (unless it's the same as output)
if testFile != expectedOutput {
if _, err := os.Stat(testFile); os.IsNotExist(err) {
t.Error("Original file should not have been deleted")
}
}
}
// Clean up output file
os.Remove(expectedOutput)
})
}
}
func TestOptimizeIntegration_AlreadyConverted(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Create temporary directory
tempDir, err := os.MkdirTemp("", "test_optimize_integration_converted")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Use a converted test file
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping integration tests")
}
// Get the real webp converter
converterInstance, err := converter.Get(constant.WebP)
if err != nil {
t.Skip("WebP converter not available, skipping integration tests")
}
// Prepare the converter
err = converterInstance.PrepareConverter()
if err != nil {
t.Skip("Failed to prepare WebP converter, skipping integration tests")
}
var convertedFile string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.Contains(strings.ToLower(info.Name()), "converted") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
convertedFile = destPath
return filepath.SkipDir
}
return nil
})
if err != nil {
t.Fatal(err)
}
if convertedFile == "" {
t.Skip("No converted test file found")
}
options := &OptimizeOptions{
ChapterConverter: converterInstance,
Path: convertedFile,
Quality: 85,
Override: false,
Split: false,
Timeout: 30 * time.Second,
}
err = Optimize(options)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
// Should not create a new file since it's already converted
expectedOutput := strings.TrimSuffix(convertedFile, ".cbz") + "_converted.cbz"
if _, err := os.Stat(expectedOutput); !os.IsNotExist(err) {
t.Error("Should not have created a new converted file for already converted chapter")
}
}
func TestOptimizeIntegration_InvalidFile(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Get the real webp converter
converterInstance, err := converter.Get(constant.WebP)
if err != nil {
t.Skip("WebP converter not available, skipping integration tests")
}
// Prepare the converter
err = converterInstance.PrepareConverter()
if err != nil {
t.Skip("Failed to prepare WebP converter, skipping integration tests")
}
options := &OptimizeOptions{
ChapterConverter: converterInstance,
Path: "/nonexistent/file.cbz",
Quality: 85,
Override: false,
Split: false,
Timeout: 30 * time.Second,
}
err = Optimize(options)
if err == nil {
t.Error("Expected error for nonexistent file")
}
}
func TestOptimizeIntegration_Timeout(t *testing.T) {
if testing.Short() {
t.Skip("Skipping integration test in short mode")
}
// Create temporary directory
tempDir, err := os.MkdirTemp("", "test_optimize_integration_timeout")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Copy test files
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping integration tests")
}
// Get the real webp converter
converterInstance, err := converter.Get(constant.WebP)
if err != nil {
t.Skip("WebP converter not available, skipping integration tests")
}
// Prepare the converter
err = converterInstance.PrepareConverter()
if err != nil {
t.Skip("Failed to prepare WebP converter, skipping integration tests")
}
var cbzFile string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") && !strings.Contains(info.Name(), "converted") {
destPath := filepath.Join(tempDir, "test.cbz")
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
cbzFile = destPath
return filepath.SkipDir
}
return nil
})
if err != nil {
t.Fatal(err)
}
if cbzFile == "" {
t.Skip("No CBZ test file found")
}
// Test with short timeout to force timeout during conversion
options := &OptimizeOptions{
ChapterConverter: converterInstance,
Path: cbzFile,
Quality: 85,
Override: false,
Split: false,
Timeout: 10 * time.Millisecond, // Very short timeout to force timeout
}
err = Optimize(options)
if err == nil {
t.Error("Expected timeout error but got none")
}
// Check that the error contains timeout information
if err != nil && !strings.Contains(err.Error(), "context deadline exceeded") && !strings.Contains(err.Error(), "timeout") {
t.Errorf("Expected timeout error message, got: %v", err)
}
}

View File

@@ -0,0 +1,424 @@
package utils
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
)
// MockConverter for testing
type MockConverter struct {
shouldFail bool
}
func (m *MockConverter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
if m.shouldFail {
return nil, &MockError{message: "mock conversion error"}
}
// Check if context is already cancelled
select {
case <-ctx.Done():
return nil, ctx.Err()
default:
}
// Simulate some work that can be interrupted by context cancellation
for i := 0; i < len(chapter.Pages); i++ {
select {
case <-ctx.Done():
return nil, ctx.Err()
default:
// Simulate processing time
time.Sleep(100 * time.Microsecond)
if progress != nil {
progress(fmt.Sprintf("Converting page %d/%d", i+1, len(chapter.Pages)), uint32(i+1), uint32(len(chapter.Pages)))
}
}
}
// Create a copy of the chapter to simulate conversion
converted := &manga.Chapter{
FilePath: chapter.FilePath,
Pages: chapter.Pages,
ComicInfoXml: chapter.ComicInfoXml,
IsConverted: true,
ConvertedTime: time.Now(),
}
return converted, nil
}
func (m *MockConverter) Format() constant.ConversionFormat {
return constant.WebP
}
func (m *MockConverter) PrepareConverter() error {
if m.shouldFail {
return &MockError{message: "mock prepare error"}
}
return nil
}
type MockError struct {
message string
}
func (e *MockError) Error() string {
return e.message
}
func TestOptimize(t *testing.T) {
// Create temporary directory for tests
tempDir, err := os.MkdirTemp("", "test_optimize")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Copy test files
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping tests")
}
// Copy sample files
var cbzFile, cbrFile string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() {
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") && !strings.Contains(fileName, "converted") {
destPath := filepath.Join(tempDir, "test.cbz")
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
cbzFile = destPath
} else if strings.HasSuffix(fileName, ".cbr") {
destPath := filepath.Join(tempDir, "test.cbr")
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
cbrFile = destPath
}
}
return nil
})
if err != nil {
t.Fatal(err)
}
if cbzFile == "" {
t.Skip("No CBZ test file found")
}
// Create a CBR file by copying the CBZ file if no CBR exists
if cbrFile == "" {
cbrFile = filepath.Join(tempDir, "test.cbr")
data, err := os.ReadFile(cbzFile)
if err != nil {
t.Fatal(err)
}
err = os.WriteFile(cbrFile, data, 0644)
if err != nil {
t.Fatal(err)
}
}
tests := []struct {
name string
inputFile string
override bool
expectedOutput string
shouldDelete bool
expectError bool
mockFail bool
}{
{
name: "CBZ file without override",
inputFile: cbzFile,
override: false,
expectedOutput: strings.TrimSuffix(cbzFile, ".cbz") + "_converted.cbz",
shouldDelete: false,
expectError: false,
},
{
name: "CBZ file with override",
inputFile: cbzFile,
override: true,
expectedOutput: cbzFile,
shouldDelete: false,
expectError: false,
},
{
name: "CBR file without override",
inputFile: cbrFile,
override: false,
expectedOutput: strings.TrimSuffix(cbrFile, ".cbr") + "_converted.cbz",
shouldDelete: false,
expectError: false,
},
{
name: "CBR file with override",
inputFile: cbrFile,
override: true,
expectedOutput: strings.TrimSuffix(cbrFile, ".cbr") + ".cbz",
shouldDelete: true,
expectError: false,
},
{
name: "Converter failure",
inputFile: cbzFile,
override: false,
expectError: true,
mockFail: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Create a copy of the input file for this test
testFile := filepath.Join(tempDir, tt.name+"_"+filepath.Base(tt.inputFile))
data, err := os.ReadFile(tt.inputFile)
if err != nil {
t.Fatal(err)
}
err = os.WriteFile(testFile, data, 0644)
if err != nil {
t.Fatal(err)
}
// Setup options
options := &OptimizeOptions{
ChapterConverter: &MockConverter{shouldFail: tt.mockFail},
Path: testFile,
Quality: 85,
Override: tt.override,
Split: false,
Timeout: 0,
}
// Run optimization
err = Optimize(options)
if tt.expectError {
if err == nil {
t.Error("Expected error but got none")
}
return
}
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
// Determine expected output path for this test
expectedOutput := tt.expectedOutput
if tt.override && strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
expectedOutput = strings.TrimSuffix(testFile, filepath.Ext(testFile)) + ".cbz"
} else if !tt.override {
if strings.HasSuffix(strings.ToLower(testFile), ".cbz") {
expectedOutput = strings.TrimSuffix(testFile, ".cbz") + "_converted.cbz"
} else if strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
expectedOutput = strings.TrimSuffix(testFile, ".cbr") + "_converted.cbz"
}
} else {
expectedOutput = testFile
}
// Verify output file exists
if _, err := os.Stat(expectedOutput); os.IsNotExist(err) {
t.Errorf("Expected output file not found: %s", expectedOutput)
}
// Verify output is a valid CBZ
chapter, err := cbz.LoadChapter(expectedOutput)
if err != nil {
t.Errorf("Failed to load converted chapter: %v", err)
}
if !chapter.IsConverted {
t.Error("Chapter is not marked as converted")
}
// Verify original file deletion for CBR override
if tt.shouldDelete {
if _, err := os.Stat(testFile); !os.IsNotExist(err) {
t.Error("Original CBR file should have been deleted but still exists")
}
} else {
// Verify original file still exists (unless it's the same as output)
if testFile != expectedOutput {
if _, err := os.Stat(testFile); os.IsNotExist(err) {
t.Error("Original file should not have been deleted")
}
}
}
// Clean up output file
os.Remove(expectedOutput)
})
}
}
func TestOptimize_AlreadyConverted(t *testing.T) {
// Create temporary directory
tempDir, err := os.MkdirTemp("", "test_optimize_converted")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Use a converted test file
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping tests")
}
var convertedFile string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.Contains(strings.ToLower(info.Name()), "converted") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
convertedFile = destPath
return filepath.SkipDir
}
return nil
})
if err != nil {
t.Fatal(err)
}
if convertedFile == "" {
t.Skip("No converted test file found")
}
options := &OptimizeOptions{
ChapterConverter: &MockConverter{},
Path: convertedFile,
Quality: 85,
Override: false,
Split: false,
Timeout: 0,
}
err = Optimize(options)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
// Should not create a new file since it's already converted
expectedOutput := strings.TrimSuffix(convertedFile, ".cbz") + "_converted.cbz"
if _, err := os.Stat(expectedOutput); !os.IsNotExist(err) {
t.Error("Should not have created a new converted file for already converted chapter")
}
}
func TestOptimize_InvalidFile(t *testing.T) {
options := &OptimizeOptions{
ChapterConverter: &MockConverter{},
Path: "/nonexistent/file.cbz",
Quality: 85,
Override: false,
Split: false,
Timeout: 0,
}
err := Optimize(options)
if err == nil {
t.Error("Expected error for nonexistent file")
}
}
func TestOptimize_Timeout(t *testing.T) {
// Create temporary directory
tempDir, err := os.MkdirTemp("", "test_optimize_timeout")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Copy test files
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping tests")
}
var cbzFile string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") && !strings.Contains(info.Name(), "converted") {
destPath := filepath.Join(tempDir, "test.cbz")
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
cbzFile = destPath
return filepath.SkipDir
}
return nil
})
if err != nil {
t.Fatal(err)
}
if cbzFile == "" {
t.Skip("No CBZ test file found")
}
// Test with short timeout (500 microseconds) to force timeout during conversion
options := &OptimizeOptions{
ChapterConverter: &MockConverter{},
Path: cbzFile,
Quality: 85,
Override: false,
Split: false,
Timeout: 500 * time.Microsecond, // 500 microseconds - should timeout during page processing
}
err = Optimize(options)
if err == nil {
t.Error("Expected timeout error but got none")
}
// Check that the error contains timeout information
if err != nil && !strings.Contains(err.Error(), "context deadline exceeded") {
t.Errorf("Expected timeout error message, got: %v", err)
}
}

View File

@@ -1,12 +1,14 @@
package converter
import (
"context"
"fmt"
"strings"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/webp"
"github.com/samber/lo"
"strings"
)
type Converter interface {
@@ -15,7 +17,7 @@ type Converter interface {
// ConvertChapter converts a manga chapter to the specified format.
//
// Returns partial success where some pages are converted and some are not.
ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error)
PrepareConverter() error
}
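A short caller sketch for the new context-aware signature, assuming conv implements Converter and chapter was loaded earlier; only the ctx plumbing is new:

// Hypothetical sketch: bound a conversion with a deadline via the new ctx parameter.
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
defer cancel()
converted, err := conv.ConvertChapter(ctx, chapter, 80, true, func(msg string, current, total uint32) {
	log.Debug().Uint32("current", current).Uint32("total", total).Msg(msg)
})
if errors.Is(err, context.DeadlineExceeded) {
	log.Warn().Msg("conversion aborted by timeout")
} else if err == nil {
	log.Info().Int("pages", len(converted.Pages)).Msg("conversion finished")
}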

View File

@@ -2,59 +2,56 @@ package converter
import (
"bytes"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"golang.org/x/exp/slices"
"context"
"image"
"image/jpeg"
"os"
"testing"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
)
func TestConvertChapter(t *testing.T) {
testCases := []struct {
name string
genTestChapter func(path string, isSplit bool) (*manga.Chapter, []string, error)
split bool
expectError bool
}{
{
name: "All split pages",
genTestChapter: genHugePage,
split: true,
},
{
name: "Big Pages, no split",
genTestChapter: genHugePage,
split: false,
expectError: true,
},
{
name: "No split pages",
genTestChapter: genSmallPages,
split: false,
},
{
name: "Mix of split and no split pages",
genTestChapter: genMixSmallBig,
split: true,
},
{
name: "Mix of Huge and small page",
genTestChapter: genMixSmallHuge,
split: false,
expectError: true,
},
{
name: "Two corrupted pages",
genTestChapter: genTwoCorrupted,
split: false,
expectError: true,
},
}
// Load test genTestChapter from testdata
@@ -72,7 +69,7 @@ func TestConvertChapter(t *testing.T) {
t.Run(converter.Format().String(), func(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
chapter, expectedExtensions, err := tc.genTestChapter(temp.Name(), tc.split)
if err != nil {
t.Fatalf("failed to load test genTestChapter: %v", err)
}
@@ -83,32 +80,24 @@ func TestConvertChapter(t *testing.T) {
t.Log(msg)
}
convertedChapter, err := converter.ConvertChapter(context.Background(), chapter, quality, tc.split, progress)
if err != nil && !tc.expectError {
t.Fatalf("failed to convert genTestChapter: %v", err)
}
if len(convertedChapter.Pages) == 0 {
t.Fatalf("no pages were converted")
}
if len(convertedChapter.Pages) != len(expectedExtensions) {
t.Fatalf("converted chapter has %d pages but expected %d", len(convertedChapter.Pages), len(expectedExtensions))
}
// Check each page's extension against the expected array
for i, page := range convertedChapter.Pages {
expectedExt := expectedExtensions[i]
if page.Extension != expectedExt {
t.Errorf("page %d has extension %s but expected %s", page.Index, page.Extension, expectedExt)
}
}
})
@@ -117,39 +106,43 @@ func TestConvertChapter(t *testing.T) {
}
}
func genHugePage(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
expectedExtensions := []string{".jpg"} // One image that's generated as JPEG
if isSplit {
expectedExtensions = []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp"}
}
// Create one tall page
img := image.NewRGBA(image.Rect(0, 0, 1, 17000))
buf := new(bytes.Buffer)
err = jpeg.Encode(buf, img, nil)
if err != nil {
return nil, nil, err
}
page := &manga.Page{
Index: 0,
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, expectedExtensions, nil
}
func genSmallPages(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
@@ -157,9 +150,9 @@ func genSmallPages(path string) (*manga.Chapter, error) {
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000))
buf := new(bytes.Buffer)
err = jpeg.Encode(buf, img, nil)
if err != nil {
return nil, nil, err
}
page := &manga.Page{
Index: uint16(i),
@@ -172,13 +165,13 @@ func genSmallPages(path string) (*manga.Chapter, error) {
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, []string{".webp", ".webp", ".webp", ".webp", ".webp"}, nil
}
func genMixSmallBig(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
@@ -188,7 +181,7 @@ func genMixSmallBig(path string) (*manga.Chapter, error) {
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, nil, err
}
page := &manga.Page{
Index: uint16(i),
@@ -197,17 +190,21 @@ func genMixSmallBig(path string) (*manga.Chapter, error) {
}
pages = append(pages, page)
}
expectedExtensions := []string{".webp", ".webp", ".webp", ".webp", ".webp"}
if isSplit {
expectedExtensions = []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp"}
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, expectedExtensions, nil
}
func genMixSmallHuge(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
@@ -217,7 +214,7 @@ func genMixSmallHuge(path string) (*manga.Chapter, error) {
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, nil, err
}
page := &manga.Page{
Index: uint16(i),
@@ -230,5 +227,55 @@ func genMixSmallHuge(path string) (*manga.Chapter, error) {
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".jpg", ".jpg"}, nil
}
func genTwoCorrupted(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
numPages := 5
corruptedIndices := []int{2, 4} // Pages 2 and 4 are too tall to convert without splitting
for i := 0; i < numPages; i++ {
var buf *bytes.Buffer
var ext string
isCorrupted := false
for _, ci := range corruptedIndices {
if i == ci {
isCorrupted = true
break
}
}
if isCorrupted {
buf = bytes.NewBufferString("corrupted data") // Invalid data, can't decode as image
ext = ".jpg"
} else {
img := image.NewRGBA(image.Rect(0, 0, 300, 1000))
buf = new(bytes.Buffer)
err = jpeg.Encode(buf, img, nil)
if err != nil {
return nil, nil, err
}
ext = ".jpg"
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ext,
}
pages = append(pages, page)
}
// Expected: small pages to .webp, corrupted pages to .jpg (kept as is)
expectedExtensions := []string{".webp", ".webp", ".jpg", ".webp", ".jpg"}
// Even with split, corrupted pages can't be decoded so stay as is
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, expectedExtensions, nil
}

View File

@@ -2,9 +2,11 @@ package webp
import (
"bytes"
"context"
"errors"
"fmt"
"image"
_ "image/gif"
_ "image/jpeg"
"image/png"
"runtime"
@@ -15,6 +17,7 @@ import (
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
converterrors "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
"github.com/oliamb/cutter"
"github.com/rs/zerolog/log"
"golang.org/x/exp/slices"
_ "golang.org/x/image/webp"
)
@@ -52,9 +55,18 @@ func (converter *Converter) PrepareConverter() error {
return nil
}
func (converter *Converter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
log.Debug().
Str("chapter", chapter.FilePath).
Int("pages", len(chapter.Pages)).
Uint8("quality", quality).
Bool("split", split).
Int("max_goroutines", runtime.NumCPU()).
Msg("Starting chapter conversion")
err := converter.PrepareConverter()
if err != nil {
log.Error().Str("chapter", chapter.FilePath).Err(err).Msg("Failed to prepare converter")
return nil, err
}
@@ -73,26 +85,61 @@ func (converter *Converter) ConvertChapter(chapter *manga.Chapter, quality uint8
var pages []*manga.Page
var totalPages = uint32(len(chapter.Pages))
log.Debug().
Str("chapter", chapter.FilePath).
Int("total_pages", len(chapter.Pages)).
Int("worker_count", maxGoroutines).
Msg("Initialized conversion worker pool")
// Check if context is already cancelled
select {
case <-ctx.Done():
log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
return nil, ctx.Err()
default:
}
// Start the worker pool
go func() {
defer close(doneChan)
for page := range pagesChan {
select {
case <-ctx.Done():
return
case guard <- struct{}{}: // would block if guard channel is already filled
}
go func(pageToConvert *manga.PageContainer) {
defer func() {
wgConvertedPages.Done()
<-guard
}()
// Check context cancellation before processing
select {
case <-ctx.Done():
return
default:
}
convertedPage, err := converter.convertPage(pageToConvert, quality)
if err != nil {
if convertedPage == nil {
select {
case errChan <- err:
case <-ctx.Done():
return
}
return
}
buffer := new(bytes.Buffer)
err := png.Encode(buffer, convertedPage.Image)
if err != nil {
select {
case errChan <- err:
case <-ctx.Done():
return
}
return
}
convertedPage.Page.Contents = buffer
@@ -100,50 +147,88 @@ func (converter *Converter) ConvertChapter(chapter *manga.Chapter, quality uint8
convertedPage.Page.Size = uint64(buffer.Len())
}
pagesMutex.Lock()
pages = append(pages, convertedPage.Page)
pagesMutex.Unlock()
currentTotalPages := atomic.LoadUint32(&totalPages)
progress(fmt.Sprintf("Converted %d/%d pages to %s format", len(pages), currentTotalPages, converter.Format()), uint32(len(pages)), currentTotalPages)
}(page)
}
}()
// Process pages
for _, page := range chapter.Pages {
select {
case <-ctx.Done():
log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
return nil, ctx.Err()
default:
}
go func(page *manga.Page) {
defer wgPages.Done()
splitNeeded, img, format, err := converter.checkPageNeedsSplit(page, split)
if err != nil {
var pageIgnoredError *converterrors.PageIgnoredError
if errors.As(err, &pageIgnoredError) {
log.Info().Err(err).Msg("Page ignored due to image decode error")
}
select {
case errChan <- err:
case <-ctx.Done():
return
}
wgConvertedPages.Add(1)
select {
case pagesChan <- manga.NewContainer(page, img, format, false):
case <-ctx.Done():
return
}
return
}
if !splitNeeded {
wgConvertedPages.Add(1)
select {
case pagesChan <- manga.NewContainer(page, img, format, true):
case <-ctx.Done():
return
}
return
}
images, err := converter.cropImage(img)
if err != nil {
select {
case errChan <- err:
case <-ctx.Done():
return
}
return
}
atomic.AddUint32(&totalPages, uint32(len(images)-1))
for i, img := range images {
select {
case <-ctx.Done():
return
default:
}
newPage := &manga.Page{
Index: page.Index,
IsSplitted: true,
SplitPartIndex: uint16(i),
}
wgConvertedPages.Add(1)
select {
case pagesChan <- manga.NewContainer(newPage, img, "N/A", true):
case <-ctx.Done():
return
}
}
}(page)
}
@@ -151,9 +236,21 @@ func (converter *Converter) ConvertChapter(chapter *manga.Chapter, quality uint8
wgPages.Wait()
close(pagesChan)
// Wait for all conversions to complete or context cancellation
done := make(chan struct{})
go func() {
defer close(done)
wgConvertedPages.Wait()
}()
select {
case <-done:
// Conversion completed successfully
case <-ctx.Done():
log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
return nil, ctx.Err()
}
close(errChan)
close(guard)
@@ -165,6 +262,16 @@ func (converter *Converter) ConvertChapter(chapter *manga.Chapter, quality uint8
var aggregatedError error = nil
if len(errList) > 0 {
aggregatedError = errors.Join(errList...)
log.Debug().
Str("chapter", chapter.FilePath).
Int("error_count", len(errList)).
Err(errors.Join(errList...)).
Msg("Conversion completed with errors")
} else {
log.Debug().
Str("chapter", chapter.FilePath).
Int("pages_converted", len(pages)).
Msg("Conversion completed successfully")
}
slices.SortFunc(pages, func(a, b *manga.Page) int {
@@ -175,7 +282,13 @@ func (converter *Converter) ConvertChapter(chapter *manga.Chapter, quality uint8
})
chapter.Pages = pages
log.Debug().
Str("chapter", chapter.FilePath).
Int("final_page_count", len(pages)).
Msg("Pages sorted and chapter updated")
runtime.GC()
log.Debug().Str("chapter", chapter.FilePath).Msg("Garbage collection completed")
return chapter, aggregatedError
}
@@ -183,12 +296,20 @@ func (converter *Converter) ConvertChapter(chapter *manga.Chapter, quality uint8
func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
bounds := img.Bounds()
height := bounds.Dy()
width := bounds.Dx()
numParts := height / converter.cropHeight
if height%converter.cropHeight != 0 {
numParts++
}
log.Debug().
Int("original_width", width).
Int("original_height", height).
Int("crop_height", converter.cropHeight).
Int("num_parts", numParts).
Msg("Starting image cropping for page splitting")
parts := make([]image.Image, numParts)
for i := 0; i < numParts; i++ {
@@ -197,6 +318,12 @@ func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
partHeight = height - i*converter.cropHeight
}
log.Debug().
Int("part_index", i).
Int("part_height", partHeight).
Int("y_offset", i*converter.cropHeight).
Msg("Cropping image part")
part, err := cutter.Crop(img, cutter.Config{
Width: bounds.Dx(),
Height: partHeight,
@@ -204,45 +331,119 @@ func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
Mode: cutter.TopLeft,
})
if err != nil {
log.Error().
Int("part_index", i).
Err(err).
Msg("Failed to crop image part")
return nil, fmt.Errorf("error cropping part %d: %v", i+1, err)
}
parts[i] = part
log.Debug().
Int("part_index", i).
Int("cropped_width", part.Bounds().Dx()).
Int("cropped_height", part.Bounds().Dy()).
Msg("Image part cropped successfully")
}
log.Debug().
Int("total_parts", len(parts)).
Msg("Image cropping completed")
return parts, nil
}
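A worked example of the part count above; the crop height value is an assumption (it is not stated in this diff), chosen so a 17000 px page yields the nine parts the split tests expect:

// Assumed numbers, for illustration only.
cropHeight := 2000
height := 17000
numParts := height / cropHeight // 8 full parts
if height%cropHeight != 0 {
	numParts++ // 9: eight 2000 px slices plus a final 1000 px remainder
}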
func (converter *Converter) checkPageNeedsSplit(page *manga.Page, splitRequested bool) (bool, image.Image, string, error) {
log.Debug().
Uint16("page_index", page.Index).
Bool("split_requested", splitRequested).
Int("page_size", len(page.Contents.Bytes())).
Msg("Analyzing page for splitting")
reader := bytes.NewBuffer(page.Contents.Bytes())
img, format, err := image.Decode(reader)
if err != nil {
log.Debug().Uint16("page_index", page.Index).Err(err).Msg("Failed to decode page image")
return false, nil, format, converterrors.NewPageIgnored(fmt.Sprintf("page %d: failed to decode image (%s)", page.Index, err.Error()))
}
bounds := img.Bounds()
height := bounds.Dy()
width := bounds.Dx()
log.Debug().
Uint16("page_index", page.Index).
Int("width", width).
Int("height", height).
Str("format", format).
Int("max_height", converter.maxHeight).
Int("webp_max_height", webpMaxHeight).
Msg("Page dimensions analyzed")
if height >= webpMaxHeight && !splitRequested {
log.Debug().
Uint16("page_index", page.Index).
Int("height", height).
Int("webp_max", webpMaxHeight).
Msg("Page too tall for WebP format, would be ignored")
return false, img, format, converterrors.NewPageIgnored(fmt.Sprintf("page %d is too tall [max: %dpx] to be converted to webp format", page.Index, webpMaxHeight))
}
needsSplit := height >= converter.maxHeight && splitRequested
log.Debug().
Uint16("page_index", page.Index).
Bool("needs_split", needsSplit).
Msg("Page splitting decision made")
return needsSplit, img, format, nil
}
func (converter *Converter) convertPage(container *manga.PageContainer, quality uint8) (*manga.PageContainer, error) {
log.Debug().
Uint16("page_index", container.Page.Index).
Str("format", container.Format).
Bool("to_be_converted", container.IsToBeConverted).
Uint8("quality", quality).
Msg("Converting page")
// Fix WebP format detection (case insensitive)
if container.Format == "webp" || container.Format == "WEBP" {
log.Debug().
Uint16("page_index", container.Page.Index).
Msg("Page already in WebP format, skipping conversion")
container.Page.Extension = ".webp"
return container, nil
}
if !container.IsToBeConverted {
log.Debug().
Uint16("page_index", container.Page.Index).
Msg("Page marked as not to be converted, skipping")
return container, nil
}
log.Debug().
Uint16("page_index", container.Page.Index).
Uint8("quality", quality).
Msg("Encoding page to WebP format")
converted, err := converter.convert(container.Image, uint(quality))
if err != nil {
log.Error().
Uint16("page_index", container.Page.Index).
Err(err).
Msg("Failed to convert page to WebP")
return nil, err
}
container.SetConverted(converted, ".webp");
container.SetConverted(converted, ".webp")
log.Debug().
Uint16("page_index", container.Page.Index).
Int("original_size", len(container.Page.Contents.Bytes())).
Int("converted_size", len(converted.Bytes())).
Msg("Page conversion completed")
return container, nil
}

View File

@@ -2,12 +2,15 @@ package webp
import (
"bytes"
"context"
"image"
"image/color"
"image/gif"
"image/jpeg"
"image/png"
"sync"
"testing"
"image/jpeg"
_ "golang.org/x/image/webp"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
@@ -42,6 +45,11 @@ func encodeImage(img image.Image, format string) (*bytes.Buffer, string, error)
return nil, "", err
}
return buf, ".jpg", nil
case "gif":
if err := gif.Encode(buf, img, nil); err != nil {
return nil, "", err
}
return buf, ".gif", nil
case "webp":
PrepareEncoder()
if err := Encode(buf, img, 80); err != nil {
@@ -129,10 +137,23 @@ func TestConverter_ConvertChapter(t *testing.T) {
pages: []*manga.Page{
createTestPage(t, 1, 800, 1200, "png"),
createTestPage(t, 2, 800, 1200, "jpeg"),
createTestPage(t, 3, 800, 1200, "gif"),
},
split: false,
expectSplit: false,
numExpected: 3,
},
{
name: "Multiple normal images with webp",
pages: []*manga.Page{
createTestPage(t, 1, 800, 1200, "png"),
createTestPage(t, 2, 800, 1200, "jpeg"),
createTestPage(t, 3, 800, 1200, "gif"),
createTestPage(t, 4, 800, 1200, "webp"),
},
split: false,
expectSplit: false,
numExpected: 4,
},
{
name: "Tall image with split enabled",
@@ -170,7 +191,7 @@ func TestConverter_ConvertChapter(t *testing.T) {
assert.LessOrEqual(t, current, total, "Current progress should not exceed total")
}
convertedChapter, err := converter.ConvertChapter(context.Background(), chapter, 80, tt.split, progress)
if tt.expectError {
assert.Error(t, err)
@@ -227,24 +248,35 @@ func TestConverter_convertPage(t *testing.T) {
format string
isToBeConverted bool
expectWebP bool
expectError bool
}{
{
name: "Convert PNG to WebP",
format: "png",
isToBeConverted: true,
expectWebP: true,
expectError: false,
},
{
name: "Convert GIF to WebP",
format: "gif",
isToBeConverted: true,
expectWebP: true,
expectError: false,
},
{
name: "Already WebP",
format: "webp",
isToBeConverted: true,
expectWebP: true,
expectError: false,
},
{
name: "Skip conversion",
format: "png",
isToBeConverted: false,
expectWebP: false,
expectError: false,
},
}
@@ -256,19 +288,48 @@ func TestConverter_convertPage(t *testing.T) {
container := manga.NewContainer(page, img, tt.format, tt.isToBeConverted)
converted, err := converter.convertPage(container, 80)
if tt.expectError {
assert.Error(t, err)
assert.Nil(t, converted)
} else {
require.NoError(t, err)
assert.NotNil(t, converted)
if tt.expectWebP {
assert.Equal(t, ".webp", converted.Page.Extension)
validateConvertedImage(t, converted.Page)
} else {
assert.NotEqual(t, ".webp", converted.Page.Extension)
}
}
})
}
}
func TestConverter_convertPage_EncodingError(t *testing.T) {
converter := New()
err := converter.PrepareConverter()
require.NoError(t, err)
// Create a test case with nil image to test encoding error path
// when isToBeConverted is true but the image is nil, simulating a failure in the encoding step
corruptedPage := &manga.Page{
Index: 1,
Contents: &bytes.Buffer{}, // Empty buffer
Extension: ".png",
Size: 0,
}
container := manga.NewContainer(corruptedPage, nil, "png", true)
converted, err := converter.convertPage(container, 80)
// This should return nil container and error because encoding will fail with nil image
assert.Error(t, err)
assert.Nil(t, converted)
}
func TestConverter_checkPageNeedsSplit(t *testing.T) {
converter := New()
@@ -322,3 +383,42 @@ func TestConverter_Format(t *testing.T) {
converter := New()
assert.Equal(t, constant.WebP, converter.Format())
}
func TestConverter_ConvertChapter_Timeout(t *testing.T) {
converter := New()
err := converter.PrepareConverter()
require.NoError(t, err)
// Create a test chapter with a few pages
pages := []*manga.Page{
createTestPage(t, 1, 800, 1200, "jpeg"),
createTestPage(t, 2, 800, 1200, "png"),
createTestPage(t, 3, 800, 1200, "gif"),
}
chapter := &manga.Chapter{
FilePath: "/test/chapter.cbz",
Pages: pages,
}
var progressMutex sync.Mutex
var lastProgress uint32
progress := func(message string, current uint32, total uint32) {
progressMutex.Lock()
defer progressMutex.Unlock()
assert.GreaterOrEqual(t, current, lastProgress, "Progress should never decrease")
lastProgress = current
assert.LessOrEqual(t, current, total, "Current progress should not exceed total")
}
// Test with very short timeout (1 nanosecond)
ctx, cancel := context.WithTimeout(context.Background(), 1)
defer cancel()
convertedChapter, err := converter.ConvertChapter(ctx, chapter, 80, false, progress)
// Should return context error due to timeout
assert.Error(t, err)
assert.Nil(t, convertedChapter)
assert.Equal(t, context.DeadlineExceeded, err)
}

View File

@@ -1,20 +1,44 @@
package webp
import (
"github.com/belphemur/go-webpbin/v2"
"fmt"
"image"
"io"
"strings"
"sync"
"github.com/belphemur/go-webpbin/v2"
)
const libwebpVersion = "1.5.0"
const libwebpVersion = "1.6.0"
var config = webpbin.NewConfig()
var prepareMutex sync.Mutex
func init() {
config.SetLibVersion(libwebpVersion)
}
func PrepareEncoder() error {
prepareMutex.Lock()
defer prepareMutex.Unlock()
container := webpbin.NewCWebP(config)
version, err := container.Version()
if err != nil {
return err
}
if !strings.HasPrefix(version, libwebpVersion) {
return fmt.Errorf("unexpected webp version: got %s, want %s", version, libwebpVersion)
}
return nil
}
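A minimal sketch of how the two exported helpers fit together, assuming an image.Image value named img and a bytes.Buffer destination; PrepareEncoder is safe to call repeatedly thanks to the mutex above:

// Hypothetical usage sketch: verify the cwebp binary once, then encode.
if err := PrepareEncoder(); err != nil {
	return fmt.Errorf("webp encoder not ready: %w", err)
}
var out bytes.Buffer
if err := Encode(&out, img, 80); err != nil {
	return fmt.Errorf("webp encode failed: %w", err)
}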
func Encode(w io.Writer, m image.Image, quality uint) error {
return webpbin.NewCWebP(config).
Quality(quality).
InputImage(m).
Output(w).

Binary file not shown.

BIN
testdata/Chapter 128.cbz vendored Normal file

Binary file not shown.

Binary file not shown.