172 Commits

Author SHA1 Message Date
Antoine Aflalo
9bca0ceaf4 fix: add autocomplete definition for log level 2025-08-26 22:39:23 -04:00
Antoine Aflalo
c2a6220fde fix(logging): fix logging parameter not taken into account 2025-08-26 22:36:23 -04:00
Antoine Aflalo
e26cf7a26a fix: test 2025-08-26 21:37:51 -04:00
Antoine Aflalo
4e5180f658 feat: add timeout option for chapter conversion to prevent hanging on problematic files
fixes #102
2025-08-26 21:34:52 -04:00
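The timeout introduced here surfaces later in this changeset as a `--timeout` duration flag passed through `OptimizeOptions` (see the optimize command diff below). The enforcement code itself is not part of the excerpt, but since the `Converter` interface takes a `context.Context` (see the test mock further down), a per-chapter deadline presumably looks roughly like the following sketch; `convertOne` and its wiring are hypothetical.

```go
package main

import (
	"context"
	"time"

	"github.com/belphemur/CBZOptimizer/v2/internal/manga"
	"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
	"github.com/rs/zerolog/log"
)

// convertOne is a sketch only: wrap a single chapter conversion in a deadline
// so one problematic file cannot hang the whole run. A zero timeout means
// "no limit", matching the flag's documented default.
func convertOne(conv converter.Converter, chapter *manga.Chapter, quality uint8, split bool, timeout time.Duration) (*manga.Chapter, error) {
	ctx := context.Background()
	if timeout > 0 {
		var cancel context.CancelFunc
		ctx, cancel = context.WithTimeout(ctx, timeout)
		defer cancel()
	}
	return conv.ConvertChapter(ctx, chapter, quality, split, func(msg string, current, total uint32) {
		log.Debug().Uint32("current", current).Uint32("total", total).Msg(msg)
	})
}
```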
Antoine Aflalo
e7bbae1c25 chore: bump webp 2025-08-26 21:20:56 -04:00
Antoine Aflalo
32c009ed9b feat: integrate zerolog for enhanced logging across multiple components 2025-08-26 21:16:54 -04:00
Antoine Aflalo
94fb60c5c6 feat: enhance logging capabilities with zerolog integration and command-line support 2025-08-26 21:07:48 -04:00
Antoine Aflalo
dfee46812d feat: use Zerolog for logging. 2025-08-26 20:55:34 -04:00
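These three zerolog commits add the `--log` flag and `LOG_LEVEL` environment variable that appear in the `root.go` diff at the end of this changeset. The body of `ConfigureLogging` is not included in the excerpt; a minimal sketch of the usual zerolog pattern (parse a level name, apply it globally, log through a console writer) is shown below. The helper name and the `LOG_LEVEL` fallback are assumptions.

```go
package main

import (
	"os"

	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
)

// configureLogging is a hypothetical stand-in for the project's ConfigureLogging:
// it resolves a level name (flag value, else LOG_LEVEL) and applies it globally.
func configureLogging(levelName string) {
	if levelName == "" {
		levelName = os.Getenv("LOG_LEVEL") // assumed fallback, mirroring viper.BindEnv("log", "LOG_LEVEL")
	}
	level, err := zerolog.ParseLevel(levelName)
	if err != nil || levelName == "" {
		level = zerolog.InfoLevel // info is the documented default
	}
	zerolog.SetGlobalLevel(level)
	log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
}
```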
Antoine Aflalo
d0e4037e15 Merge pull request #101 from Belphemur/dependabot/go_modules/go_modules-e1b2e84e8b 2025-08-26 20:38:53 -04:00
renovate[bot]
8539abe99e fix(deps): update module github.com/stretchr/testify to v1.11.0 (#103)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-24 17:46:08 +00:00
dependabot[bot]
f1151435e1 chore(deps): bump github.com/go-viper/mapstructure/v2
Bumps the go_modules group with 1 update in the / directory: [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure).


Updates `github.com/go-viper/mapstructure/v2` from 2.3.0 to 2.4.0
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.3.0...v2.4.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.4.0
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-08-21 15:25:11 +00:00
renovate[bot]
c6e00fda5d fix(deps): update golang.org/x/exp digest to 8b4c13b (#100)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-19 23:02:35 +00:00
renovate[bot]
2f37936a72 chore(deps): update anchore/sbom-action action to v0.20.5 (#99)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-14 18:05:44 +00:00
renovate[bot]
f0d5c254a6 fix(deps): update golang.org/x/exp digest to 42675ad (#98)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-13 16:28:31 +00:00
renovate[bot]
e35b7b3ae8 chore(deps): update dependency go to v1.25.0 (#97)
* chore(deps): update dependency go to v1.25.0

* chore: move ci/cd to 1.25

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Antoine Aflalo <197810+Belphemur@users.noreply.github.com>
2025-08-13 01:04:05 +00:00
Antoine Aflalo
43d9550e6e Merge pull request #95 from Belphemur/renovate/actions-checkout-5.x 2025-08-11 20:33:18 -04:00
renovate[bot]
e7fa06f4d3 fix(deps): update golang.org/x/exp digest to 51f8813 (#96)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-11 22:47:50 +00:00
renovate[bot]
8b48da1b25 chore(deps): update actions/checkout action to v5 2025-08-11 16:24:11 +00:00
renovate[bot]
fdcc9bf076 fix(deps): update golang.org/x/exp digest to a408d31 (#94)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-08 17:00:38 +00:00
renovate[bot]
38b9d4f1bd fix(deps): update module golang.org/x/image to v0.30.0 (#93)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-07 21:44:51 +00:00
renovate[bot]
fbc1ec7d75 chore(deps): update dependency go to v1.24.6 (#92)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-06 20:48:02 +00:00
renovate[bot]
e7b566ff63 chore(deps): update anchore/sbom-action action to v0.20.4 (#91)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-22 03:11:33 +00:00
renovate[bot]
d73d0347b1 fix(deps): update golang.org/x/exp digest to 645b1fa (#90)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-18 20:32:01 +00:00
renovate[bot]
04b9dbb2dd chore(deps): update sigstore/cosign-installer action to v3.9.2 (#89)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-17 22:49:46 +00:00
renovate[bot]
5d767470a8 fix(deps): update golang.org/x/exp digest to 542afb5 (#88)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-17 22:49:33 +00:00
renovate[bot]
473c6f40e8 fix(deps): update golang.org/x/exp digest to 6ae5c78 (#87)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-11 23:47:39 +00:00
renovate[bot]
403f43a417 fix(deps): update module golang.org/x/image to v0.29.0 (#86)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-10 03:11:22 +00:00
renovate[bot]
1bfe755dd9 chore(deps): update dependency go to v1.24.5 (#85)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-08 22:53:26 +00:00
renovate[bot]
3cd6a4ab1f chore(deps): update anchore/sbom-action action to v0.20.2 (#84)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-02 22:27:30 +00:00
Antoine Aflalo
117206e0ee Merge pull request #83 from Belphemur/dependabot/go_modules/go_modules-3464edad9a 2025-06-27 15:48:52 -04:00
dependabot[bot]
1e43f9d8a0 chore(deps): bump github.com/go-viper/mapstructure/v2
Bumps the go_modules group with 1 update in the / directory: [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure).


Updates `github.com/go-viper/mapstructure/v2` from 2.2.1 to 2.3.0
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.2.1...v2.3.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.3.0
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-27 16:49:44 +00:00
renovate[bot]
6f8b525a96 fix(deps): update module github.com/mholt/archives to v0.1.3 (#82)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-26 22:52:14 +00:00
renovate[bot]
9480cc0e36 chore(deps): update sigstore/cosign-installer action to v3.9.1 (#81)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-23 17:36:00 +00:00
renovate[bot]
a72cd3f84f fix(deps): update golang.org/x/exp digest to b7579e2 (#80)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-20 07:16:04 +00:00
renovate[bot]
a3424494cc chore(deps): update sigstore/cosign-installer action to v3.9.0 (#79)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-17 13:39:58 +00:00
renovate[bot]
85d0b8bbca chore(deps): update anchore/sbom-action action to v0.20.1 (#78)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-13 21:55:26 +00:00
Antoine Aflalo
29f7fbbc0d Merge pull request #77 from Belphemur/Belphemur/issue75 2025-06-12 09:26:34 -04:00
Antoine Aflalo
1258b06210 docs: update README to include CBR file support and clarify features 2025-06-12 09:24:02 -04:00
Antoine Aflalo
8a6ddc668e feat: enhance optimization logic for CBR/CBZ file handling and add tests 2025-06-12 09:23:00 -04:00
Antoine Aflalo
989ca2450d feat: support CBR files in optimize and watch commands
Fixes #75
2025-06-12 09:18:06 -04:00
Antoine Aflalo
970b9019df feat: load CBR files 2025-06-12 09:11:22 -04:00
renovate[bot]
a5f88fe0e9 fix(deps): update module github.com/samber/lo to v1.51.0 (#76)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-11 08:58:04 +00:00
renovate[bot]
c46700d0e5 fix(deps): update module golang.org/x/image to v0.28.0 (#74)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-07 00:59:04 +00:00
renovate[bot]
3d98fe036b chore(deps): update dependency go to v1.24.4 (#73)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-06 19:09:31 +00:00
renovate[bot]
00d7ec0ba9 fix(deps): update golang.org/x/exp digest to dcc06ee (#72)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-06 19:08:57 +00:00
renovate[bot]
8c09db9a9e fix(deps): update golang.org/x/exp digest to b6e5de4 (#71)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-31 02:07:12 +00:00
renovate[bot]
0390f1119f fix(deps): update golang.org/x/exp digest to 65e9200 (#70)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-30 18:45:36 +00:00
renovate[bot]
b62485de3b chore(deps): update anchore/sbom-action action to v0.20.0 (#69)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-14 20:21:27 +00:00
renovate[bot]
8e11eca719 chore(deps): update dependency go to v1.24.3 (#68)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-06 19:38:28 +00:00
renovate[bot]
841bdce097 fix(deps): update golang.org/x/exp digest to ce4c2cf (#67)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-06 03:55:49 +00:00
renovate[bot]
74c0954118 fix(deps): update module golang.org/x/image to v0.27.0 (#66)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-05 22:49:16 +00:00
renovate[bot]
7478f0b71c fix(deps): update module github.com/samber/lo to v1.50.0 (#65)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-26 18:28:12 +00:00
renovate[bot]
a03eba5400 chore(deps): update anchore/sbom-action action to v0.19.0 (#64)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-24 23:25:23 +00:00
renovate[bot]
7546e516cd chore(deps): update sigstore/cosign-installer action to v3.8.2 (#63)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-22 21:53:05 +00:00
renovate[bot]
bef7052163 fix(deps): update golang.org/x/exp digest to 7e4ce0a (#62)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-08 18:03:02 +00:00
renovate[bot]
e04b213fa4 fix(deps): update module golang.org/x/image to v0.26.0 (#61)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-06 17:53:06 +00:00
renovate[bot]
92fa3a54e7 chore(deps): update dependency go to v1.24.2 (#60)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-01 17:06:22 +00:00
renovate[bot]
bc92d36df2 fix(deps): update module github.com/spf13/viper to v1.20.1 (#59)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-26 19:06:44 +00:00
renovate[bot]
9863dd5d98 fix(deps): update module github.com/spf13/viper to v1.20.0 (#58)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-15 17:12:41 +00:00
renovate[bot]
ddd19292d5 fix(deps): update golang.org/x/exp digest to 054e65f (#57)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-06 02:41:33 +00:00
renovate[bot]
6a7914bd83 fix(deps): update module golang.org/x/image to v0.25.0 (#56)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-05 19:37:10 +00:00
renovate[bot]
005d2d35c3 chore(deps): update dependency go to v1.24.1 (#55)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-04 22:46:43 +00:00
renovate[bot]
abcce332e5 fix(deps): update golang.org/x/exp digest to dead583 (#54)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-28 22:01:17 +00:00
renovate[bot]
376656ba2c chore(deps): update sigstore/cosign-installer action to v3.8.1 (#53)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-20 15:55:10 +00:00
renovate[bot]
34288e6bbe fix(deps): update golang.org/x/exp digest to aa4b98e (#52)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-18 18:49:14 +00:00
renovate[bot]
d32ea3e8a9 fix(deps): update module github.com/spf13/cobra to v1.9.1 (#51)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-17 02:47:58 +00:00
renovate[bot]
23256013f5 fix(deps): update golang.org/x/exp digest to eff6e97 (#50)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-15 21:41:43 +00:00
Antoine Aflalo
c87fde31c4 chore: Removes unused import
Removes the `image/jpeg` import, as it is already imported via blank identifier.
2025-02-14 10:55:39 -05:00
Antoine Aflalo
23eb43c691 fix(chapter): fix chapter conversion.
Still need to figure out the memory issues

Consolidates image conversion logic into a dedicated method.

This change streamlines the conversion process by centralizing the
setting of converted image data, extension, and size. It also
introduces a flag to track whether an image has been converted.

The old resource cleanup has been removed since it is not needed anymore.
2025-02-14 10:03:35 -05:00
Antoine Aflalo
4d3391273c ci: Sets up QEMU for cross-platform builds
Configures QEMU to enable emulation of different architectures,
allowing for cross-platform builds and testing in the release workflow.
2025-02-13 20:18:14 -05:00
Antoine Aflalo
2da3bae04a Updates build configuration for multi-platform support
Configures the build process to support multiple platforms (Linux, Darwin, Windows) and architectures (amd64, arm64).

Disables CGO to simplify cross-compilation.

Updates Docker image creation to produce separate images for amd64 and arm64, and creates manifest lists for `latest` and versioned tags.
2025-02-13 20:12:49 -05:00
Antoine Aflalo
a3dfec642c test: add webp converter test 2025-02-13 20:05:08 -05:00
Antoine Aflalo
0303c80feb test: fix path 2025-02-13 20:04:33 -05:00
Antoine Aflalo
efe1696bfa fix(memory): fix possible memory leak and add better tests 2025-02-13 20:02:45 -05:00
Antoine Aflalo
25cd4585b7 feat: revert to use webp executable 2025-02-13 19:47:13 -05:00
Antoine Aflalo
dd7b6a332c refactor: update import paths to use internal package 2025-02-13 19:43:18 -05:00
renovate[bot]
5428134d15 chore(deps): update dependency go to v1.24.0 (#49)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-11 20:04:36 +00:00
renovate[bot]
8d59530234 fix(deps): update golang.org/x/exp digest to 939b2ce (#48)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-10 20:23:31 +00:00
renovate[bot]
af30f34aa6 fix(deps): update golang.org/x/exp digest to f9890c6 (#47)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-07 05:00:13 +00:00
renovate[bot]
b3c412c09d chore(deps): update sigstore/cosign-installer action to v3.8.0 (#46)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-05 00:35:33 +00:00
renovate[bot]
16ba484f28 fix(deps): update module golang.org/x/image to v0.24.0 (#45)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-04 20:39:34 +00:00
renovate[bot]
2de8a81137 chore(deps): update dependency go to v1.23.6 (#44)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-04 17:36:25 +00:00
renovate[bot]
c223c9dca6 fix(deps): update golang.org/x/exp digest to e0ece0d (#43)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 21:04:20 +00:00
renovate[bot]
dcf57c7646 fix(deps): update golang.org/x/exp digest to 3edf0e9 (#42)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 17:46:34 +00:00
renovate[bot]
77e7724de2 fix(deps): update module github.com/samber/lo to v1.49.1 (#41)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 13:22:17 +00:00
renovate[bot]
ea8fd55cc2 fix(deps): update module github.com/samber/lo to v1.49.0 (#40)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-27 04:30:54 +00:00
renovate[bot]
709c53d647 fix(deps): update module github.com/pablodz/inotifywaitgo to v0.0.9 (#39)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-26 20:57:06 +00:00
renovate[bot]
919a53fec7 fix(deps): update module github.com/samber/lo to v1.48.0 (#38)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-26 10:22:50 +00:00
renovate[bot]
d3b3a73b8f chore(deps): update anchore/sbom-action action to v0.18.0 (#37)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-23 20:47:16 +00:00
renovate[bot]
188211e26d fix(deps): update golang.org/x/exp digest to 7588d65 (#36)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-06 21:12:16 +00:00
renovate[bot]
f57a88eaf4 fix(deps): update module github.com/thediveo/enumflag/v2 to v2.0.7 (#35)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-05 19:29:38 +00:00
renovate[bot]
6e6b66b5eb fix(deps): update golang.org/x/exp digest to 7d7fa50 (#34)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-03 21:05:57 +00:00
renovate[bot]
1ff1bed3cc fix(deps): update golang.org/x/exp digest to dd03c70 (#33)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-03 18:34:17 +00:00
renovate[bot]
196938718c fix(deps): update golang.org/x/exp digest to b2144cd (#32)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-17 20:15:57 +00:00
renovate[bot]
9972709d32 fix(deps): update golang.org/x/exp digest to 4a55095 (#31)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-15 18:36:01 +00:00
renovate[bot]
152fa85577 chore(deps): update anchore/sbom-action action to v0.17.9 (#30)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-13 23:01:28 +00:00
renovate[bot]
554fce5d1e fix(deps): update golang.org/x/exp digest to 1829a12 (#29)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-10 23:09:05 +00:00
renovate[bot]
25357e9ec6 fix(deps): update golang.org/x/exp digest to 1443442 (#28)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-10 17:58:23 +00:00
Antoine Aflalo
ecc561263f fix: move image to debian 2024-12-06 18:00:24 -05:00
Antoine Aflalo
fb1056e5e7 ci: remove bash completion 2024-12-06 17:40:42 -05:00
Antoine Aflalo
07bc88bb04 fix: v2 versioning 2024-12-06 17:28:09 -05:00
Antoine Aflalo
8c3665fa53 ci: fix dockerfile 2024-12-06 17:26:25 -05:00
Antoine Aflalo
8dce346997 ci: debug release 2 2024-12-06 17:22:27 -05:00
Antoine Aflalo
4646789e4e ci: debug release 2024-12-06 17:21:12 -05:00
Antoine Aflalo
22ca56c98b ci: fix building 2024-12-06 17:18:28 -05:00
Antoine Aflalo
f45a1d4ed0 ci: remove arm64 2024-12-06 17:10:52 -05:00
Antoine Aflalo
ee53fddf02 ci: fix version 2024-12-06 17:03:42 -05:00
Antoine Aflalo
f416f1ff32 feat: replace webp lib by C libwebp
Avoid having to download anything
2024-12-06 17:01:00 -05:00
renovate[bot]
969993161f fix(deps): update golang.org/x/exp digest to 43b7b7c (#27)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-05 01:57:59 +00:00
renovate[bot]
f6b41f6391 fix(deps): update module golang.org/x/image to v0.23.0 (#26)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-04 18:33:59 +00:00
renovate[bot]
0bb9e4320c chore(deps): update anchore/sbom-action action to v0.17.8 (#25)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-21 18:47:50 +00:00
Antoine Aflalo
35cfe41aa6 Merge pull request #24 from Belphemur/renovate/codecov-codecov-action-5.x
chore(deps): update codecov/codecov-action action to v5
2024-11-18 14:37:12 -05:00
renovate[bot]
021c647a6e chore(deps): update codecov/codecov-action action to v5 2024-11-14 19:14:51 +00:00
renovate[bot]
6217254305 fix(deps): update golang.org/x/exp digest to 2d47ceb (#23)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 23:01:56 +00:00
renovate[bot]
0ad711a24d fix(deps): update golang.org/x/exp digest to 04b2079 (#22)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 19:25:44 +00:00
renovate[bot]
f24e4cc26e fix(deps): update module golang.org/x/image to v0.22.0 (#21)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 00:06:54 +00:00
Antoine Aflalo
1d3a8396f2 Merge pull request #20 from Belphemur/renovate/anchore-sbom-action-0.x 2024-11-05 09:49:07 -05:00
renovate[bot]
497f206c50 chore(deps): update anchore/sbom-action action to v0.17.7 2024-11-05 14:37:06 +00:00
renovate[bot]
9ade876952 chore(deps): update anchore/sbom-action action to v0.17.6 (#19)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-29 16:46:36 +00:00
Antoine Aflalo
103d38c74b Merge pull request #18 from Belphemur/renovate/anchore-sbom-action-0.x 2024-10-22 10:23:05 -04:00
renovate[bot]
80a1afe7c3 chore(deps): update anchore/sbom-action action to v0.17.5 2024-10-21 20:34:41 +00:00
Antoine Aflalo
2de7bc7a04 Merge pull request #17 from Belphemur/renovate/anchore-sbom-action-0.x 2024-10-15 14:29:56 -04:00
renovate[bot]
bccf7a7029 chore(deps): update anchore/sbom-action action to v0.17.4 2024-10-15 17:14:00 +00:00
renovate[bot]
4e80ddfb3a chore(deps): update anchore/sbom-action action to v0.17.3 (#16)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-12 00:29:35 +00:00
renovate[bot]
090bbac593 fix(deps): update golang.org/x/exp digest to f66d83c (#15)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-09 22:19:26 +00:00
renovate[bot]
c8b0f11784 fix(deps): update golang.org/x/exp digest to 225e2ab (#14)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-05 00:22:41 +00:00
Antoine Aflalo
449b57b14e Merge pull request #13 from Belphemur/renovate/golang.org-x-image-0.x
fix(deps): update module golang.org/x/image to v0.21.0
2024-10-04 14:25:05 -04:00
renovate[bot]
0dcb55f06d fix(deps): update module golang.org/x/image to v0.21.0 2024-10-04 16:40:30 +00:00
Antoine Aflalo
33ae460caf Merge pull request #12 from Belphemur/renovate/sigstore-cosign-installer-3.x
chore(deps): update sigstore/cosign-installer action to v3.7.0
2024-10-04 09:57:48 -04:00
renovate[bot]
1af484aea8 chore(deps): update sigstore/cosign-installer action to v3.7.0 2024-10-04 13:31:05 +00:00
Antoine Aflalo
e798a59a43 perf: fix any unhandled errors 2024-09-10 15:10:40 -04:00
Antoine Aflalo
72086d658e refactor: clean up 2024-09-10 13:58:52 -04:00
Antoine Aflalo
a7bca7ee05 ci(qodana): add qodana 2024-09-10 13:54:08 -04:00
Antoine Aflalo
ba82003b53 Merge pull request #11 from Belphemur/renovate
perf(error): better deal with deferred errors
2024-09-09 14:47:12 -04:00
Antoine Aflalo
5f7e7de644 ci(tests): fix possible error with tests 2024-09-09 14:45:30 -04:00
Antoine Aflalo
5b183cca29 perf(error): better deal with deferred errors 2024-09-09 14:45:30 -04:00
Antoine Aflalo
d901be14fa Merge pull request #9 from Belphemur/renovatebot
ci(renovate): auto merge digest
2024-09-09 14:11:56 -04:00
Antoine Aflalo
a80997835a chore(deps): update deps 2024-09-09 14:09:39 -04:00
Antoine Aflalo
37bb12fd61 ci(renovate): auto merge digest 2024-09-09 14:09:23 -04:00
Antoine Aflalo
c19afb9f40 Merge pull request #7 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to 701f63a
2024-09-09 14:06:57 -04:00
renovate[bot]
911e1041ff fix(deps): update golang.org/x/exp digest to 701f63a 2024-09-09 18:06:01 +00:00
Antoine Aflalo
a10d589b67 Merge pull request #8 from Belphemur/fix-ci
ci: always generate and upload test results
2024-09-09 14:05:05 -04:00
Antoine Aflalo
da508fcb3f ci: always generate and upload test results 2024-09-09 14:03:28 -04:00
Antoine Aflalo
57f5282032 ci(renovate): add automerge 2024-09-09 09:27:33 -04:00
Antoine Aflalo
d4f8d8b5ff ci(test): fix the report xml file 2024-09-09 09:25:46 -04:00
Antoine Aflalo
1b026b9dbd fix(watch): add missing split option in log 2024-09-09 09:11:45 -04:00
Antoine Aflalo
12cc8d4e25 Merge pull request #6 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to e7e105d
2024-09-06 17:10:42 -04:00
renovate[bot]
3442b2a845 fix(deps): update golang.org/x/exp digest to e7e105d 2024-09-06 21:08:46 +00:00
Antoine Aflalo
b9a1fb213a Merge pull request #5 from Belphemur/renovate/golang.org-x-image-0.x
fix(deps): update module golang.org/x/image to v0.20.0
2024-09-06 17:07:40 -04:00
renovate[bot]
278ee130e3 fix(deps): update module golang.org/x/image to v0.20.0 2024-09-04 19:30:08 +00:00
Antoine Aflalo
5357ece2b7 perf: use comment of the zip to know if it's converted instead of txt file 2024-08-29 09:38:39 -04:00
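This commit replaces the `Converted.txt` marker (still visible in the removed `cbz` writer near the end of this changeset) with the ZIP archive comment. The exact comment text CBZOptimizer writes is not reproduced here; a minimal sketch of the idea using only `archive/zip`, with a made-up marker format, might look like:

```go
package main

import (
	"archive/zip"
	"fmt"
	"os"
	"strings"
	"time"
)

// writeWithMarker stores the "converted" marker in the archive comment, so no
// extra text entry has to live alongside the pages.
func writeWithMarker(path string) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer f.Close()

	zw := zip.NewWriter(f)
	defer zw.Close()
	return zw.SetComment(fmt.Sprintf("converted=%s", time.Now().Format(time.RFC3339)))
}

// isConverted checks the comment instead of scanning for a Converted.txt entry.
func isConverted(path string) (bool, error) {
	r, err := zip.OpenReader(path)
	if err != nil {
		return false, err
	}
	defer r.Close()
	return strings.HasPrefix(r.Comment, "converted="), nil
}
```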
Antoine Aflalo
dbef43d376 fix(watch): fix watch command not using proper path 2024-08-28 15:06:47 -04:00
Antoine Aflalo
7c63ea49c0 fix(docker): fix docker image config folder 2024-08-28 14:36:14 -04:00
Antoine Aflalo
8a067939af Merge pull request #4 from Belphemur/renovate/major-github-artifact-actions
chore(deps): update actions/upload-artifact action to v4
2024-08-28 14:24:57 -04:00
Antoine Aflalo
f89974ac79 ci: Another attempt at reducing 2024-08-28 14:22:25 -04:00
Antoine Aflalo
ce365a6bdf ci: reduce size of page to pass tests
Fix failing test
2024-08-28 14:16:51 -04:00
renovate[bot]
9e61ff4634 chore(deps): update actions/upload-artifact action to v4 2024-08-28 17:56:03 +00:00
Antoine Aflalo
63a1b592c3 ci: add test result to pipeline 2024-08-28 13:55:33 -04:00
Antoine Aflalo
673484692b perf(webp): improve the error message for page too tall 2024-08-28 13:52:27 -04:00
Antoine Aflalo
ad35e2655f feat(webp): add partial success to conversion
So we only keep images that couldn't be optimized and return the chapter
2024-08-28 13:49:14 -04:00
Antoine Aflalo
d7f55fa886 fix(webp): improve error message in page not convertible 2024-08-28 12:09:40 -04:00
Antoine Aflalo
62638517e4 test: improve testing suite for expected failure 2024-08-28 12:03:33 -04:00
Antoine Aflalo
dbf7f6c262 fix(webp): be sure we split big page when requested 2024-08-28 11:55:53 -04:00
Antoine Aflalo
9ecd5ff3a5 fix(webp): fix the actual maximum limit 2024-08-28 11:53:26 -04:00
Antoine Aflalo
a63d2395f0 fix(webp): better handling of error for page too big for webp 2024-08-28 11:51:06 -04:00
Antoine Aflalo
839ad9ed9d fix(cbz): make pages be the first in the cbz by only be number 2024-08-28 09:16:19 -04:00
Antoine Aflalo
c8879349e1 feat(split): Make the split configurable for the watch command 2024-08-28 09:10:08 -04:00
Antoine Aflalo
5ac59a93c5 feat(split): Make the split configurable for the optimize command 2024-08-28 09:06:49 -04:00
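The split option in these two commits (and the "page too tall" fixes just above) works around WebP's maximum image dimension, commonly cited as 16383 pixels per side. Below is a rough standard-library sketch of cutting a tall page into fixed-height bands; the helper name and chunk handling are illustrative, not the repository's actual implementation.

```go
package main

import (
	"image"
	"image/draw"
)

// splitTall slices an image into vertical chunks no taller than maxHeight.
func splitTall(src image.Image, maxHeight int) []image.Image {
	b := src.Bounds()
	if maxHeight <= 0 || b.Dy() <= maxHeight {
		return []image.Image{src}
	}
	var parts []image.Image
	for y := b.Min.Y; y < b.Max.Y; y += maxHeight {
		h := maxHeight
		if y+h > b.Max.Y {
			h = b.Max.Y - y
		}
		chunk := image.NewRGBA(image.Rect(0, 0, b.Dx(), h))
		// Copy the horizontal band [y, y+h) of the source into the chunk.
		draw.Draw(chunk, chunk.Bounds(), src, image.Pt(b.Min.X, y), draw.Src)
		parts = append(parts, chunk)
	}
	return parts
}
```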
Antoine Aflalo
72c6776793 fix: make the progress more readable 2024-08-27 20:42:26 -04:00
Antoine Aflalo
e0b6d7fcef ci: add bash completion to image 2024-08-27 20:29:38 -04:00
Antoine Aflalo
9305c8fa76 perf: add completion to bash docker image 2024-08-27 20:27:45 -04:00
Antoine Aflalo
9cc45e75cf fix(ci): fix built date 2024-08-27 20:26:29 -04:00
49 changed files with 3374 additions and 1154 deletions

.github/workflows/qodana.yml

@@ -0,0 +1,23 @@
name: Qodana
on:
  workflow_dispatch:
  pull_request:
  push:
    branches:
jobs:
  qodana:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
      checks: write
    steps:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.event.pull_request.head.sha }} # to check out the actual pull request commit, not the merge commit
          fetch-depth: 0 # a full history is required for pull request analysis
      - name: 'Qodana Scan'
        uses: JetBrains/qodana-action@v2024.1
        env:
          QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }}


@@ -19,15 +19,17 @@ jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0 # this is important, otherwise it won't checkout the full tree (i.e. no previous tags)
- uses: actions/setup-go@v5
with:
go-version: 1.23
go-version: 1.25
cache: true
- uses: sigstore/cosign-installer@v3.6.0 # installs cosign
- uses: anchore/sbom-action/download-syft@v0.17.2 # installs syft
- uses: sigstore/cosign-installer@v3.9.2 # installs cosign
- uses: anchore/sbom-action/download-syft@v0.20.5 # installs syft
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- uses: docker/login-action@v3 # login to ghcr
with:
registry: ghcr.io


@@ -10,12 +10,12 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: '1.23'
go-version: '1.25'
- name: Install dependencies
run: go mod tidy
@@ -28,9 +28,23 @@ jobs:
mv go-junit-report /usr/local/bin/
- name: Run tests
run: go test -v 2>&1 ./... -coverprofile=coverage.txt | go-junit-report -set-exit-code > junit.xml
run: |
set -o pipefail
go test -v 2>&1 ./... -coverprofile=coverage.txt | tee test-results.txt
- name: Analyse test results
if: ${{ !cancelled() }}
run: go-junit-report < test-results.txt > junit.xml
- name: Upload test result artifact
if: ${{ !cancelled() }}
uses: actions/upload-artifact@v4
with:
name: test-results
path: |
test-results.txt
junit.xml
retention-days: 7
- name: Upload results to Codecov
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload test results to Codecov

.gitignore

@@ -102,4 +102,5 @@ fabric.properties
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
*__debug_bin*


@@ -9,9 +9,9 @@ changelog:
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
- '^chore:'
- "^docs:"
- "^test:"
- "^chore:"
groups:
- title: Features
regexp: '^.*?feat(\([[:word:]]+\))??!?:.+$'
@@ -24,19 +24,24 @@ changelog:
order: 2
builds:
- id: cbzoptimizer
main: main.go
main: cmd/cbzoptimizer/main.go
goos:
- linux
- darwin
- windows
goarch:
- amd64
- arm64
ignore:
- goos: windows
goarch: arm64
# ensures mod timestamp to be the commit timestamp
mod_timestamp: "{{ .CommitTimestamp }}"
flags:
# trims path
- -trimpath
ldflags:
- -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X meta.date={{ .CommitDate }}
- -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{ .CommitDate }}
env:
- CGO_ENABLED=0
# config the checksum filename
@@ -61,11 +66,25 @@ sboms:
# https://goreleaser.com/customization/docker
dockers:
- image_templates:
- "ghcr.io/belphemur/cbzoptimizer:latest"
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}"
dockerfile: Dockerfile
- "ghcr.io/belphemur/cbzoptimizer:latest-amd64"
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-amd64"
use: buildx
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.name={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source={{.GitURL}}"
- image_templates:
- "ghcr.io/belphemur/cbzoptimizer:latest-arm64"
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-arm64"
use: buildx
goarch: arm64
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.name={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
@@ -98,4 +117,13 @@ docker_signs:
args:
- "sign"
- "${artifact}"
- "--yes" # needed on cosign 2.0.0+
- "--yes" # needed on cosign 2.0.0+
docker_manifests:
- name_template: "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}"
image_templates:
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-amd64"
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-arm64"
- name_template: "ghcr.io/belphemur/cbzoptimizer:latest"
image_templates:
- "ghcr.io/belphemur/cbzoptimizer:latest-amd64"
- "ghcr.io/belphemur/cbzoptimizer:latest-arm64"


@@ -0,0 +1,10 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="GoDfaErrorMayBeNotNil" enabled="true" level="WARNING" enabled_by_default="true">
      <methods>
        <method importPath="github.com/belphemur/CBZOptimizer/converter" receiver="Converter" name="ConvertChapter" />
      </methods>
    </inspection_tool>
  </profile>
</component>

.vscode/launch.json

@@ -0,0 +1,17 @@
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Optimize Testdata",
      "type": "go",
      "request": "launch",
      "mode": "auto",
      "program": "${workspaceFolder}/cmd/cbzoptimizer",
      "args": ["optimize", "${workspaceFolder}/testdata", "-l", "debug"],
      "cwd": "${workspaceFolder}"
    }
  ]
}


@@ -1,21 +1,29 @@
FROM alpine:latest
LABEL authors="Belphemur"
ARG APP_PATH=/usr/local/bin/CBZOptimizer
ENV USER=abc
ENV CONFIG_FOLDER=/config
ENV PUID=99
RUN mkdir -p "${CONFIG_FOLDER}" && adduser \
--disabled-password \
--gecos "" \
--home "$(pwd)" \
--ingroup "users" \
--no-create-home \
--uid "${PUID}" \
"${USER}" && \
chown ${PUID}:${GUID} "${CONFIG_FOLDER}"
COPY CBZOptimizer /usr/local/bin/CBZOptimizer
RUN mkdir -p "${CONFIG_FOLDER}" && \
adduser \
-S \
-H \
-h "${CONFIG_FOLDER}" \
-G "users" \
-u "${PUID}" \
"${USER}" && \
chown ${PUID}:users "${CONFIG_FOLDER}"
RUN apk add --no-cache inotify-tools && chmod +x /usr/local/bin/CBZOptimizer
COPY CBZOptimizer ${APP_PATH}
RUN apk add --no-cache \
inotify-tools \
bash \
bash-completion && \
chmod +x ${APP_PATH} && \
${APP_PATH} completion bash > /etc/bash_completion.d/CBZOptimizer
VOLUME ${CONFIG_FOLDER}
USER ${USER}
ENTRYPOINT ["/usr/local/bin/CBZOptimizer"]

README.md

@@ -1,62 +1,165 @@
# CBZOptimizer
CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality.
CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) and CBR (Comic Book RAR) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality.
**Note**: CBR files are supported as input but are always converted to CBZ format for output.
## Features
- Convert images within CBZ files to different formats (e.g., WebP).
- Convert images within CBZ and CBR files to different formats (e.g., WebP).
- Support for multiple archive formats including CBZ and CBR (CBR files are converted to CBZ format).
- Adjust the quality of the converted images.
- Process multiple chapters in parallel.
- Option to override the original CBZ files.
- Option to override the original files (CBR files are converted to CBZ and original CBR is deleted).
- Watch a folder for new CBZ/CBR files and optimize them automatically.
- Set time limits for chapter conversion to avoid hanging on problematic files.
## Installation
1. Clone the repository:
```sh
git clone https://github.com/belphemur/CBZOptimizer.git
cd CBZOptimizer
```
### Download Binary
2. Install dependencies:
```sh
go mod tidy
```
Download the latest release from [GitHub Releases](https://github.com/belphemur/CBZOptimizer/releases).
### Docker
Pull the Docker image:
```sh
docker pull ghcr.io/belphemur/cbzoptimizer:latest
```
## Usage
### Command Line Interface
The tool provides a CLI command to optimize CBZ files. Below is an example of how to use it:
The tool provides CLI commands to optimize and watch CBZ/CBR files. Below are examples of how to use them:
#### Optimize Command
Optimize all CBZ/CBR files in a folder recursively:
```sh
go run main.go optimize --quality 85 --parallelism 2 --override /path/to/cbz/files
cbzconverter optimize [folder] --quality 85 --parallelism 2 --override --format webp --split
```
With timeout to avoid hanging on problematic chapters:
```sh
cbzconverter optimize [folder] --timeout 10m --quality 85
```
Or with Docker:
```sh
docker run -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics --quality 85 --parallelism 2 --override --format webp --split
```
#### Watch Command
Watch a folder for new CBZ/CBR files and optimize them automatically:
```sh
cbzconverter watch [folder] --quality 85 --override --format webp --split
```
Or with Docker:
```sh
docker run -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest watch /comics --quality 85 --override --format webp --split
```
### Flags
- `--quality`, `-q`: Quality for conversion (0-100). Default is 85.
- `--parallelism`, `-n`: Number of chapters to convert in parallel. Default is 2.
- `--override`, `-o`: Override the original CBZ files. Default is false.
- `--override`, `-o`: Override the original files. For CBZ files, overwrites the original. For CBR files, deletes the original CBR and creates a new CBZ. Default is false.
- `--split`, `-s`: Split long pages into smaller chunks. Default is false.
- `--format`, `-f`: Format to convert the images to (e.g., webp). Default is webp.
- `--timeout`, `-t`: Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout. Default is 0.
- `--log`, `-l`: Set log level; can be 'panic', 'fatal', 'error', 'warn', 'info', 'debug', or 'trace'. Default is info.
## Testing
## Logging
To run the tests, use the following command:
CBZOptimizer uses structured logging with [zerolog](https://github.com/rs/zerolog) for consistent and performant logging output.
### Log Levels
You can control the verbosity of logging using either command-line flags or environment variables:
**Command Line:**
```sh
go test ./... -v
# Set log level to debug for detailed output
cbzconverter --log debug optimize [folder]
# Set log level to error for minimal output
cbzconverter --log error optimize [folder]
```
## GitHub Actions
**Environment Variable:**
The project includes a GitHub Actions workflow to run tests on every push and pull request to the `main` branch. The workflow is defined in `.github/workflows/go.yml`.
```sh
# Set log level via environment variable
LOG_LEVEL=debug cbzconverter optimize [folder]
```
## Contributing
**Docker:**
1. Fork the repository.
2. Create a new branch (`git checkout -b feature-branch`).
3. Commit your changes (`git commit -am 'Add new feature'`).
4. Push to the branch (`git push origin feature-branch`).
5. Create a new Pull Request.
```sh
# Set log level via environment variable in Docker
docker run -e LOG_LEVEL=debug -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics
```
### Available Log Levels
- `panic`: Logs panic level messages and above
- `fatal`: Logs fatal level messages and above
- `error`: Logs error level messages and above
- `warn`: Logs warning level messages and above
- `info`: Logs info level messages and above (default)
- `debug`: Logs debug level messages and above
- `trace`: Logs all messages including trace level
### Examples
```sh
# Default info level logging
cbzconverter optimize comics/
# Debug level for troubleshooting
cbzconverter --log debug optimize comics/
# Quiet operation (only errors and above)
cbzconverter --log error optimize comics/
# Using environment variable
LOG_LEVEL=warn cbzconverter optimize comics/
# Docker with debug logging
docker run -e LOG_LEVEL=debug -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics
```
## Requirements
- For Docker usage: No additional requirements needed
- For binary usage: Needs `libwebp` installed on the system for WebP conversion
## Docker Image
The official Docker image is available at: `ghcr.io/belphemur/cbzoptimizer:latest`
## Troubleshooting
If you encounter issues:
1. Use `--log debug` for detailed logging output
2. Check that all required dependencies are installed
3. Ensure proper file permissions for input/output directories
4. For Docker usage, verify volume mounts are correct
## Support
For issues and questions, please use [GitHub Issues](https://github.com/belphemur/CBZOptimizer/issues).
## License


@@ -1,90 +0,0 @@
package cbz
import (
"archive/zip"
"fmt"
"github.com/belphemur/CBZOptimizer/manga"
"os"
"time"
)
func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
// Create a new ZIP file
zipFile, err := os.Create(outputFilePath)
if err != nil {
return fmt.Errorf("failed to create .cbz file: %w", err)
}
defer zipFile.Close()
// Create a new ZIP writer
zipWriter := zip.NewWriter(zipFile)
err = zipWriter.SetComment("Created by CBZOptimizer")
if err != nil {
return err
}
defer zipWriter.Close()
// Write each page to the ZIP archive
for _, page := range chapter.Pages {
// Construct the file name for the page
var fileName string
if page.IsSplitted {
// Use the format page%03d-%02d for split pages
fileName = fmt.Sprintf("page_%04d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
} else {
// Use the format page%03d for non-split pages
fileName = fmt.Sprintf("page_%04d%s", page.Index, page.Extension)
}
// Create a new file in the ZIP archive
fileWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
Name: fileName,
Method: zip.Store,
Modified: time.Now(),
})
if err != nil {
return fmt.Errorf("failed to create file in .cbz: %w", err)
}
// Write the page contents to the file
_, err = fileWriter.Write(page.Contents.Bytes())
if err != nil {
return fmt.Errorf("failed to write page contents: %w", err)
}
}
// Optionally, write the ComicInfo.xml file if present
if chapter.ComicInfoXml != "" {
comicInfoWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
Name: "ComicInfo.xml",
Method: zip.Deflate,
Modified: time.Now(),
})
if err != nil {
return fmt.Errorf("failed to create ComicInfo.xml in .cbz: %w", err)
}
_, err = comicInfoWriter.Write([]byte(chapter.ComicInfoXml))
if err != nil {
return fmt.Errorf("failed to write ComicInfo.xml contents: %w", err)
}
}
if chapter.IsConverted {
convertedWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
Name: "Converted.txt",
Method: zip.Deflate,
Modified: time.Now(),
})
if err != nil {
return fmt.Errorf("failed to create Converted.txt in .cbz: %w", err)
}
_, err = convertedWriter.Write([]byte(fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime)))
if err != nil {
return fmt.Errorf("failed to write Converted.txt contents: %w", err)
}
}
return nil
}


@@ -1,90 +0,0 @@
package cbz
import (
"archive/zip"
"bufio"
"bytes"
"fmt"
"github.com/araddon/dateparse"
"github.com/belphemur/CBZOptimizer/manga"
"io"
"path/filepath"
"strings"
)
func LoadChapter(filePath string) (*manga.Chapter, error) {
// Open the .cbz file
r, err := zip.OpenReader(filePath)
if err != nil {
return nil, fmt.Errorf("failed to open .cbz file: %w", err)
}
defer r.Close()
chapter := &manga.Chapter{
FilePath: filePath,
}
for _, f := range r.File {
if f.FileInfo().IsDir() {
continue
}
// Open the file inside the zip
rc, err := f.Open()
if err != nil {
return nil, fmt.Errorf("failed to open file inside .cbz: %w", err)
}
// Determine the file extension
ext := strings.ToLower(filepath.Ext(f.Name))
if ext == ".xml" && strings.ToLower(filepath.Base(f.Name)) == "comicinfo.xml" {
// Read the ComicInfo.xml file content
xmlContent, err := io.ReadAll(rc)
if err != nil {
rc.Close()
return nil, fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
}
chapter.ComicInfoXml = string(xmlContent)
} else if ext == ".txt" && strings.ToLower(filepath.Base(f.Name)) == "converted.txt" {
textContent, err := io.ReadAll(rc)
if err != nil {
rc.Close()
return nil, fmt.Errorf("failed to read Converted.xml content: %w", err)
}
scanner := bufio.NewScanner(bytes.NewReader(textContent))
if scanner.Scan() {
convertedTime := scanner.Text()
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err != nil {
rc.Close()
return nil, fmt.Errorf("failed to parse converted time: %w", err)
}
chapter.IsConverted = true
}
} else {
// Read the file contents for page
buf := new(bytes.Buffer)
_, err = io.Copy(buf, rc)
if err != nil {
rc.Close()
return nil, fmt.Errorf("failed to read file contents: %w", err)
}
// Create a new Page object
page := &manga.Page{
Index: uint16(len(chapter.Pages)), // Simple index based on order
Extension: ext,
Size: uint64(buf.Len()),
Contents: buf,
IsSplitted: false,
}
// Add the page to the chapter
chapter.Pages = append(chapter.Pages, page)
}
rc.Close()
}
return chapter, nil
}


@@ -0,0 +1,195 @@
package commands
import (
"fmt"
"os"
"path/filepath"
"strings"
"sync"
utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/thediveo/enumflag/v2"
)
var converterType constant.ConversionFormat
func init() {
command := &cobra.Command{
Use: "optimize [folder]",
Short: "Optimize all CBZ/CBR files in a folder recursively",
Long: "Optimize all CBZ/CBR files in a folder recursively.\nIt will take all the different pages in the CBZ/CBR files and convert them to the given format.\nThe original CBZ/CBR files will be kept intact depending if you choose to override or not.",
RunE: ConvertCbzCommand,
Args: cobra.ExactArgs(1),
}
formatFlag := enumflag.New(&converterType, "format", constant.CommandValue, enumflag.EnumCaseInsensitive)
_ = formatFlag.RegisterCompletion(command, "format", constant.HelpText)
command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
command.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
command.Flags().BoolP("override", "o", false, "Override the original CBZ/CBR files")
command.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
command.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout")
command.PersistentFlags().VarP(
formatFlag,
"format", "f",
fmt.Sprintf("Format to convert the images to: %s", constant.ListAll()))
command.PersistentFlags().Lookup("format").NoOptDefVal = constant.DefaultConversion.String()
AddCommand(command)
}
func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
log.Info().Str("command", "optimize").Msg("Starting optimize command")
path := args[0]
if path == "" {
log.Error().Msg("Path argument is required but empty")
return fmt.Errorf("path is required")
}
log.Debug().Str("input_path", path).Msg("Validating input path")
if !utils2.IsValidFolder(path) {
log.Error().Str("input_path", path).Msg("Path validation failed - not a valid folder")
return fmt.Errorf("the path needs to be a folder")
}
log.Debug().Str("input_path", path).Msg("Input path validated successfully")
log.Debug().Msg("Parsing command-line flags")
quality, err := cmd.Flags().GetUint8("quality")
if err != nil || quality <= 0 || quality > 100 {
log.Error().Err(err).Uint8("quality", quality).Msg("Invalid quality value")
return fmt.Errorf("invalid quality value")
}
log.Debug().Uint8("quality", quality).Msg("Quality parameter validated")
override, err := cmd.Flags().GetBool("override")
if err != nil {
log.Error().Err(err).Msg("Failed to parse override flag")
return fmt.Errorf("invalid quality value")
}
log.Debug().Bool("override", override).Msg("Override parameter parsed")
split, err := cmd.Flags().GetBool("split")
if err != nil {
log.Error().Err(err).Msg("Failed to parse split flag")
return fmt.Errorf("invalid split value")
}
log.Debug().Bool("split", split).Msg("Split parameter parsed")
timeout, err := cmd.Flags().GetDuration("timeout")
if err != nil {
log.Error().Err(err).Msg("Failed to parse timeout flag")
return fmt.Errorf("invalid timeout value")
}
log.Debug().Dur("timeout", timeout).Msg("Timeout parameter parsed")
parallelism, err := cmd.Flags().GetInt("parallelism")
if err != nil || parallelism < 1 {
log.Error().Err(err).Int("parallelism", parallelism).Msg("Invalid parallelism value")
return fmt.Errorf("invalid parallelism value")
}
log.Debug().Int("parallelism", parallelism).Msg("Parallelism parameter validated")
log.Debug().Str("converter_format", converterType.String()).Msg("Initializing converter")
chapterConverter, err := converter.Get(converterType)
if err != nil {
log.Error().Str("converter_format", converterType.String()).Err(err).Msg("Failed to get chapter converter")
return fmt.Errorf("failed to get chapterConverter: %v", err)
}
log.Debug().Str("converter_format", converterType.String()).Msg("Converter initialized successfully")
log.Debug().Msg("Preparing converter")
err = chapterConverter.PrepareConverter()
if err != nil {
log.Error().Err(err).Msg("Failed to prepare converter")
return fmt.Errorf("failed to prepare converter: %v", err)
}
log.Debug().Msg("Converter prepared successfully")
// Channel to manage the files to process
fileChan := make(chan string)
// Channel to collect errors
errorChan := make(chan error, parallelism)
// WaitGroup to wait for all goroutines to finish
var wg sync.WaitGroup
// Start worker goroutines
log.Debug().Int("worker_count", parallelism).Msg("Starting worker goroutines")
for i := 0; i < parallelism; i++ {
wg.Add(1)
go func(workerID int) {
defer wg.Done()
log.Debug().Int("worker_id", workerID).Msg("Worker started")
for path := range fileChan {
log.Debug().Int("worker_id", workerID).Str("file_path", path).Msg("Worker processing file")
err := utils2.Optimize(&utils2.OptimizeOptions{
ChapterConverter: chapterConverter,
Path: path,
Quality: quality,
Override: override,
Split: split,
Timeout: timeout,
})
if err != nil {
log.Error().Int("worker_id", workerID).Str("file_path", path).Err(err).Msg("Worker encountered error")
errorChan <- fmt.Errorf("error processing file %s: %w", path, err)
} else {
log.Debug().Int("worker_id", workerID).Str("file_path", path).Msg("Worker completed file successfully")
}
}
log.Debug().Int("worker_id", workerID).Msg("Worker finished")
}(i)
}
log.Debug().Int("worker_count", parallelism).Msg("All worker goroutines started")
// Walk the path and send files to the channel
log.Debug().Str("search_path", path).Msg("Starting filesystem walk for CBZ/CBR files")
err = filepath.WalkDir(path, func(filePath string, info os.DirEntry, err error) error {
if err != nil {
log.Error().Str("file_path", filePath).Err(err).Msg("Error during filesystem walk")
return err
}
if !info.IsDir() {
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
log.Debug().Str("file_path", filePath).Str("file_name", fileName).Msg("Found CBZ/CBR file")
fileChan <- filePath
}
}
return nil
})
if err != nil {
log.Error().Str("search_path", path).Err(err).Msg("Filesystem walk failed")
return fmt.Errorf("error walking the path: %w", err)
}
log.Debug().Str("search_path", path).Msg("Filesystem walk completed")
close(fileChan) // Close the channel to signal workers to stop
log.Debug().Msg("File channel closed, waiting for workers to complete")
wg.Wait() // Wait for all workers to finish
log.Debug().Msg("All workers completed")
close(errorChan) // Close the error channel
var errs []error
for err := range errorChan {
errs = append(errs, err)
log.Error().Err(err).Msg("Collected processing error")
}
if len(errs) > 0 {
log.Error().Int("error_count", len(errs)).Msg("Command completed with errors")
return fmt.Errorf("encountered errors: %v", errs)
}
log.Info().Str("search_path", path).Msg("Optimize command completed successfully")
return nil
}


@@ -0,0 +1,174 @@
package commands
import (
"context"
"log"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/spf13/cobra"
)
// MockConverter is a mock implementation of the Converter interface
type MockConverter struct{}
func (m *MockConverter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
chapter.IsConverted = true
chapter.ConvertedTime = time.Now()
return chapter, nil
}
func (m *MockConverter) Format() constant.ConversionFormat {
return constant.WebP
}
func (m *MockConverter) PrepareConverter() error {
return nil
}
func TestConvertCbzCommand(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_cbz")
if err != nil {
log.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Locate the testdata directory
testdataDir := filepath.Join("../../../testdata")
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Fatalf("testdata directory not found")
}
// Copy sample CBZ/CBR files from testdata to the temporary directory
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() {
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
}
return os.WriteFile(destPath, data, info.Mode())
}
}
return nil
})
if err != nil {
t.Fatalf("Failed to copy sample files: %v", err)
}
// Mock the converter.Get function
originalGet := converter.Get
converter.Get = func(format constant.ConversionFormat) (converter.Converter, error) {
return &MockConverter{}, nil
}
defer func() { converter.Get = originalGet }()
// Set up the command
cmd := &cobra.Command{
Use: "optimize",
}
cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
cmd.Flags().BoolP("override", "o", false, "Override the original CBZ/CBR files")
cmd.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
cmd.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout")
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed: %v", err)
}
// Track expected converted files for verification
expectedFiles := make(map[string]bool)
convertedFiles := make(map[string]bool)
// First pass: identify original files and expected converted filenames
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
if !strings.Contains(fileName, "_converted") {
// This is an original file, determine expected converted filename
baseName := strings.TrimSuffix(info.Name(), filepath.Ext(info.Name()))
expectedConverted := baseName + "_converted.cbz"
expectedFiles[expectedConverted] = false // false means not yet found
}
}
return nil
})
if err != nil {
t.Fatalf("Error identifying original files: %v", err)
}
// Second pass: verify converted files exist and are properly converted
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
fileName := info.Name()
// Check if this is a converted file (should only be .cbz, never .cbr)
if strings.HasSuffix(fileName, "_converted.cbz") {
convertedFiles[fileName] = true
expectedFiles[fileName] = true // Mark as found
t.Logf("Archive file found: %s", path)
// Load the converted chapter
chapter, err := cbz.LoadChapter(path)
if err != nil {
return err
}
// Check if the chapter is marked as converted
if !chapter.IsConverted {
t.Errorf("Chapter is not marked as converted: %s", path)
}
// Check if the ConvertedTime is set
if chapter.ConvertedTime.IsZero() {
t.Errorf("ConvertedTime is not set for chapter: %s", path)
}
t.Logf("Archive file [%s] is converted: %s", path, chapter.ConvertedTime)
} else if strings.HasSuffix(fileName, "_converted.cbr") {
t.Errorf("Found incorrectly named converted file: %s (should be .cbz, not .cbr)", fileName)
}
return nil
})
if err != nil {
t.Fatalf("Error verifying converted files: %v", err)
}
// Verify all expected files were found
for expectedFile, found := range expectedFiles {
if !found {
t.Errorf("Expected converted file not found: %s", expectedFile)
}
}
// Log summary
t.Logf("Found %d converted files", len(convertedFiles))
}
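The mocking above works only because converter.Get is exposed as a package-level function variable, which the test reassigns and then restores in a deferred call. A minimal, self-contained sketch of that pattern (hypothetical names, not the converter package's actual declarations):

package main

import "fmt"

// Get is deliberately a variable of function type so callers use it like a
// function while tests can swap in a stub.
var Get = func(format string) (string, error) {
	return "real converter for " + format, nil
}

func main() {
	prod, _ := Get("webp")
	fmt.Println(prod) // real converter for webp

	// Test-style swap: replace the dependency, restore it afterwards.
	original := Get
	Get = func(format string) (string, error) { return "mock converter", nil }
	defer func() { Get = original }()

	mocked, _ := Get("webp")
	fmt.Println(mocked) // mock converter
}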


@@ -0,0 +1,135 @@
package commands
import (
"fmt"
"os"
"path/filepath"
"runtime"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/thediveo/enumflag/v2"
)
// Map zerolog levels to their textual representations
var LogLevelIds = map[zerolog.Level][]string{
zerolog.PanicLevel: {"panic"},
zerolog.FatalLevel: {"fatal"},
zerolog.ErrorLevel: {"error"},
zerolog.WarnLevel: {"warn", "warning"},
zerolog.InfoLevel: {"info"},
zerolog.DebugLevel: {"debug"},
zerolog.TraceLevel: {"trace"},
}
// Global log level variable with default
var logLevel zerolog.Level = zerolog.InfoLevel
var rootCmd = &cobra.Command{
Use: "cbzconverter",
Short: "Convert CBZ files using a specified converter",
}
func SetVersionInfo(version, commit, date string) {
rootCmd.Version = fmt.Sprintf("%s (Built on %s from Git SHA %s)", version, date, commit)
}
func getPath() string {
return filepath.Join(map[string]string{
"windows": filepath.Join(os.Getenv("APPDATA")),
"darwin": filepath.Join(os.Getenv("HOME"), ".config"),
"linux": filepath.Join(os.Getenv("HOME"), ".config"),
}[runtime.GOOS], "CBZOptimizer")
}
func init() {
configFolder := getPath()
viper.SetConfigName("config")
viper.SetConfigType("yaml")
viper.AddConfigPath(configFolder)
viper.SetEnvPrefix("CBZ")
viper.AutomaticEnv()
// Add log level flag (accepts zerolog levels: panic, fatal, error, warn, info, debug, trace)
ef := enumflag.New(&logLevel, "log", LogLevelIds, enumflag.EnumCaseInsensitive)
rootCmd.PersistentFlags().VarP(
ef,
"log", "l",
"Set log level; can be 'panic', 'fatal', 'error', 'warn', 'info', 'debug', or 'trace'")
ef.RegisterCompletion(rootCmd, "log", enumflag.Help[zerolog.Level]{
zerolog.PanicLevel: "Only log panic messages",
zerolog.FatalLevel: "Log fatal and panic messages",
zerolog.ErrorLevel: "Log error, fatal, and panic messages",
zerolog.WarnLevel: "Log warn, error, fatal, and panic messages",
zerolog.InfoLevel: "Log info, warn, error, fatal, and panic messages",
zerolog.DebugLevel: "Log debug, info, warn, error, fatal, and panic messages",
zerolog.TraceLevel: "Log all messages including trace",
})
// Add log level environment variable support
viper.BindEnv("log", "LOG_LEVEL")
viper.BindPFlag("log", rootCmd.PersistentFlags().Lookup("log"))
rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
ConfigureLogging()
}
// Ensure the configuration directory exists
err := os.MkdirAll(configFolder, os.ModePerm)
if err != nil {
panic(fmt.Errorf("fatal error config file: %w", err))
}
if err := viper.ReadInConfig(); err != nil {
if _, ok := err.(viper.ConfigFileNotFoundError); ok {
err := viper.SafeWriteConfig()
if err != nil {
panic(fmt.Errorf("fatal error config file: %w", err))
}
} else {
panic(fmt.Errorf("fatal error config file: %w", err))
}
}
}
// Execute executes the root command.
func Execute() {
if err := rootCmd.Execute(); err != nil {
log.Fatal().Err(err).Msg("Command execution failed")
}
}
func AddCommand(cmd *cobra.Command) {
rootCmd.AddCommand(cmd)
}
// ConfigureLogging sets up zerolog based on command-line flags and environment variables
func ConfigureLogging() {
// Start with default log level (info)
level := zerolog.InfoLevel
// Check LOG_LEVEL environment variable first
envLogLevel := viper.GetString("log")
if envLogLevel != "" {
if parsedLevel, err := zerolog.ParseLevel(envLogLevel); err == nil {
level = parsedLevel
}
}
// The command-line --log flag takes precedence over the environment variable.
// logLevel is populated during flag parsing, so if it differs from the default, prefer it.
if logLevel != zerolog.InfoLevel {
level = logLevel
}
// Set the global log level
zerolog.SetGlobalLevel(level)
// Configure console writer for readable output
log.Logger = log.Output(zerolog.ConsoleWriter{
Out: os.Stderr,
NoColor: false,
})
}
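ConfigureLogging resolves the level in three steps: default to info, let the LOG_LEVEL environment variable override the default, and let the --log flag win over both. A minimal standalone sketch of that precedence using zerolog's public API (the resolveLevel helper and its flagChanged argument are hypothetical; the real code compares logLevel against the default instead):

package main

import (
	"os"

	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
)

// resolveLevel mirrors the precedence above: info by default, then LOG_LEVEL,
// then an explicitly set flag value.
func resolveLevel(flagLevel zerolog.Level, flagChanged bool) zerolog.Level {
	level := zerolog.InfoLevel
	if env := os.Getenv("LOG_LEVEL"); env != "" {
		if parsed, err := zerolog.ParseLevel(env); err == nil {
			level = parsed
		}
	}
	if flagChanged {
		level = flagLevel
	}
	return level
}

func main() {
	zerolog.SetGlobalLevel(resolveLevel(zerolog.DebugLevel, true))
	log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
	log.Debug().Msg("visible because the flag asked for debug")
}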


@@ -1,18 +1,19 @@
package cmd
package commands
import (
"fmt"
"github.com/belphemur/CBZOptimizer/converter"
"github.com/belphemur/CBZOptimizer/converter/constant"
"github.com/belphemur/CBZOptimizer/utils"
"github.com/pablodz/inotifywaitgo/inotifywaitgo"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/thediveo/enumflag/v2"
"log"
"runtime"
"strings"
"sync"
utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/pablodz/inotifywaitgo/inotifywaitgo"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/thediveo/enumflag/v2"
)
func init() {
@@ -21,8 +22,8 @@ func init() {
}
command := &cobra.Command{
Use: "watch [folder]",
Short: "Watch a folder for new CBZ files",
Long: "Watch a folder for new CBZ files.\nIt will watch a folder for new CBZ files and optimize them.",
Short: "Watch a folder for new CBZ/CBR files",
Long: "Watch a folder for new CBZ/CBR files.\nIt will watch a folder for new CBZ/CBR files and optimize them.",
RunE: WatchCommand,
Args: cobra.ExactArgs(1),
}
@@ -32,9 +33,15 @@ func init() {
command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
_ = viper.BindPFlag("quality", command.Flags().Lookup("quality"))
command.Flags().BoolP("override", "o", true, "Override the original CBZ files")
command.Flags().BoolP("override", "o", true, "Override the original CBZ/CBR files")
_ = viper.BindPFlag("override", command.Flags().Lookup("override"))
command.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
_ = viper.BindPFlag("split", command.Flags().Lookup("split"))
command.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout")
_ = viper.BindPFlag("timeout", command.Flags().Lookup("timeout"))
command.PersistentFlags().VarP(
formatFlag,
"format", "f",
@@ -50,7 +57,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
return fmt.Errorf("path is required")
}
if !utils.IsValidFolder(path) {
if !utils2.IsValidFolder(path) {
return fmt.Errorf("the path needs to be a folder")
}
@@ -61,6 +68,10 @@ func WatchCommand(_ *cobra.Command, args []string) error {
override := viper.GetBool("override")
split := viper.GetBool("split")
timeout := viper.GetDuration("timeout")
converterType := constant.FindConversionFormat(viper.GetString("format"))
chapterConverter, err := converter.Get(converterType)
if err != nil {
@@ -71,7 +82,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
if err != nil {
return fmt.Errorf("failed to prepare converter: %v", err)
}
log.Printf("Watching [%s] with [override: %t, quality: %d, format: %s]", path, override, quality, converterType.String())
log.Info().Str("path", path).Bool("override", override).Uint8("quality", quality).Str("format", converterType.String()).Bool("split", split).Msg("Watching directory")
events := make(chan inotifywaitgo.FileEvent)
errors := make(chan error)
@@ -100,16 +111,24 @@ func WatchCommand(_ *cobra.Command, args []string) error {
go func() {
defer wg.Done()
for event := range events {
log.Printf("[Event]%s, %v\n", event.Filename, event.Events)
log.Debug().Str("file", event.Filename).Interface("events", event.Events).Msg("File event")
if !strings.HasSuffix(strings.ToLower(event.Filename), ".cbz") {
filename := strings.ToLower(event.Filename)
if !strings.HasSuffix(filename, ".cbz") && !strings.HasSuffix(filename, ".cbr") {
continue
}
for _, e := range event.Events {
switch e {
case inotifywaitgo.CLOSE_WRITE, inotifywaitgo.MOVE:
err := utils.Optimize(chapterConverter, event.Filename, quality, override)
err := utils2.Optimize(&utils2.OptimizeOptions{
ChapterConverter: chapterConverter,
Path: event.Filename,
Quality: quality,
Override: override,
Split: split,
Timeout: timeout,
})
if err != nil {
errors <- fmt.Errorf("error processing file %s: %w", event.Filename, err)
}
@@ -124,7 +143,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
go func() {
defer wg.Done()
for err := range errors {
log.Printf("Error: %v\n", err)
log.Error().Err(err).Msg("Watch error")
}
}()
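The watch loop now hands utils2.Optimize a single options struct instead of the old positional arguments, so new knobs such as Split and Timeout can be added without touching every call site. A minimal sketch of that options-struct pattern (the ChapterConverter field is omitted and the Optimize body is hypothetical; only the field names come from the diff):

package main

import (
	"fmt"
	"time"
)

// OptimizeOptions groups the conversion parameters; adding a field later does
// not break existing callers.
type OptimizeOptions struct {
	Path     string
	Quality  uint8
	Override bool
	Split    bool
	Timeout  time.Duration // 0 means no timeout
}

// Optimize is a stand-in that only reports what it was asked to do.
func Optimize(opts *OptimizeOptions) error {
	fmt.Printf("optimizing %s (quality=%d, override=%t, split=%t, timeout=%s)\n",
		opts.Path, opts.Quality, opts.Override, opts.Split, opts.Timeout)
	return nil
}

func main() {
	_ = Optimize(&OptimizeOptions{Path: "chapter.cbz", Quality: 85, Timeout: 30 * time.Second})
}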

cmd/cbzoptimizer/main.go

@@ -0,0 +1,17 @@
package main
import (
"github.com/belphemur/CBZOptimizer/v2/cmd/cbzoptimizer/commands"
)
var (
version = "dev"
commit = "none"
date = "unknown"
)
func main() {
commands.SetVersionInfo(version, commit, date)
commands.Execute()
}


@@ -1,128 +0,0 @@
package cmd
import (
"fmt"
"github.com/belphemur/CBZOptimizer/converter"
"github.com/belphemur/CBZOptimizer/converter/constant"
"github.com/belphemur/CBZOptimizer/utils"
"github.com/spf13/cobra"
"github.com/thediveo/enumflag/v2"
"os"
"path/filepath"
"strings"
"sync"
)
var converterType constant.ConversionFormat
func init() {
command := &cobra.Command{
Use: "optimize [folder]",
Short: "Optimize all CBZ files in a folder recursively",
Long: "Optimize all CBZ files in a folder recursively.\nIt will take all the different pages in the CBZ files and convert them to the given format.\nThe original CBZ files will be kept intact depending if you choose to override or not.",
RunE: ConvertCbzCommand,
Args: cobra.ExactArgs(1),
}
formatFlag := enumflag.New(&converterType, "format", constant.CommandValue, enumflag.EnumCaseInsensitive)
_ = formatFlag.RegisterCompletion(command, "format", constant.HelpText)
command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
command.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
command.Flags().BoolP("override", "o", false, "Override the original CBZ files")
command.PersistentFlags().VarP(
formatFlag,
"format", "f",
fmt.Sprintf("Format to convert the images to: %s", constant.ListAll()))
command.PersistentFlags().Lookup("format").NoOptDefVal = constant.DefaultConversion.String()
AddCommand(command)
}
func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
path := args[0]
if path == "" {
return fmt.Errorf("path is required")
}
if !utils.IsValidFolder(path) {
return fmt.Errorf("the path needs to be a folder")
}
quality, err := cmd.Flags().GetUint8("quality")
if err != nil || quality <= 0 || quality > 100 {
return fmt.Errorf("invalid quality value")
}
override, err := cmd.Flags().GetBool("override")
if err != nil {
return fmt.Errorf("invalid quality value")
}
parallelism, err := cmd.Flags().GetInt("parallelism")
if err != nil || parallelism < 1 {
return fmt.Errorf("invalid parallelism value")
}
chapterConverter, err := converter.Get(converterType)
if err != nil {
return fmt.Errorf("failed to get chapterConverter: %v", err)
}
err = chapterConverter.PrepareConverter()
if err != nil {
return fmt.Errorf("failed to prepare converter: %v", err)
}
// Channel to manage the files to process
fileChan := make(chan string)
// Channel to collect errors
errorChan := make(chan error, parallelism)
// WaitGroup to wait for all goroutines to finish
var wg sync.WaitGroup
// Start worker goroutines
for i := 0; i < parallelism; i++ {
wg.Add(1)
go func() {
defer wg.Done()
for path := range fileChan {
err := utils.Optimize(chapterConverter, path, quality, override)
if err != nil {
errorChan <- fmt.Errorf("error processing file %s: %w", path, err)
}
}
}()
}
// Walk the path and send files to the channel
err = filepath.WalkDir(path, func(path string, info os.DirEntry, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") {
fileChan <- path
}
return nil
})
if err != nil {
return fmt.Errorf("error walking the path: %w", err)
}
close(fileChan) // Close the channel to signal workers to stop
wg.Wait() // Wait for all workers to finish
close(errorChan) // Close the error channel
var errs []error
for err := range errorChan {
errs = append(errs, err)
}
if len(errs) > 0 {
return fmt.Errorf("encountered errors: %v", errs)
}
return nil
}


@@ -1,121 +0,0 @@
package cmd
import (
"github.com/belphemur/CBZOptimizer/cbz"
"github.com/belphemur/CBZOptimizer/converter"
"github.com/belphemur/CBZOptimizer/converter/constant"
"github.com/belphemur/CBZOptimizer/manga"
"github.com/spf13/cobra"
"log"
"os"
"path/filepath"
"strings"
"testing"
"time"
)
// MockConverter is a mock implementation of the Converter interface
type MockConverter struct{}
func (m *MockConverter) Format() constant.ConversionFormat {
return constant.WebP
}
func (m *MockConverter) ConvertChapter(chapter *manga.Chapter, quality uint8, progress func(string)) (*manga.Chapter, error) {
// Simulate conversion by setting the IsConverted flag
chapter.IsConverted = true
chapter.ConvertedTime = time.Now()
return chapter, nil
}
func (m *MockConverter) PrepareConverter() error {
return nil
}
func TestConvertCbzCommand(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_cbz")
if err != nil {
log.Fatal(err)
}
defer os.RemoveAll(tempDir) // Clean up the temp directory when done
// Locate the testdata directory
testdataDir := filepath.Join("../testdata")
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Fatalf("testdata directory not found")
}
// Copy sample CBZ files from testdata to the temporary directory
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
}
return os.WriteFile(destPath, data, info.Mode())
}
return nil
})
if err != nil {
t.Fatalf("Failed to copy sample files: %v", err)
}
// Mock the converter.Get function
originalGet := converter.Get
converter.Get = func(format constant.ConversionFormat) (converter.Converter, error) {
return &MockConverter{}, nil
}
defer func() { converter.Get = originalGet }()
// Set up the command
cmd := &cobra.Command{
Use: "optimize",
}
cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
cmd.Flags().BoolP("override", "o", false, "Override the original CBZ files")
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed: %v", err)
}
// Verify the results
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() || !strings.HasSuffix(info.Name(), "_converted.cbz") {
return nil
}
t.Logf("CBZ file found: %s", path)
// Load the converted chapter
chapter, err := cbz.LoadChapter(path)
if err != nil {
return err
}
// Check if the chapter is marked as converted
if !chapter.IsConverted {
t.Errorf("Chapter is not marked as converted: %s", path)
}
// Check if the ConvertedTime is set
if chapter.ConvertedTime.IsZero() {
t.Errorf("ConvertedTime is not set for chapter: %s", path)
}
t.Logf("CBZ file [%s] is converted: %s", path, chapter.ConvertedTime)
return nil
})
if err != nil {
t.Fatalf("Error verifying converted files: %v", err)
}
}


@@ -1,62 +0,0 @@
package cmd
import (
"fmt"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"os"
"path/filepath"
"runtime"
)
var rootCmd = &cobra.Command{
Use: "cbzconverter",
Short: "Convert CBZ files using a specified converter",
}
func SetVersionInfo(version, commit, date string) {
rootCmd.Version = fmt.Sprintf("%s (Built on %s from Git SHA %s)", version, date, commit)
}
func getPath() string {
return filepath.Join(map[string]string{
"windows": filepath.Join(os.Getenv("APPDATA")),
"darwin": filepath.Join(os.Getenv("HOME"), ".config"),
"linux": filepath.Join(os.Getenv("HOME"), ".config"),
}[runtime.GOOS], "CBZOptimizer")
}
func init() {
configFolder := getPath()
viper.SetConfigName("config")
viper.SetConfigType("yaml")
viper.AddConfigPath(configFolder)
viper.SetEnvPrefix("CBZ")
viper.AutomaticEnv()
err := os.MkdirAll(configFolder, os.ModePerm)
if err != nil {
panic(fmt.Errorf("fatal error config file: %w", err))
}
if err := viper.ReadInConfig(); err != nil {
if _, ok := err.(viper.ConfigFileNotFoundError); ok {
err := viper.SafeWriteConfig()
if err != nil {
panic(fmt.Errorf("fatal error config file: %w", err))
}
} else {
panic(fmt.Errorf("fatal error config file: %w", err))
}
}
}
// Execute executes the root command.
func Execute() {
if err := rootCmd.Execute(); err != nil {
fmt.Println(err)
os.Exit(1)
}
}
func AddCommand(cmd *cobra.Command) {
rootCmd.AddCommand(cmd)
}


@@ -1,162 +0,0 @@
package converter
import (
"bytes"
"github.com/belphemur/CBZOptimizer/manga"
"image"
"image/jpeg"
"os"
"testing"
)
func TestConvertChapter(t *testing.T) {
testCases := []struct {
name string
genTestChapter func(path string) (*manga.Chapter, error)
}{
{
name: "All split pages",
genTestChapter: genBigPages,
},
{
name: "No split pages",
genTestChapter: genSmallPages,
},
{
name: "Mix of split and no split pages",
genTestChapter: genMixSmallBig,
},
}
// Create a temporary CBZ file shared by the test chapter generators
temp, err := os.CreateTemp("", "test_chapter_*.cbz")
if err != nil {
t.Fatalf("failed to create temporary file: %v", err)
}
defer os.Remove(temp.Name())
for _, converter := range Available() {
converter, err := Get(converter)
if err != nil {
t.Fatalf("failed to get converter: %v", err)
}
t.Run(converter.Format().String(), func(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
chapter, err := tc.genTestChapter(temp.Name())
if err != nil {
t.Fatalf("failed to load test genTestChapter: %v", err)
}
quality := uint8(80)
progress := func(msg string) {
t.Log(msg)
}
convertedChapter, err := converter.ConvertChapter(chapter, quality, progress)
if err != nil {
t.Fatalf("failed to convert genTestChapter: %v", err)
}
if len(convertedChapter.Pages) == 0 {
t.Fatalf("no pages were converted")
}
for _, page := range convertedChapter.Pages {
if page.Extension != ".webp" {
t.Errorf("page %d was not converted to webp format", page.Index)
}
}
})
}
})
}
}
func genBigPages(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer file.Close()
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 10000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
func genSmallPages(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer file.Close()
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
func genMixSmallBig(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer file.Close()
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000*(i+1)))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}


@@ -1,231 +0,0 @@
package webp
import (
"bytes"
"fmt"
"github.com/belphemur/CBZOptimizer/converter/constant"
packer2 "github.com/belphemur/CBZOptimizer/manga"
"github.com/oliamb/cutter"
"golang.org/x/exp/slices"
_ "golang.org/x/image/webp"
"image"
_ "image/jpeg"
"image/png"
"io"
"runtime"
"sync"
"sync/atomic"
)
type Converter struct {
maxHeight int
cropHeight int
isPrepared bool
}
func (converter *Converter) Format() (format constant.ConversionFormat) {
return constant.WebP
}
func New() *Converter {
return &Converter{
//maxHeight: 16383 / 2,
maxHeight: 4000,
cropHeight: 2000,
isPrepared: false,
}
}
func (converter *Converter) PrepareConverter() error {
if converter.isPrepared {
return nil
}
err := PrepareEncoder()
if err != nil {
return err
}
converter.isPrepared = true
return nil
}
func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uint8, progress func(string)) (*packer2.Chapter, error) {
err := converter.PrepareConverter()
if err != nil {
return nil, err
}
var wgConvertedPages sync.WaitGroup
maxGoroutines := runtime.NumCPU()
pagesChan := make(chan *packer2.PageContainer, maxGoroutines)
errChan := make(chan error, maxGoroutines)
var wgPages sync.WaitGroup
wgPages.Add(len(chapter.Pages))
guard := make(chan struct{}, maxGoroutines)
pagesMutex := sync.Mutex{}
var pages []*packer2.Page
var totalPages = uint32(len(chapter.Pages))
go func() {
for page := range pagesChan {
guard <- struct{}{} // would block if guard channel is already filled
go func(pageToConvert *packer2.PageContainer) {
defer wgConvertedPages.Done()
convertedPage, err := converter.convertPage(pageToConvert, quality)
if err != nil {
if convertedPage == nil {
errChan <- err
<-guard
return
}
buffer := new(bytes.Buffer)
err := png.Encode(buffer, convertedPage.Image)
if err != nil {
errChan <- err
<-guard
return
}
convertedPage.Page.Contents = buffer
convertedPage.Page.Extension = ".png"
convertedPage.Page.Size = uint64(buffer.Len())
}
pagesMutex.Lock()
pages = append(pages, convertedPage.Page)
progress(fmt.Sprintf("Converted %d/%d pages to %s format", len(pages), totalPages, converter.Format()))
pagesMutex.Unlock()
<-guard
}(page)
}
}()
for _, page := range chapter.Pages {
go func(page *packer2.Page) {
defer wgPages.Done()
splitNeeded, img, format, err := converter.checkPageNeedsSplit(page)
if err != nil {
errChan <- fmt.Errorf("error checking if page %d of genTestChapter %s needs split: %v", page.Index, chapter.FilePath, err)
return
}
if !splitNeeded {
wgConvertedPages.Add(1)
pagesChan <- packer2.NewContainer(page, img, format)
return
}
images, err := converter.cropImage(img)
if err != nil {
errChan <- fmt.Errorf("error converting page %d of genTestChapter %s to webp: %v", page.Index, chapter.FilePath, err)
return
}
atomic.AddUint32(&totalPages, uint32(len(images)-1))
for i, img := range images {
page := &packer2.Page{Index: page.Index, IsSplitted: true, SplitPartIndex: uint16(i)}
wgConvertedPages.Add(1)
pagesChan <- packer2.NewContainer(page, img, "N/A")
}
}(page)
}
wgPages.Wait()
wgConvertedPages.Wait()
close(pagesChan)
close(errChan)
var errList []error
for err := range errChan {
errList = append(errList, err)
}
if len(errList) > 0 {
return nil, fmt.Errorf("encountered errors: %v", errList)
}
slices.SortFunc(pages, func(a, b *packer2.Page) int {
if a.Index == b.Index {
return int(b.SplitPartIndex - a.SplitPartIndex)
}
return int(b.Index - a.Index)
})
chapter.Pages = pages
runtime.GC()
return chapter, nil
}
func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
bounds := img.Bounds()
height := bounds.Dy()
numParts := height / converter.cropHeight
if height%converter.cropHeight != 0 {
numParts++
}
parts := make([]image.Image, numParts)
for i := 0; i < numParts; i++ {
partHeight := converter.cropHeight
if i == numParts-1 {
partHeight = height - i*converter.cropHeight
}
part, err := cutter.Crop(img, cutter.Config{
Width: bounds.Dx(),
Height: partHeight,
Anchor: image.Point{Y: i * converter.cropHeight},
Mode: cutter.TopLeft,
})
if err != nil {
return nil, fmt.Errorf("error cropping part %d: %v", i+1, err)
}
parts[i] = part
}
return parts, nil
}
func (converter *Converter) checkPageNeedsSplit(page *packer2.Page) (bool, image.Image, string, error) {
reader := io.Reader(bytes.NewBuffer(page.Contents.Bytes()))
img, format, err := image.Decode(reader)
if err != nil {
return false, nil, format, err
}
bounds := img.Bounds()
height := bounds.Dy()
return height >= converter.maxHeight, img, format, nil
}
func (converter *Converter) convertPage(container *packer2.PageContainer, quality uint8) (*packer2.PageContainer, error) {
if container.Format == "webp" {
return container, nil
}
converted, err := converter.convert(container.Image, uint(quality))
if err != nil {
return nil, err
}
container.Page.Contents = converted
container.Page.Extension = ".webp"
container.Page.Size = uint64(converted.Len())
return container, nil
}
// convert converts an image to the WebP format. It decodes the image from the input buffer,
// encodes it as a WebP file using the webp.Encode() function, and returns the resulting WebP
// file as a bytes.Buffer.
func (converter *Converter) convert(image image.Image, quality uint) (*bytes.Buffer, error) {
var buf bytes.Buffer
err := Encode(&buf, image, quality)
if err != nil {
return nil, err
}
return &buf, nil
}
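cropImage splits an over-tall page into cropHeight-sized horizontal strips, with the final strip absorbing whatever remains. A small standalone check of that arithmetic (same formula as above; partHeights is a hypothetical helper used only for illustration):

package main

import "fmt"

// partHeights reproduces the cropImage math: full strips of cropHeight plus a
// shorter final strip when the height is not an exact multiple.
func partHeights(height, cropHeight int) []int {
	numParts := height / cropHeight
	if height%cropHeight != 0 {
		numParts++
	}
	parts := make([]int, numParts)
	for i := 0; i < numParts; i++ {
		h := cropHeight
		if i == numParts-1 {
			h = height - i*cropHeight
		}
		parts[i] = h
	}
	return parts
}

func main() {
	// A 10500px page with the converter's 2000px crop height yields
	// [2000 2000 2000 2000 2000 500].
	fmt.Println(partHeights(10500, 2000))
}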

go.mod

@@ -1,49 +1,64 @@
module github.com/belphemur/CBZOptimizer
module github.com/belphemur/CBZOptimizer/v2
go 1.23.0
go 1.25
require (
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de
github.com/belphemur/go-webpbin/v2 v2.0.0
github.com/mholt/archives v0.1.3
github.com/oliamb/cutter v0.2.2
github.com/pablodz/inotifywaitgo v0.0.7
github.com/samber/lo v1.47.0
github.com/spf13/cobra v1.8.1
github.com/spf13/viper v1.19.0
github.com/thediveo/enumflag/v2 v2.0.5
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948
golang.org/x/image v0.19.0
github.com/pablodz/inotifywaitgo v0.0.9
github.com/rs/zerolog v1.34.0
github.com/samber/lo v1.51.0
github.com/spf13/cobra v1.9.1
github.com/spf13/viper v1.20.1
github.com/stretchr/testify v1.11.0
github.com/thediveo/enumflag/v2 v2.0.7
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b
golang.org/x/image v0.30.0
)
require (
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/STARRY-S/zip v0.2.1 // indirect
github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3 // indirect
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0 // indirect
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/bodgit/plumbing v1.3.0 // indirect
github.com/bodgit/sevenzip v1.6.0 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/fsnotify/fsnotify v1.8.0 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jfrog/archiver/v3 v3.6.1 // indirect
github.com/klauspost/compress v1.17.9 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.19 // indirect
github.com/mikelolasagasti/xz v1.0.1 // indirect
github.com/minio/minlz v1.0.0 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/nwaples/rardecode/v2 v2.1.0 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/sagikazarmark/locafero v0.4.0 // indirect
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/sagikazarmark/locafero v0.7.0 // indirect
github.com/sorairolake/lzip-go v0.3.5 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.11.0 // indirect
github.com/spf13/cast v1.6.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.7.1 // indirect
github.com/spf13/pflag v1.0.6 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/ulikunitz/xz v0.5.12 // indirect
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/sys v0.18.0 // indirect
golang.org/x/text v0.17.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/sys v0.29.0 // indirect
golang.org/x/text v0.28.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
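Because the module path gains a /v2 suffix, downstream code has to import the versioned path as well (semantic import versioning). The import path below is the one already used in the diffs above; the program itself is only an illustrative sketch:

package main

import (
	"fmt"

	"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
)

func main() {
	// The /v2 segment is part of the import path itself, not something
	// resolved implicitly from go.mod.
	fmt.Println(constant.WebP.String())
}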

go.sum

@@ -1,141 +1,408 @@
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/STARRY-S/zip v0.2.1 h1:pWBd4tuSGm3wtpoqRZZ2EAwOmcHK6XFf7bU9qcJXyFg=
github.com/STARRY-S/zip v0.2.1/go.mod h1:xNvshLODWtC4EJ702g7cTYn13G53o1+X9BWnPFpcWV4=
github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3 h1:8PmGpDEZl9yDpcdEr6Odf23feCxK3LNUNMxjXg41pZQ=
github.com/andybalholm/brotli v1.1.2-0.20250424173009-453214e765f3/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0 h1:EzKgPYK90TyAOmytK7bvapqlkG/m7KWKK28mOAdQEaM=
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0/go.mod h1:s2Dv+CfgVbNM9ucqvE5qCCC0AkI1PE2OZb7N8PPlOh4=
github.com/belphemur/go-webpbin/v2 v2.0.0 h1:Do0TTTJ6cS6lgi+R67De+jXRYe+ZOwxFqTiFggyX5p8=
github.com/belphemur/go-webpbin/v2 v2.0.0/go.mod h1:VIHXZQaIwaIYDn08w0qeJFPj1MuYt5pyJnkQALPYc5g=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/bodgit/plumbing v1.3.0 h1:pf9Itz1JOQgn7vEOE7v7nlEfBykYqvUYioC61TwWCFU=
github.com/bodgit/plumbing v1.3.0/go.mod h1:JOTb4XiRu5xfnmdnDJo6GmSbSbtSyufrsyZFByMtKEs=
github.com/bodgit/sevenzip v1.6.0 h1:a4R0Wu6/P1o1pP/3VV++aEOcyeBxeO/xE2Y9NSTrr6A=
github.com/bodgit/sevenzip v1.6.0/go.mod h1:zOBh9nJUof7tcrlqJFv1koWRrhz3LbDbUNngkuZxLMc=
github.com/bodgit/windows v1.0.1 h1:tF7K6KOluPYygXa3Z2594zxlkbKPAOvqr97etrGNIz4=
github.com/bodgit/windows v1.0.1/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY=
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 h1:2tV76y6Q9BB+NEBasnqvs7e49aEBFI8ejC89PSnWH+4=
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M=
github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad h1:a6HEuzUHeKH6hwfN/ZoQgRgVIWFJljSWa/zetS2WTvg=
github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jfrog/archiver/v3 v3.6.1 h1:LOxnkw9pOn45DzCbZNFV6K0+6dCsQ0L8mR3ZcujO5eI=
github.com/jfrog/archiver/v3 v3.6.1/go.mod h1:VgR+3WZS4N+i9FaDwLZbq+jeU4B4zctXL+gL4EMzfLw=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mholt/archives v0.1.3 h1:aEAaOtNra78G+TvV5ohmXrJOAzf++dIlYeDW3N9q458=
github.com/mholt/archives v0.1.3/go.mod h1:LUCGp++/IbV/I0Xq4SzcIR6uwgeh2yjnQWamjRQfLTU=
github.com/mikelolasagasti/xz v1.0.1 h1:Q2F2jX0RYJUG3+WsM+FJknv+6eVjsjXNDV0KJXZzkD0=
github.com/mikelolasagasti/xz v1.0.1/go.mod h1:muAirjiOUxPRXwm9HdDtB3uoRPrGnL85XHtokL9Hcgc=
github.com/minio/minlz v1.0.0 h1:Kj7aJZ1//LlTP1DM8Jm7lNKvvJS2m74gyyXXn3+uJWQ=
github.com/minio/minlz v1.0.0/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec=
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/nwaples/rardecode/v2 v2.1.0 h1:JQl9ZoBPDy+nIZGb1mx8+anfHp/LV3NE2MjMiv0ct/U=
github.com/nwaples/rardecode/v2 v2.1.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
github.com/onsi/ginkgo/v2 v2.13.0 h1:0jY9lJquiL8fcf3M4LAXN5aMlS/b2BV86HFFPCPMgE4=
github.com/onsi/ginkgo/v2 v2.13.0/go.mod h1:TE309ZR8s5FsKKpuB1YAQYBzCaAfUgatB/xlT/ETL/o=
github.com/onsi/gomega v1.28.1 h1:MijcGUbfYuznzK/5R4CPNoUP/9Xvuo20sXfEm6XxoTA=
github.com/onsi/gomega v1.28.1/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ=
github.com/pablodz/inotifywaitgo v0.0.7 h1:1ii49dGBnRn0t1Sz7RGZS6/NberPEDQprwKHN49Bv6U=
github.com/pablodz/inotifywaitgo v0.0.7/go.mod h1:OtzRCsYTJlIr+vAzlOtauTkfQ1c25ebFuXq8tbbf8cw=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk=
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
github.com/pablodz/inotifywaitgo v0.0.9 h1:njquRbBU7fuwIe5rEvtaniVBjwWzcpdUVptSgzFqZsw=
github.com/pablodz/inotifywaitgo v0.0.9/go.mod h1:hAfx2oN+WKg8miwUKPs52trySpPignlRBRxWcXVHku0=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/samber/lo v1.47.0 h1:z7RynLwP5nbyRscyvcD043DWYoOcYRv3mV8lBeqOCLc=
github.com/samber/lo v1.47.0/go.mod h1:RmDH9Ct32Qy3gduHQuKJ3gW1fMHAnE/fAzQuf6He5cU=
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo=
github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k=
github.com/samber/lo v1.51.0 h1:kysRYLbHy/MB7kQZf5DSN50JHmMsNEdeY24VzJFu7wI=
github.com/samber/lo v1.51.0/go.mod h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0=
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sorairolake/lzip-go v0.3.5 h1:ms5Xri9o1JBIWvOFAorYtUNik6HI3HgBTkISiqu0Cwg=
github.com/sorairolake/lzip-go v0.3.5/go.mod h1:N0KYq5iWrMXI0ZEXKXaS9hCyOjZUQdBDEIbXfoUwbdk=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8=
github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/thediveo/enumflag/v2 v2.0.5 h1:VJjvlAqUb6m6mxOrB/0tfBJI0Kvi9wJ8ulh38xK87i8=
github.com/thediveo/enumflag/v2 v2.0.5/go.mod h1:0NcG67nYgwwFsAvoQCmezG0J0KaIxZ0f7skg9eLq1DA=
github.com/thediveo/success v1.0.1 h1:NVwUOwKUwaN8szjkJ+vsiM2L3sNBFscldoDJ2g2tAPg=
github.com/thediveo/success v1.0.1/go.mod h1:AZ8oUArgbIsCuDEWrzWNQHdKnPbDOLQsWOFj9ynwLt0=
github.com/thediveo/enumflag/v2 v2.0.7 h1:uxXDU+rTel7Hg4X0xdqICpG9rzuI/mzLAEYXWLflOfs=
github.com/thediveo/enumflag/v2 v2.0.7/go.mod h1:bWlnNvTJuUK+huyzf3WECFLy557Ttlc+yk3o+BPs0EA=
github.com/thediveo/success v1.0.2 h1:w+r3RbSjLmd7oiNnlCblfGqItcsaShcuAorRVh/+0xk=
github.com/thediveo/success v1.0.2/go.mod h1:hdPJB77k70w764lh8uLUZgNhgeTl3DYeZ4d4bwMO2CU=
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA=
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ=
golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ=
golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys=
golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc=
go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEbFGHD4hhz6S1ZYeaU=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b h1:DXr+pvt3nC887026GRP39Ej11UATqWDmWuS99x26cD0=
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.30.0 h1:jD5RhkmVAnjqaCUXfbGBrn3lpxbknfN9w2UhHHU+5B4=
golang.org/x/image v0.30.0/go.mod h1:SAEUTxCCMWSrJcCy/4HwavEsfZZJlYxeHLc6tTiAe/c=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=

123
internal/cbz/cbz_creator.go Normal file
View File

@@ -0,0 +1,123 @@
package cbz
import (
"archive/zip"
"fmt"
"os"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/rs/zerolog/log"
)
func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) (err error) {
log.Debug().
Str("chapter_file", chapter.FilePath).
Str("output_path", outputFilePath).
Int("page_count", len(chapter.Pages)).
Bool("is_converted", chapter.IsConverted).
Msg("Starting CBZ file creation")
// Create a new ZIP file
log.Debug().Str("output_path", outputFilePath).Msg("Creating output CBZ file")
zipFile, err := os.Create(outputFilePath)
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to create CBZ file")
return fmt.Errorf("failed to create .cbz file: %w", err)
}
defer errs.Capture(&err, zipFile.Close, "failed to close .cbz file")
// Create a new ZIP writer
log.Debug().Str("output_path", outputFilePath).Msg("Creating ZIP writer")
zipWriter := zip.NewWriter(zipFile)
defer errs.Capture(&err, zipWriter.Close, "failed to close .cbz writer")
// Write each page to the ZIP archive
log.Debug().Str("output_path", outputFilePath).Int("pages_to_write", len(chapter.Pages)).Msg("Writing pages to CBZ archive")
for _, page := range chapter.Pages {
// Construct the file name for the page
var fileName string
if page.IsSplitted {
// Use the %04d-%02d format for split pages
fileName = fmt.Sprintf("%04d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
} else {
// Use the %04d format for non-split pages
fileName = fmt.Sprintf("%04d%s", page.Index, page.Extension)
}
log.Debug().
Str("output_path", outputFilePath).
Uint16("page_index", page.Index).
Bool("is_splitted", page.IsSplitted).
Uint16("split_part", page.SplitPartIndex).
Str("filename", fileName).
Int("size", len(page.Contents.Bytes())).
Msg("Writing page to CBZ archive")
// Create a new file in the ZIP archive
fileWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
Name: fileName,
Method: zip.Store,
Modified: time.Now(),
})
if err != nil {
log.Error().Str("output_path", outputFilePath).Str("filename", fileName).Err(err).Msg("Failed to create file in CBZ archive")
return fmt.Errorf("failed to create file in .cbz: %w", err)
}
// Write the page contents to the file
bytesWritten, err := fileWriter.Write(page.Contents.Bytes())
if err != nil {
log.Error().Str("output_path", outputFilePath).Str("filename", fileName).Err(err).Msg("Failed to write page contents")
return fmt.Errorf("failed to write page contents: %w", err)
}
log.Debug().
Str("output_path", outputFilePath).
Str("filename", fileName).
Int("bytes_written", bytesWritten).
Msg("Page written successfully")
}
// Optionally, write the ComicInfo.xml file if present
if chapter.ComicInfoXml != "" {
log.Debug().Str("output_path", outputFilePath).Int("xml_size", len(chapter.ComicInfoXml)).Msg("Writing ComicInfo.xml to CBZ archive")
comicInfoWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
Name: "ComicInfo.xml",
Method: zip.Deflate,
Modified: time.Now(),
})
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to create ComicInfo.xml in CBZ archive")
return fmt.Errorf("failed to create ComicInfo.xml in .cbz: %w", err)
}
bytesWritten, err := comicInfoWriter.Write([]byte(chapter.ComicInfoXml))
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to write ComicInfo.xml contents")
return fmt.Errorf("failed to write ComicInfo.xml contents: %w", err)
}
log.Debug().Str("output_path", outputFilePath).Int("bytes_written", bytesWritten).Msg("ComicInfo.xml written successfully")
} else {
log.Debug().Str("output_path", outputFilePath).Msg("No ComicInfo.xml to write")
}
if chapter.IsConverted {
convertedString := fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime)
log.Debug().Str("output_path", outputFilePath).Str("comment", convertedString).Msg("Setting CBZ comment for converted chapter")
err = zipWriter.SetComment(convertedString)
if err != nil {
log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to write CBZ comment")
return fmt.Errorf("failed to write comment: %w", err)
}
log.Debug().Str("output_path", outputFilePath).Msg("CBZ comment set successfully")
}
log.Debug().Str("output_path", outputFilePath).Msg("CBZ file creation completed successfully")
return nil
}
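For orientation, a minimal, hypothetical usage sketch of WriteChapterToCBZ (not part of the repository; it assumes the manga, cbz, bytes, and zerolog packages are imported and that jpegBytes holds already-encoded image data):

chapter := &manga.Chapter{
    FilePath: "input/Chapter 1.cbz",
    Pages: []*manga.Page{
        {Index: 0, Extension: ".jpg", Contents: bytes.NewBuffer(jpegBytes)}, // jpegBytes is a placeholder
    },
    ComicInfoXml: "<Series>Example Series</Series>",
}
if err := cbz.WriteChapterToCBZ(chapter, "output/Chapter 1.cbz"); err != nil {
    log.Error().Err(err).Msg("failed to write CBZ")
}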

View File

@@ -3,18 +3,23 @@ package cbz
import (
"archive/zip"
"bytes"
"github.com/belphemur/CBZOptimizer/manga"
"fmt"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"os"
"testing"
"time"
)
func TestWriteChapterToCBZ(t *testing.T) {
currentTime := time.Now()
// Define test cases
testCases := []struct {
name string
chapter *manga.Chapter
expectedFiles []string
name string
chapter *manga.Chapter
expectedFiles []string
expectedComment string
}{
//test case where there is only one page and ComicInfo and the chapter is converted
{
@@ -29,9 +34,10 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
ComicInfoXml: "<Series>Boundless Necromancer</Series>",
IsConverted: true,
ConvertedTime: time.Now(),
ConvertedTime: currentTime,
},
expectedFiles: []string{"page_0000.jpg", "ComicInfo.xml", "Converted.txt"},
expectedFiles: []string{"0000.jpg", "ComicInfo.xml"},
expectedComment: fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", currentTime),
},
//test case where there is only one page and no ComicInfo
{
@@ -45,7 +51,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
},
},
expectedFiles: []string{"page_0000.jpg"},
expectedFiles: []string{"0000.jpg"},
},
{
name: "Multiple pages with ComicInfo",
@@ -64,7 +70,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
ComicInfoXml: "<Series>Boundless Necromancer</Series>",
},
expectedFiles: []string{"page_0000.jpg", "page_0001.jpg", "ComicInfo.xml"},
expectedFiles: []string{"0000.jpg", "0001.jpg", "ComicInfo.xml"},
},
{
name: "Split page",
@@ -79,7 +85,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
},
},
expectedFiles: []string{"page_0000-01.jpg"},
expectedFiles: []string{"0000-01.jpg"},
},
}
@@ -90,7 +96,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
if err != nil {
t.Fatalf("Failed to create temporary file: %v", err)
}
defer os.Remove(tempFile.Name())
defer errs.CaptureGeneric(&err, os.Remove, tempFile.Name(), "failed to remove temporary file")
// Write the chapter to the .cbz file
err = WriteChapterToCBZ(tc.chapter, tempFile.Name())
@@ -103,7 +109,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
if err != nil {
t.Fatalf("Failed to open CBZ file: %v", err)
}
defer r.Close()
defer errs.Capture(&err, r.Close, "failed to close CBZ file")
// Collect the names of the files in the archive
var filesInArchive []string
@@ -125,6 +131,10 @@ func TestWriteChapterToCBZ(t *testing.T) {
}
}
if tc.expectedComment != "" && r.Comment != tc.expectedComment {
t.Errorf("Expected comment %s, but found %s", tc.expectedComment, r.Comment)
}
// Check if there are no unexpected files
if len(filesInArchive) != len(tc.expectedFiles) {
t.Errorf("Expected %d files, but found %d", len(tc.expectedFiles), len(filesInArchive))

166
internal/cbz/cbz_loader.go Normal file
View File

@@ -0,0 +1,166 @@
package cbz
import (
"archive/zip"
"bufio"
"bytes"
"context"
"fmt"
"io"
"io/fs"
"path/filepath"
"strings"
"github.com/araddon/dateparse"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/mholt/archives"
"github.com/rs/zerolog/log"
)
func LoadChapter(filePath string) (*manga.Chapter, error) {
log.Debug().Str("file_path", filePath).Msg("Starting chapter loading")
ctx := context.Background()
chapter := &manga.Chapter{
FilePath: filePath,
}
// First, try to read the comment using zip.OpenReader for CBZ files
if strings.ToLower(filepath.Ext(filePath)) == ".cbz" {
log.Debug().Str("file_path", filePath).Msg("Checking CBZ comment for conversion status")
r, err := zip.OpenReader(filePath)
if err == nil {
defer errs.Capture(&err, r.Close, "failed to close zip reader for comment")
// Check for comment
if r.Comment != "" {
log.Debug().Str("file_path", filePath).Str("comment", r.Comment).Msg("Found CBZ comment")
scanner := bufio.NewScanner(strings.NewReader(r.Comment))
if scanner.Scan() {
convertedTime := scanner.Text()
log.Debug().Str("file_path", filePath).Str("converted_time", convertedTime).Msg("Parsing conversion timestamp")
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err == nil {
chapter.IsConverted = true
log.Debug().Str("file_path", filePath).Time("converted_time", chapter.ConvertedTime).Msg("Chapter marked as previously converted")
} else {
log.Debug().Str("file_path", filePath).Err(err).Msg("Failed to parse conversion timestamp")
}
}
} else {
log.Debug().Str("file_path", filePath).Msg("No CBZ comment found")
}
} else {
log.Debug().Str("file_path", filePath).Err(err).Msg("Failed to open CBZ file for comment reading")
}
// Continue even if comment reading fails
}
// Open the archive using archives library for file operations
log.Debug().Str("file_path", filePath).Msg("Opening archive file system")
fsys, err := archives.FileSystem(ctx, filePath, nil)
if err != nil {
log.Error().Str("file_path", filePath).Err(err).Msg("Failed to open archive file system")
return nil, fmt.Errorf("failed to open archive file: %w", err)
}
// Walk through all files in the filesystem
log.Debug().Str("file_path", filePath).Msg("Starting filesystem walk")
err = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
return func() error {
// Open the file
file, err := fsys.Open(path)
if err != nil {
return fmt.Errorf("failed to open file %s: %w", path, err)
}
defer errs.Capture(&err, file.Close, fmt.Sprintf("failed to close file %s", path))
// Determine the file extension
ext := strings.ToLower(filepath.Ext(path))
fileName := strings.ToLower(filepath.Base(path))
if ext == ".xml" && fileName == "comicinfo.xml" {
log.Debug().Str("file_path", filePath).Str("archive_file", path).Msg("Found ComicInfo.xml")
// Read the ComicInfo.xml file content
xmlContent, err := io.ReadAll(file)
if err != nil {
log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read ComicInfo.xml")
return fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
}
chapter.ComicInfoXml = string(xmlContent)
log.Debug().Str("file_path", filePath).Int("xml_size", len(xmlContent)).Msg("ComicInfo.xml loaded")
} else if !chapter.IsConverted && ext == ".txt" && fileName == "converted.txt" {
log.Debug().Str("file_path", filePath).Str("archive_file", path).Msg("Found converted.txt")
textContent, err := io.ReadAll(file)
if err != nil {
log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read converted.txt")
return fmt.Errorf("failed to read converted.txt content: %w", err)
}
scanner := bufio.NewScanner(bytes.NewReader(textContent))
if scanner.Scan() {
convertedTime := scanner.Text()
log.Debug().Str("file_path", filePath).Str("converted_time", convertedTime).Msg("Parsing converted.txt timestamp")
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err != nil {
log.Error().Str("file_path", filePath).Err(err).Msg("Failed to parse converted time from converted.txt")
return fmt.Errorf("failed to parse converted time: %w", err)
}
chapter.IsConverted = true
log.Debug().Str("file_path", filePath).Time("converted_time", chapter.ConvertedTime).Msg("Chapter marked as converted from converted.txt")
}
} else {
// Read the file contents for page
log.Debug().Str("file_path", filePath).Str("archive_file", path).Str("extension", ext).Msg("Processing page file")
buf := new(bytes.Buffer)
bytesCopied, err := io.Copy(buf, file)
if err != nil {
log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read page file contents")
return fmt.Errorf("failed to read file contents: %w", err)
}
// Create a new Page object
page := &manga.Page{
Index: uint16(len(chapter.Pages)), // Simple index based on order
Extension: ext,
Size: uint64(buf.Len()),
Contents: buf,
IsSplitted: false,
}
// Add the page to the chapter
chapter.Pages = append(chapter.Pages, page)
log.Debug().
Str("file_path", filePath).
Str("archive_file", path).
Uint16("page_index", page.Index).
Int64("bytes_read", bytesCopied).
Msg("Page loaded successfully")
}
return nil
}()
})
if err != nil {
log.Error().Str("file_path", filePath).Err(err).Msg("Failed during filesystem walk")
return nil, err
}
log.Debug().
Str("file_path", filePath).
Int("pages_loaded", len(chapter.Pages)).
Bool("is_converted", chapter.IsConverted).
Bool("has_comic_info", chapter.ComicInfoXml != "").
Msg("Chapter loading completed successfully")
return chapter, nil
}
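A short sketch of how LoadChapter is typically consumed (assumptions: the file exists and the cbz package is imported; this mirrors what Optimize does further down):

chapter, err := cbz.LoadChapter("library/Chapter 1.cbz")
if err != nil {
    return fmt.Errorf("failed to load chapter: %w", err)
}
if chapter.IsConverted {
    log.Info().Time("converted_at", chapter.ConvertedTime).Msg("already converted, skipping")
    return nil
}
log.Info().Int("pages", len(chapter.Pages)).Bool("has_comic_info", chapter.ComicInfoXml != "").Msg("chapter loaded")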

View File

@@ -16,15 +16,22 @@ func TestLoadChapter(t *testing.T) {
testCases := []testCase{
{
name: "Original Chapter",
filePath: "../testdata/Chapter 1.cbz",
name: "Original Chapter CBZ",
filePath: "../../testdata/Chapter 1.cbz",
expectedPages: 16,
expectedSeries: "<Series>Boundless Necromancer</Series>",
expectedConversion: false,
},
{
name: "Original Chapter CBR",
filePath: "../../testdata/Chapter 1.cbr",
expectedPages: 16,
expectedSeries: "<Series>Boundless Necromancer</Series>",
expectedConversion: false,
},
{
name: "Converted Chapter",
filePath: "../testdata/Chapter 1_converted.cbz",
filePath: "../../testdata/Chapter 10_converted.cbz",
expectedPages: 107,
expectedSeries: "<Series>Boundless Necromancer</Series>",
expectedConversion: true,

View File

@@ -0,0 +1,32 @@
package manga
import (
"bytes"
"image"
)
// PageContainer is a struct that holds a manga page, its image, and the image format.
type PageContainer struct {
// Page is a pointer to a manga page object.
Page *Page
// Image is the decoded image of the manga page.
Image image.Image
// Format is a string representing the format of the image (e.g., "png", "jpeg", "webp").
Format string
// IsToBeConverted is a boolean flag indicating whether the image needs to be converted to another format.
IsToBeConverted bool
// HasBeenConverted is a boolean flag indicating whether the image has been converted to another format.
HasBeenConverted bool
}
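// NewContainer builds a PageContainer for the given page and its decoded image; HasBeenConverted always starts as false.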
func NewContainer(Page *Page, img image.Image, format string, isToBeConverted bool) *PageContainer {
return &PageContainer{Page: Page, Image: img, Format: format, IsToBeConverted: isToBeConverted, HasBeenConverted: false}
}
// SetConverted sets the converted image, its extension, and its size in the PageContainer.
func (pc *PageContainer) SetConverted(converted *bytes.Buffer, extension string) {
pc.Page.Contents = converted
pc.Page.Extension = extension
pc.Page.Size = uint64(converted.Len())
pc.HasBeenConverted = true
}
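To illustrate the intended flow, a fragment only (assumptions: page.Contents holds an encoded image, the needed image decoders are registered, and webpBuf is filled by an encoder elsewhere):

img, format, err := image.Decode(bytes.NewReader(page.Contents.Bytes()))
if err != nil {
    return nil, err
}
container := manga.NewContainer(page, img, format, true)
// ... an encoder (e.g. WebP) writes the re-encoded image into webpBuf ...
container.SetConverted(webpBuf, ".webp")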

View File

@@ -0,0 +1,25 @@
package errs
import (
"errors"
"fmt"
)
// Capture runs errFunc and, if it returns an error, records it in *errPtr,
// combining it with any existing error via errors.Join so the original error is preserved.
func Capture(errPtr *error, errFunc func() error, msg string) {
err := errFunc()
if err == nil {
return
}
*errPtr = errors.Join(*errPtr, fmt.Errorf("%s: %w", msg, err))
}
// CaptureGeneric runs errFunc with a generic type K and assigns the error, if any, to *errPtr.
func CaptureGeneric[K any](errPtr *error, errFunc func(value K) error, value K, msg string) {
err := errFunc(value)
if err == nil {
return
}
*errPtr = errors.Join(*errPtr, fmt.Errorf("%s: %w", msg, err))
}
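The intended pattern is a deferred close on a function with a named error return, so a Close error is not silently dropped. A minimal sketch, assuming only the standard library and this errs package:

func writeGreeting(path string) (err error) {
    f, err := os.Create(path)
    if err != nil {
        return err
    }
    // The deferred Capture joins any Close error into the named return value.
    defer errs.Capture(&err, f.Close, "failed to close file")
    _, err = f.WriteString("hello")
    return err
}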

View File

@@ -0,0 +1,122 @@
package errs
import (
"errors"
"fmt"
"testing"
)
func TestCapture(t *testing.T) {
tests := []struct {
name string
initial error
errFunc func() error
msg string
expected string
}{
{
name: "No error from errFunc",
initial: nil,
errFunc: func() error { return nil },
msg: "test message",
expected: "",
},
{
name: "Error from errFunc with no initial error",
initial: nil,
errFunc: func() error { return errors.New("error from func") },
msg: "test message",
expected: "test message: error from func",
},
{
name: "Error from errFunc with initial error",
initial: errors.New("initial error"),
errFunc: func() error { return errors.New("error from func") },
msg: "test message",
expected: "initial error\ntest message: error from func",
},
{
name: "Error from errFunc with initial wrapped error",
initial: fmt.Errorf("wrapped error: %w", errors.New("initial error")),
errFunc: func() error { return errors.New("error from func") },
msg: "test message",
expected: "wrapped error: initial error\ntest message: error from func",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var err error = tt.initial
Capture(&err, tt.errFunc, tt.msg)
if err != nil && err.Error() != tt.expected {
t.Errorf("expected %q, got %q", tt.expected, err.Error())
} else if err == nil && tt.expected != "" {
t.Errorf("expected %q, got nil", tt.expected)
}
})
}
}
func TestCaptureGeneric(t *testing.T) {
tests := []struct {
name string
initial error
errFunc func(int) error
value int
msg string
expected string
}{
{
name: "No error from errFunc",
initial: nil,
errFunc: func(value int) error { return nil },
value: 0,
msg: "test message",
expected: "",
},
{
name: "Error from errFunc with no initial error",
initial: nil,
errFunc: func(value int) error { return errors.New("error from func") },
value: 0,
msg: "test message",
expected: "test message: error from func",
},
{
name: "Error from errFunc with initial error",
initial: errors.New("initial error"),
errFunc: func(value int) error { return errors.New("error from func") },
value: 0,
msg: "test message",
expected: "initial error\ntest message: error from func",
},
{
name: "Error from errFunc with initial wrapped error",
initial: fmt.Errorf("wrapped error: %w", errors.New("initial error")),
errFunc: func(value int) error { return errors.New("error from func") },
value: 0,
msg: "test message",
expected: "wrapped error: initial error\ntest message: error from func",
},
{
name: "Error from errFunc with initial wrapped error and value",
initial: fmt.Errorf("wrapped error: %w", errors.New("initial error")),
errFunc: func(value int) error { return fmt.Errorf("hello error:%d", value) },
value: 1,
msg: "test message",
expected: "wrapped error: initial error\ntest message: hello error:1",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var err error = tt.initial
CaptureGeneric(&err, tt.errFunc, tt.value, tt.msg)
if err != nil && err.Error() != tt.expected {
t.Errorf("expected %q, got %q", tt.expected, err.Error())
} else if err == nil && tt.expected != "" {
t.Errorf("expected %q, got nil", tt.expected)
}
})
}
}

170
internal/utils/optimize.go Normal file
View File

@@ -0,0 +1,170 @@
package utils
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
errors2 "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
"github.com/rs/zerolog/log"
)
type OptimizeOptions struct {
ChapterConverter converter.Converter
Path string
Quality uint8
Override bool
Split bool
Timeout time.Duration
}
// Optimize optimizes a CBZ/CBR file using the specified converter.
func Optimize(options *OptimizeOptions) error {
log.Info().Str("file", options.Path).Msg("Processing file")
log.Debug().
Str("file", options.Path).
Uint8("quality", options.Quality).
Bool("override", options.Override).
Bool("split", options.Split).
Msg("Optimization parameters")
// Load the chapter
log.Debug().Str("file", options.Path).Msg("Loading chapter")
chapter, err := cbz.LoadChapter(options.Path)
if err != nil {
log.Error().Str("file", options.Path).Err(err).Msg("Failed to load chapter")
return fmt.Errorf("failed to load chapter: %v", err)
}
log.Debug().
Str("file", options.Path).
Int("pages", len(chapter.Pages)).
Bool("converted", chapter.IsConverted).
Msg("Chapter loaded successfully")
if chapter.IsConverted {
log.Info().Str("file", options.Path).Msg("Chapter already converted")
return nil
}
// Convert the chapter
log.Debug().
Str("file", chapter.FilePath).
Int("pages", len(chapter.Pages)).
Uint8("quality", options.Quality).
Bool("split", options.Split).
Msg("Starting chapter conversion")
var ctx context.Context
if options.Timeout > 0 {
var cancel context.CancelFunc
ctx, cancel = context.WithTimeout(context.Background(), options.Timeout)
defer cancel()
log.Debug().Str("file", chapter.FilePath).Dur("timeout", options.Timeout).Msg("Applying timeout to chapter conversion")
} else {
ctx = context.Background()
}
convertedChapter, err := options.ChapterConverter.ConvertChapter(ctx, chapter, options.Quality, options.Split, func(msg string, current uint32, total uint32) {
if current%10 == 0 || current == total {
log.Info().Str("file", chapter.FilePath).Uint32("current", current).Uint32("total", total).Msg("Converting")
} else {
log.Debug().Str("file", chapter.FilePath).Uint32("current", current).Uint32("total", total).Msg("Converting page")
}
})
if err != nil {
var pageIgnoredError *errors2.PageIgnoredError
if errors.As(err, &pageIgnoredError) {
log.Debug().Str("file", chapter.FilePath).Err(err).Msg("Page conversion error (non-fatal)")
} else {
log.Error().Str("file", chapter.FilePath).Err(err).Msg("Chapter conversion failed")
return fmt.Errorf("failed to convert chapter: %v", err)
}
}
if convertedChapter == nil {
log.Error().Str("file", chapter.FilePath).Msg("Conversion returned nil chapter")
return fmt.Errorf("failed to convert chapter")
}
log.Debug().
Str("file", chapter.FilePath).
Int("original_pages", len(chapter.Pages)).
Int("converted_pages", len(convertedChapter.Pages)).
Msg("Chapter conversion completed")
convertedChapter.SetConverted()
// Determine output path and handle CBR override logic
log.Debug().
Str("input_path", options.Path).
Bool("override", options.Override).
Msg("Determining output path")
outputPath := options.Path
originalPath := options.Path
isCbrOverride := false
if options.Override {
// For override mode, check if it's a CBR file that needs to be converted to CBZ
pathLower := strings.ToLower(options.Path)
if strings.HasSuffix(pathLower, ".cbr") {
// Convert CBR to CBZ: change extension and mark for deletion
outputPath = strings.TrimSuffix(options.Path, filepath.Ext(options.Path)) + ".cbz"
isCbrOverride = true
log.Debug().
Str("original_path", originalPath).
Str("output_path", outputPath).
Msg("CBR to CBZ conversion: will delete original after conversion")
} else {
log.Debug().
Str("original_path", originalPath).
Str("output_path", outputPath).
Msg("CBZ override mode: will overwrite original file")
}
} else {
// Handle both .cbz and .cbr files - strip the extension and add _converted.cbz
pathLower := strings.ToLower(options.Path)
if strings.HasSuffix(pathLower, ".cbz") {
outputPath = strings.TrimSuffix(options.Path, ".cbz") + "_converted.cbz"
} else if strings.HasSuffix(pathLower, ".cbr") {
outputPath = strings.TrimSuffix(options.Path, ".cbr") + "_converted.cbz"
} else {
// Fallback for other extensions - just add _converted.cbz
outputPath = options.Path + "_converted.cbz"
}
log.Debug().
Str("original_path", originalPath).
Str("output_path", outputPath).
Msg("Non-override mode: creating converted file alongside original")
}
// Write the converted chapter to CBZ file
log.Debug().Str("output_path", outputPath).Msg("Writing converted chapter to CBZ file")
err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
if err != nil {
log.Error().Str("output_path", outputPath).Err(err).Msg("Failed to write converted chapter")
return fmt.Errorf("failed to write converted chapter: %v", err)
}
log.Debug().Str("output_path", outputPath).Msg("Successfully wrote converted chapter")
// If we're overriding a CBR file, delete the original CBR after successful write
if isCbrOverride {
log.Debug().Str("file", originalPath).Msg("Attempting to delete original CBR file")
err = os.Remove(originalPath)
if err != nil {
// Log the error but don't fail the operation since conversion succeeded
log.Warn().Str("file", originalPath).Err(err).Msg("Failed to delete original CBR file")
} else {
log.Info().Str("file", originalPath).Msg("Deleted original CBR file")
}
}
log.Info().Str("output", outputPath).Msg("Converted file written")
return nil
}
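A minimal sketch of calling Optimize with the WebP converter (assumptions: the utils, webp, time, and zerolog packages are imported as shown above, and the timeout value is arbitrary):

err := utils.Optimize(&utils.OptimizeOptions{
    ChapterConverter: webp.New(),
    Path:             "library/Chapter 1.cbz",
    Quality:          85,
    Override:         false,
    Split:            true,
    Timeout:          5 * time.Minute,
})
if err != nil {
    log.Error().Err(err).Msg("optimization failed")
}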

View File

@@ -0,0 +1,424 @@
package utils
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
)
// MockConverter for testing
type MockConverter struct {
shouldFail bool
}
func (m *MockConverter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
if m.shouldFail {
return nil, &MockError{message: "mock conversion error"}
}
// Check if context is already cancelled
select {
case <-ctx.Done():
return nil, ctx.Err()
default:
}
// Simulate some work that can be interrupted by context cancellation
for i := 0; i < len(chapter.Pages); i++ {
select {
case <-ctx.Done():
return nil, ctx.Err()
default:
// Simulate processing time
time.Sleep(100 * time.Microsecond)
if progress != nil {
progress(fmt.Sprintf("Converting page %d/%d", i+1, len(chapter.Pages)), uint32(i+1), uint32(len(chapter.Pages)))
}
}
}
// Create a copy of the chapter to simulate conversion
converted := &manga.Chapter{
FilePath: chapter.FilePath,
Pages: chapter.Pages,
ComicInfoXml: chapter.ComicInfoXml,
IsConverted: true,
ConvertedTime: time.Now(),
}
return converted, nil
}
func (m *MockConverter) Format() constant.ConversionFormat {
return constant.WebP
}
func (m *MockConverter) PrepareConverter() error {
if m.shouldFail {
return &MockError{message: "mock prepare error"}
}
return nil
}
type MockError struct {
message string
}
func (e *MockError) Error() string {
return e.message
}
func TestOptimize(t *testing.T) {
// Create temporary directory for tests
tempDir, err := os.MkdirTemp("", "test_optimize")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Copy test files
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping tests")
}
// Copy sample files
var cbzFile, cbrFile string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() {
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") && !strings.Contains(fileName, "converted") {
destPath := filepath.Join(tempDir, "test.cbz")
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
cbzFile = destPath
} else if strings.HasSuffix(fileName, ".cbr") {
destPath := filepath.Join(tempDir, "test.cbr")
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
cbrFile = destPath
}
}
return nil
})
if err != nil {
t.Fatal(err)
}
if cbzFile == "" {
t.Skip("No CBZ test file found")
}
// Create a CBR file by copying the CBZ file if no CBR exists
if cbrFile == "" {
cbrFile = filepath.Join(tempDir, "test.cbr")
data, err := os.ReadFile(cbzFile)
if err != nil {
t.Fatal(err)
}
err = os.WriteFile(cbrFile, data, 0644)
if err != nil {
t.Fatal(err)
}
}
tests := []struct {
name string
inputFile string
override bool
expectedOutput string
shouldDelete bool
expectError bool
mockFail bool
}{
{
name: "CBZ file without override",
inputFile: cbzFile,
override: false,
expectedOutput: strings.TrimSuffix(cbzFile, ".cbz") + "_converted.cbz",
shouldDelete: false,
expectError: false,
},
{
name: "CBZ file with override",
inputFile: cbzFile,
override: true,
expectedOutput: cbzFile,
shouldDelete: false,
expectError: false,
},
{
name: "CBR file without override",
inputFile: cbrFile,
override: false,
expectedOutput: strings.TrimSuffix(cbrFile, ".cbr") + "_converted.cbz",
shouldDelete: false,
expectError: false,
},
{
name: "CBR file with override",
inputFile: cbrFile,
override: true,
expectedOutput: strings.TrimSuffix(cbrFile, ".cbr") + ".cbz",
shouldDelete: true,
expectError: false,
},
{
name: "Converter failure",
inputFile: cbzFile,
override: false,
expectError: true,
mockFail: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Create a copy of the input file for this test
testFile := filepath.Join(tempDir, tt.name+"_"+filepath.Base(tt.inputFile))
data, err := os.ReadFile(tt.inputFile)
if err != nil {
t.Fatal(err)
}
err = os.WriteFile(testFile, data, 0644)
if err != nil {
t.Fatal(err)
}
// Setup options
options := &OptimizeOptions{
ChapterConverter: &MockConverter{shouldFail: tt.mockFail},
Path: testFile,
Quality: 85,
Override: tt.override,
Split: false,
Timeout: 0,
}
// Run optimization
err = Optimize(options)
if tt.expectError {
if err == nil {
t.Error("Expected error but got none")
}
return
}
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
// Determine expected output path for this test
expectedOutput := tt.expectedOutput
if tt.override && strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
expectedOutput = strings.TrimSuffix(testFile, filepath.Ext(testFile)) + ".cbz"
} else if !tt.override {
if strings.HasSuffix(strings.ToLower(testFile), ".cbz") {
expectedOutput = strings.TrimSuffix(testFile, ".cbz") + "_converted.cbz"
} else if strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
expectedOutput = strings.TrimSuffix(testFile, ".cbr") + "_converted.cbz"
}
} else {
expectedOutput = testFile
}
// Verify output file exists
if _, err := os.Stat(expectedOutput); os.IsNotExist(err) {
t.Errorf("Expected output file not found: %s", expectedOutput)
}
// Verify output is a valid CBZ
chapter, err := cbz.LoadChapter(expectedOutput)
if err != nil {
t.Errorf("Failed to load converted chapter: %v", err)
}
if !chapter.IsConverted {
t.Error("Chapter is not marked as converted")
}
// Verify original file deletion for CBR override
if tt.shouldDelete {
if _, err := os.Stat(testFile); !os.IsNotExist(err) {
t.Error("Original CBR file should have been deleted but still exists")
}
} else {
// Verify original file still exists (unless it's the same as output)
if testFile != expectedOutput {
if _, err := os.Stat(testFile); os.IsNotExist(err) {
t.Error("Original file should not have been deleted")
}
}
}
// Clean up output file
os.Remove(expectedOutput)
})
}
}
func TestOptimize_AlreadyConverted(t *testing.T) {
// Create temporary directory
tempDir, err := os.MkdirTemp("", "test_optimize_converted")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Use a converted test file
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping tests")
}
var convertedFile string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.Contains(strings.ToLower(info.Name()), "converted") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
convertedFile = destPath
return filepath.SkipDir
}
return nil
})
if err != nil {
t.Fatal(err)
}
if convertedFile == "" {
t.Skip("No converted test file found")
}
options := &OptimizeOptions{
ChapterConverter: &MockConverter{},
Path: convertedFile,
Quality: 85,
Override: false,
Split: false,
Timeout: 0,
}
err = Optimize(options)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
// Should not create a new file since it's already converted
expectedOutput := strings.TrimSuffix(convertedFile, ".cbz") + "_converted.cbz"
if _, err := os.Stat(expectedOutput); !os.IsNotExist(err) {
t.Error("Should not have created a new converted file for already converted chapter")
}
}
func TestOptimize_InvalidFile(t *testing.T) {
options := &OptimizeOptions{
ChapterConverter: &MockConverter{},
Path: "/nonexistent/file.cbz",
Quality: 85,
Override: false,
Split: false,
Timeout: 0,
}
err := Optimize(options)
if err == nil {
t.Error("Expected error for nonexistent file")
}
}
func TestOptimize_Timeout(t *testing.T) {
// Create temporary directory
tempDir, err := os.MkdirTemp("", "test_optimize_timeout")
if err != nil {
t.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Copy test files
testdataDir := "../../testdata"
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Skip("testdata directory not found, skipping tests")
}
var cbzFile string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") && !strings.Contains(info.Name(), "converted") {
destPath := filepath.Join(tempDir, "test.cbz")
data, err := os.ReadFile(path)
if err != nil {
return err
}
err = os.WriteFile(destPath, data, info.Mode())
if err != nil {
return err
}
cbzFile = destPath
return filepath.SkipDir
}
return nil
})
if err != nil {
t.Fatal(err)
}
if cbzFile == "" {
t.Skip("No CBZ test file found")
}
// Test with short timeout (500 microseconds) to force timeout during conversion
options := &OptimizeOptions{
ChapterConverter: &MockConverter{},
Path: cbzFile,
Quality: 85,
Override: false,
Split: false,
Timeout: 500 * time.Microsecond, // 500 microseconds - should timeout during page processing
}
err = Optimize(options)
if err == nil {
t.Error("Expected timeout error but got none")
}
// Check that the error contains timeout information
if !strings.Contains(err.Error(), "context deadline exceeded") {
t.Errorf("Expected timeout error message, got: %v", err)
}
}

16
main.go
View File

@@ -1,16 +0,0 @@
package main
import (
"github.com/belphemur/CBZOptimizer/cmd"
)
var (
version = "dev"
commit = "none"
date = "unknown"
)
func main() {
cmd.SetVersionInfo(version, commit, date)
cmd.Execute()
}

View File

@@ -1,17 +0,0 @@
package manga
import "image"
// PageContainer is a struct that holds a manga page, its image, and the image format.
type PageContainer struct {
// Page is a pointer to a manga page object.
Page *Page
// Image is the decoded image of the manga page.
Image image.Image
// Format is a string representing the format of the image (e.g., "png", "jpeg", "webp").
Format string
}
func NewContainer(Page *Page, img image.Image, format string) *PageContainer {
return &PageContainer{Page: Page, Image: img, Format: format}
}

View File

@@ -1,5 +0,0 @@
package meta
var Version = "v0.0.0"
var Commit = ""
var Date = ""

View File

@@ -1,18 +1,23 @@
package converter
import (
"context"
"fmt"
"github.com/belphemur/CBZOptimizer/converter/constant"
"github.com/belphemur/CBZOptimizer/converter/webp"
"github.com/belphemur/CBZOptimizer/manga"
"github.com/samber/lo"
"strings"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/webp"
"github.com/samber/lo"
)
type Converter interface {
// Format of the converter
Format() (format constant.ConversionFormat)
ConvertChapter(chapter *manga.Chapter, quality uint8, progress func(string)) (*manga.Chapter, error)
// ConvertChapter converts a manga chapter to the specified format.
//
// Returns partial success where some pages are converted and some are not.
ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error)
PrepareConverter() error
}
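For clarity, a sketch of driving the interface directly (conv is any Converter implementation; the quality and timeout values are arbitrary):

ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
defer cancel()
converted, err := conv.ConvertChapter(ctx, chapter, 80, true, func(msg string, current, total uint32) {
    log.Debug().Uint32("current", current).Uint32("total", total).Msg(msg)
})
if err != nil && converted == nil {
    return err // fatal failure; a non-nil chapter alongside an error indicates partial success
}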

View File

@@ -0,0 +1,236 @@
package converter
import (
"bytes"
"context"
"image"
"image/jpeg"
"os"
"testing"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"golang.org/x/exp/slices"
)
func TestConvertChapter(t *testing.T) {
testCases := []struct {
name string
genTestChapter func(path string) (*manga.Chapter, error)
split bool
expectFailure []constant.ConversionFormat
expectPartialSuccess []constant.ConversionFormat
}{
{
name: "All split pages",
genTestChapter: genHugePage,
split: true,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
},
{
name: "Big Pages, no split",
genTestChapter: genHugePage,
split: false,
expectFailure: []constant.ConversionFormat{constant.WebP},
expectPartialSuccess: []constant.ConversionFormat{},
},
{
name: "No split pages",
genTestChapter: genSmallPages,
split: false,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
},
{
name: "Mix of split and no split pages",
genTestChapter: genMixSmallBig,
split: true,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
},
{
name: "Mix of Huge and small page",
genTestChapter: genMixSmallHuge,
split: false,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{constant.WebP},
},
}
// Load test genTestChapter from testdata
temp, err := os.CreateTemp("", "test_chapter_*.cbz")
if err != nil {
t.Fatalf("failed to create temporary file: %v", err)
}
defer errs.CaptureGeneric(&err, os.Remove, temp.Name(), "failed to remove temporary file")
for _, converter := range Available() {
converter, err := Get(converter)
if err != nil {
t.Fatalf("failed to get converter: %v", err)
}
t.Run(converter.Format().String(), func(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
chapter, err := tc.genTestChapter(temp.Name())
if err != nil {
t.Fatalf("failed to load test genTestChapter: %v", err)
}
quality := uint8(80)
progress := func(msg string, current uint32, total uint32) {
t.Log(msg)
}
convertedChapter, err := converter.ConvertChapter(context.Background(), chapter, quality, tc.split, progress)
if err != nil {
if convertedChapter != nil && slices.Contains(tc.expectPartialSuccess, converter.Format()) {
t.Logf("Partial success to convert genTestChapter: %v", err)
return
}
if slices.Contains(tc.expectFailure, converter.Format()) {
t.Logf("Expected failure to convert genTestChapter: %v", err)
return
}
t.Fatalf("failed to convert genTestChapter: %v", err)
} else if slices.Contains(tc.expectFailure, converter.Format()) {
t.Fatalf("expected failure to convert genTestChapter didn't happen")
}
if len(convertedChapter.Pages) == 0 {
t.Fatalf("no pages were converted")
}
if len(convertedChapter.Pages) != len(chapter.Pages) {
t.Fatalf("converted chapter has different number of pages")
}
for _, page := range convertedChapter.Pages {
if page.Extension != ".webp" {
t.Errorf("page %d was not converted to webp format", page.Index)
}
}
})
}
})
}
}
func genHugePage(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 1; i++ { // A single oversized page (1x17000) to exceed the WebP height limit
img := image.NewRGBA(image.Rect(0, 0, 1, 17000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
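// genSmallPages builds a chapter of five 300x1000 JPEG pages that never need splitting.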
func genSmallPages(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
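// genMixSmallBig builds five JPEG pages whose heights grow from 1000 to 5000 pixels,
// mixing pages below and above the split threshold.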
func genMixSmallBig(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000*(i+1)))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
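// genMixSmallHuge builds ten 1-pixel-wide JPEG pages whose heights grow from 2000 to
// 20000 pixels, so the tallest exceed the WebP height limit.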
func genMixSmallHuge(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 10; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 1, 2000*(i+1)))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}

View File

@@ -0,0 +1,13 @@
package errors
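// PageIgnoredError signals that a page was skipped (for example, because it is too tall
// for the WebP format) instead of failing the whole chapter. A caller can, for example,
// detect it with the standard library's errors.As:
//
//	var ignored *PageIgnoredError
//	if errors.As(err, &ignored) { /* keep the original page */ }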
type PageIgnoredError struct {
s string
}
func (e *PageIgnoredError) Error() string {
return e.s
}
func NewPageIgnored(text string) error {
return &PageIgnoredError{text}
}

View File

@@ -0,0 +1,453 @@
package webp
import (
"bytes"
"context"
"errors"
"fmt"
"image"
_ "image/jpeg"
"image/png"
"runtime"
"sync"
"sync/atomic"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
converterrors "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
"github.com/oliamb/cutter"
"github.com/rs/zerolog/log"
"golang.org/x/exp/slices"
_ "golang.org/x/image/webp"
)
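// webpMaxHeight is the tallest image the WebP format can encode (dimensions are limited to 16383 pixels).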
const webpMaxHeight = 16383
type Converter struct {
maxHeight int
cropHeight int
isPrepared bool
}
func (converter *Converter) Format() (format constant.ConversionFormat) {
return constant.WebP
}
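// New returns a WebP converter that, when splitting is requested, splits pages taller
// than 4000 pixels into 2000-pixel strips.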
func New() *Converter {
return &Converter{
//maxHeight: 16383 / 2,
maxHeight: 4000,
cropHeight: 2000,
isPrepared: false,
}
}
func (converter *Converter) PrepareConverter() error {
if converter.isPrepared {
return nil
}
err := PrepareEncoder()
if err != nil {
return err
}
converter.isPrepared = true
return nil
}
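// ConvertChapter converts every page of the chapter to WebP using a worker pool sized to
// runtime.NumCPU(). Pages taller than maxHeight are split into strips when split is true;
// pages exceeding webpMaxHeight without splitting are reported via PageIgnoredError and
// kept in their original format. Pages are returned sorted by index and split part, and
// any per-page errors are joined into the returned error.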
func (converter *Converter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
log.Debug().
Str("chapter", chapter.FilePath).
Int("pages", len(chapter.Pages)).
Uint8("quality", quality).
Bool("split", split).
Int("max_goroutines", runtime.NumCPU()).
Msg("Starting chapter conversion")
err := converter.PrepareConverter()
if err != nil {
log.Error().Str("chapter", chapter.FilePath).Err(err).Msg("Failed to prepare converter")
return nil, err
}
var wgConvertedPages sync.WaitGroup
maxGoroutines := runtime.NumCPU()
pagesChan := make(chan *manga.PageContainer, maxGoroutines)
errChan := make(chan error, maxGoroutines)
doneChan := make(chan struct{})
var wgPages sync.WaitGroup
wgPages.Add(len(chapter.Pages))
guard := make(chan struct{}, maxGoroutines)
pagesMutex := sync.Mutex{}
var pages []*manga.Page
var totalPages = uint32(len(chapter.Pages))
log.Debug().
Str("chapter", chapter.FilePath).
Int("total_pages", len(chapter.Pages)).
Int("worker_count", maxGoroutines).
Msg("Initialized conversion worker pool")
// Check if context is already cancelled
select {
case <-ctx.Done():
log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
return nil, ctx.Err()
default:
}
// Start the worker pool
go func() {
defer close(doneChan)
for page := range pagesChan {
select {
case <-ctx.Done():
return
case guard <- struct{}{}: // would block if guard channel is already filled
}
go func(pageToConvert *manga.PageContainer) {
defer func() {
wgConvertedPages.Done()
<-guard
}()
// Check context cancellation before processing
select {
case <-ctx.Done():
return
default:
}
convertedPage, err := converter.convertPage(pageToConvert, quality)
if err != nil {
if convertedPage == nil {
select {
case errChan <- err:
case <-ctx.Done():
return
}
return
}
buffer := new(bytes.Buffer)
err := png.Encode(buffer, convertedPage.Image)
if err != nil {
select {
case errChan <- err:
case <-ctx.Done():
return
}
return
}
convertedPage.Page.Contents = buffer
convertedPage.Page.Extension = ".png"
convertedPage.Page.Size = uint64(buffer.Len())
}
pagesMutex.Lock()
pages = append(pages, convertedPage.Page)
progress(fmt.Sprintf("Converted %d/%d pages to %s format", len(pages), totalPages, converter.Format()), uint32(len(pages)), totalPages)
pagesMutex.Unlock()
}(page)
}
}()
// Process pages
for _, page := range chapter.Pages {
select {
case <-ctx.Done():
log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
return nil, ctx.Err()
default:
}
go func(page *manga.Page) {
defer wgPages.Done()
splitNeeded, img, format, err := converter.checkPageNeedsSplit(page, split)
if err != nil {
select {
case errChan <- err:
case <-ctx.Done():
return
}
if img != nil {
wgConvertedPages.Add(1)
select {
case pagesChan <- manga.NewContainer(page, img, format, false):
case <-ctx.Done():
return
}
}
return
}
if !splitNeeded {
wgConvertedPages.Add(1)
select {
case pagesChan <- manga.NewContainer(page, img, format, true):
case <-ctx.Done():
return
}
return
}
images, err := converter.cropImage(img)
if err != nil {
select {
case errChan <- err:
case <-ctx.Done():
return
}
return
}
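// Each split adds len(images)-1 extra pages, so grow the progress total accordingly.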
atomic.AddUint32(&totalPages, uint32(len(images)-1))
for i, img := range images {
select {
case <-ctx.Done():
return
default:
}
newPage := &manga.Page{
Index: page.Index,
IsSplitted: true,
SplitPartIndex: uint16(i),
}
wgConvertedPages.Add(1)
select {
case pagesChan <- manga.NewContainer(newPage, img, "N/A", true):
case <-ctx.Done():
return
}
}
}(page)
}
wgPages.Wait()
close(pagesChan)
// Wait for all conversions to complete or context cancellation
done := make(chan struct{})
go func() {
defer close(done)
wgConvertedPages.Wait()
}()
select {
case <-done:
// Conversion completed successfully
case <-ctx.Done():
log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
return nil, ctx.Err()
}
close(errChan)
close(guard)
var errList []error
for err := range errChan {
errList = append(errList, err)
}
var aggregatedError error
if len(errList) > 0 {
aggregatedError = errors.Join(errList...)
log.Debug().
Str("chapter", chapter.FilePath).
Int("error_count", len(errList)).
Msg("Conversion completed with errors")
} else {
log.Debug().
Str("chapter", chapter.FilePath).
Int("pages_converted", len(pages)).
Msg("Conversion completed successfully")
}
slices.SortFunc(pages, func(a, b *manga.Page) int {
if a.Index == b.Index {
return int(a.SplitPartIndex) - int(b.SplitPartIndex)
}
return int(a.Index) - int(b.Index)
})
chapter.Pages = pages
log.Debug().
Str("chapter", chapter.FilePath).
Int("final_page_count", len(pages)).
Msg("Pages sorted and chapter updated")
runtime.GC()
log.Debug().Str("chapter", chapter.FilePath).Msg("Garbage collection completed")
return chapter, aggregatedError
}
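// cropImage slices a tall image into full-width horizontal strips of at most cropHeight
// pixels; the final strip holds the remainder.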
func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
bounds := img.Bounds()
height := bounds.Dy()
width := bounds.Dx()
numParts := height / converter.cropHeight
if height%converter.cropHeight != 0 {
numParts++
}
log.Debug().
Int("original_width", width).
Int("original_height", height).
Int("crop_height", converter.cropHeight).
Int("num_parts", numParts).
Msg("Starting image cropping for page splitting")
parts := make([]image.Image, numParts)
for i := 0; i < numParts; i++ {
partHeight := converter.cropHeight
if i == numParts-1 {
partHeight = height - i*converter.cropHeight
}
log.Debug().
Int("part_index", i).
Int("part_height", partHeight).
Int("y_offset", i*converter.cropHeight).
Msg("Cropping image part")
part, err := cutter.Crop(img, cutter.Config{
Width: bounds.Dx(),
Height: partHeight,
Anchor: image.Point{Y: i * converter.cropHeight},
Mode: cutter.TopLeft,
})
if err != nil {
log.Error().
Int("part_index", i).
Err(err).
Msg("Failed to crop image part")
return nil, fmt.Errorf("error cropping part %d: %v", i+1, err)
}
parts[i] = part
log.Debug().
Int("part_index", i).
Int("cropped_width", part.Bounds().Dx()).
Int("cropped_height", part.Bounds().Dy()).
Msg("Image part cropped successfully")
}
log.Debug().
Int("total_parts", len(parts)).
Msg("Image cropping completed")
return parts, nil
}
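// checkPageNeedsSplit decodes a page and decides whether it must be split. A page at or
// above webpMaxHeight with splitting disabled returns a PageIgnoredError along with the
// decoded image so the caller can keep the original.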
func (converter *Converter) checkPageNeedsSplit(page *manga.Page, splitRequested bool) (bool, image.Image, string, error) {
log.Debug().
Uint16("page_index", page.Index).
Bool("split_requested", splitRequested).
Int("page_size", len(page.Contents.Bytes())).
Msg("Analyzing page for splitting")
reader := bytes.NewBuffer(page.Contents.Bytes())
img, format, err := image.Decode(reader)
if err != nil {
log.Debug().Uint16("page_index", page.Index).Err(err).Msg("Failed to decode page image")
return false, nil, format, err
}
bounds := img.Bounds()
height := bounds.Dy()
width := bounds.Dx()
log.Debug().
Uint16("page_index", page.Index).
Int("width", width).
Int("height", height).
Str("format", format).
Int("max_height", converter.maxHeight).
Int("webp_max_height", webpMaxHeight).
Msg("Page dimensions analyzed")
if height >= webpMaxHeight && !splitRequested {
log.Debug().
Uint16("page_index", page.Index).
Int("height", height).
Int("webp_max", webpMaxHeight).
Msg("Page too tall for WebP format, would be ignored")
return false, img, format, converterrors.NewPageIgnored(fmt.Sprintf("page %d is too tall [max: %dpx] to be converted to webp format", page.Index, webpMaxHeight))
}
needsSplit := height >= converter.maxHeight && splitRequested
log.Debug().
Uint16("page_index", page.Index).
Bool("needs_split", needsSplit).
Msg("Page splitting decision made")
return needsSplit, img, format, nil
}
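// convertPage encodes a single page to WebP at the given quality, unless the page is
// already WebP or flagged as not to be converted, in which case it is returned unchanged.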
func (converter *Converter) convertPage(container *manga.PageContainer, quality uint8) (*manga.PageContainer, error) {
log.Debug().
Uint16("page_index", container.Page.Index).
Str("format", container.Format).
Bool("to_be_converted", container.IsToBeConverted).
Uint8("quality", quality).
Msg("Converting page")
// Pages already stored as WebP (case-insensitive check) need no re-encoding
if container.Format == "webp" || container.Format == "WEBP" {
log.Debug().
Uint16("page_index", container.Page.Index).
Msg("Page already in WebP format, skipping conversion")
container.Page.Extension = ".webp"
return container, nil
}
if !container.IsToBeConverted {
log.Debug().
Uint16("page_index", container.Page.Index).
Msg("Page marked as not to be converted, skipping")
return container, nil
}
log.Debug().
Uint16("page_index", container.Page.Index).
Uint8("quality", quality).
Msg("Encoding page to WebP format")
converted, err := converter.convert(container.Image, uint(quality))
if err != nil {
log.Error().
Uint16("page_index", container.Page.Index).
Err(err).
Msg("Failed to convert page to WebP")
return nil, err
}
container.SetConverted(converted, ".webp")
log.Debug().
Uint16("page_index", container.Page.Index).
Int("original_size", len(container.Page.Contents.Bytes())).
Int("converted_size", len(converted.Bytes())).
Msg("Page conversion completed")
return container, nil
}
// convert encodes an already-decoded image as WebP at the given quality and returns
// the resulting WebP data as a bytes.Buffer.
func (converter *Converter) convert(image image.Image, quality uint) (*bytes.Buffer, error) {
var buf bytes.Buffer
err := Encode(&buf, image, quality)
if err != nil {
return nil, err
}
return &buf, nil
}

View File

@@ -0,0 +1,365 @@
package webp
import (
"bytes"
"context"
"image"
"image/color"
"image/jpeg"
"image/png"
"sync"
"testing"
_ "golang.org/x/image/webp"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
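// createTestImage renders a width x height gradient so encoded test pages contain
// non-trivial pixel data.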
func createTestImage(width, height int, format string) (image.Image, error) {
img := image.NewRGBA(image.Rect(0, 0, width, height))
// Create a gradient pattern to ensure we have actual image data
for y := 0; y < height; y++ {
for x := 0; x < width; x++ {
img.Set(x, y, color.RGBA{
R: uint8((x * 255) / width),
G: uint8((y * 255) / height),
B: 100,
A: 255,
})
}
}
return img, nil
}
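// encodeImage serializes the image as JPEG, WebP, or PNG (the default) and returns the
// buffer together with the matching file extension.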
func encodeImage(img image.Image, format string) (*bytes.Buffer, string, error) {
buf := new(bytes.Buffer)
switch format {
case "jpeg", "jpg":
if err := jpeg.Encode(buf, img, &jpeg.Options{Quality: 85}); err != nil {
return nil, "", err
}
return buf, ".jpg", nil
case "webp":
PrepareEncoder()
if err := Encode(buf, img, 80); err != nil {
return nil, "", err
}
return buf, ".webp", nil
case "png":
fallthrough
default:
if err := png.Encode(buf, img); err != nil {
return nil, "", err
}
return buf, ".png", nil
}
}
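// createTestPage builds a manga.Page of the given size and encoding for use in the tests.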
func createTestPage(t *testing.T, index int, width, height int, format string) *manga.Page {
img, err := createTestImage(width, height, format)
require.NoError(t, err)
buf, ext, err := encodeImage(img, format)
require.NoError(t, err)
return &manga.Page{
Index: uint16(index),
Contents: buf,
Extension: ext,
Size: uint64(buf.Len()),
}
}
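// validateConvertedImage asserts that a converted page decodes to a valid image and,
// for .webp pages, that the detected format is WebP.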
func validateConvertedImage(t *testing.T, page *manga.Page) {
require.NotNil(t, page.Contents)
require.Greater(t, page.Contents.Len(), 0)
// Try to decode the image
img, format, err := image.Decode(bytes.NewReader(page.Contents.Bytes()))
require.NoError(t, err, "Failed to decode converted image")
if page.Extension == ".webp" {
assert.Equal(t, "webp", format, "Expected WebP format")
}
require.NotNil(t, img)
bounds := img.Bounds()
assert.Greater(t, bounds.Dx(), 0, "Image width should be positive")
assert.Greater(t, bounds.Dy(), 0, "Image height should be positive")
}
// TestConverter_ConvertChapter tests the ConvertChapter method of the WebP converter.
// It verifies various scenarios including:
// - Converting single normal images
// - Converting multiple normal images
// - Converting tall images with split enabled
// - Handling tall images that exceed maximum height
//
// For each test case it validates:
// - Proper error handling
// - Expected number of output pages
// - Correct page ordering
// - Split page handling and indexing
// - Progress callback behavior
//
// The test uses different image dimensions and split settings to ensure
// the converter handles all cases correctly while maintaining proper
// progress reporting and page ordering.
func TestConverter_ConvertChapter(t *testing.T) {
tests := []struct {
name string
pages []*manga.Page
split bool
expectSplit bool
expectError bool
numExpected int
}{
{
name: "Single normal image",
pages: []*manga.Page{createTestPage(t, 1, 800, 1200, "jpeg")},
split: false,
expectSplit: false,
numExpected: 1,
},
{
name: "Multiple normal images",
pages: []*manga.Page{
createTestPage(t, 1, 800, 1200, "png"),
createTestPage(t, 2, 800, 1200, "jpeg"),
},
split: false,
expectSplit: false,
numExpected: 2,
},
{
name: "Tall image with split enabled",
pages: []*manga.Page{createTestPage(t, 1, 800, 5000, "jpeg")},
split: true,
expectSplit: true,
numExpected: 3, // Based on cropHeight of 2000
},
{
name: "Tall image without split",
pages: []*manga.Page{createTestPage(t, 1, 800, webpMaxHeight+100, "png")},
split: false,
expectError: true,
numExpected: 1,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
converter := New()
err := converter.PrepareConverter()
require.NoError(t, err)
chapter := &manga.Chapter{
Pages: tt.pages,
}
var progressMutex sync.Mutex
var lastProgress uint32
progress := func(message string, current uint32, total uint32) {
progressMutex.Lock()
defer progressMutex.Unlock()
assert.GreaterOrEqual(t, current, lastProgress, "Progress should never decrease")
lastProgress = current
assert.LessOrEqual(t, current, total, "Current progress should not exceed total")
}
convertedChapter, err := converter.ConvertChapter(context.Background(), chapter, 80, tt.split, progress)
if tt.expectError {
assert.Error(t, err)
if convertedChapter != nil {
assert.LessOrEqual(t, len(convertedChapter.Pages), tt.numExpected)
}
return
}
require.NoError(t, err)
require.NotNil(t, convertedChapter)
assert.Len(t, convertedChapter.Pages, tt.numExpected)
// Validate all converted images
for _, page := range convertedChapter.Pages {
validateConvertedImage(t, page)
}
// Verify page order
for i := 1; i < len(convertedChapter.Pages); i++ {
prevPage := convertedChapter.Pages[i-1]
currPage := convertedChapter.Pages[i]
if prevPage.Index == currPage.Index {
assert.Less(t, prevPage.SplitPartIndex, currPage.SplitPartIndex,
"Split parts should be in ascending order for page %d", prevPage.Index)
} else {
assert.Less(t, prevPage.Index, currPage.Index,
"Pages should be in ascending order")
}
}
if tt.expectSplit {
splitFound := false
for _, page := range convertedChapter.Pages {
if page.IsSplitted {
splitFound = true
break
}
}
assert.True(t, splitFound, "Expected to find at least one split page")
}
})
}
}
func TestConverter_convertPage(t *testing.T) {
converter := New()
err := converter.PrepareConverter()
require.NoError(t, err)
tests := []struct {
name string
format string
isToBeConverted bool
expectWebP bool
}{
{
name: "Convert PNG to WebP",
format: "png",
isToBeConverted: true,
expectWebP: true,
},
{
name: "Already WebP",
format: "webp",
isToBeConverted: true,
expectWebP: true,
},
{
name: "Skip conversion",
format: "png",
isToBeConverted: false,
expectWebP: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
page := createTestPage(t, 1, 100, 100, tt.format)
img, err := createTestImage(100, 100, tt.format)
require.NoError(t, err)
container := manga.NewContainer(page, img, tt.format, tt.isToBeConverted)
converted, err := converter.convertPage(container, 80)
require.NoError(t, err)
assert.NotNil(t, converted)
if tt.expectWebP {
assert.Equal(t, ".webp", converted.Page.Extension)
validateConvertedImage(t, converted.Page)
} else {
assert.NotEqual(t, ".webp", converted.Page.Extension)
}
})
}
}
func TestConverter_checkPageNeedsSplit(t *testing.T) {
converter := New()
tests := []struct {
name string
imageHeight int
split bool
expectSplit bool
expectError bool
}{
{
name: "Normal height",
imageHeight: 1000,
split: true,
expectSplit: false,
},
{
name: "Height exceeds max with split enabled",
imageHeight: 5000,
split: true,
expectSplit: true,
},
{
name: "Height exceeds webp max without split",
imageHeight: webpMaxHeight + 100,
split: false,
expectError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
page := createTestPage(t, 1, 800, tt.imageHeight, "jpeg")
needsSplit, img, format, err := converter.checkPageNeedsSplit(page, tt.split)
if tt.expectError {
assert.Error(t, err)
return
}
require.NoError(t, err)
assert.NotNil(t, img)
assert.NotEmpty(t, format)
assert.Equal(t, tt.expectSplit, needsSplit)
})
}
}
func TestConverter_Format(t *testing.T) {
converter := New()
assert.Equal(t, constant.WebP, converter.Format())
}
func TestConverter_ConvertChapter_Timeout(t *testing.T) {
converter := New()
err := converter.PrepareConverter()
require.NoError(t, err)
// Create a test chapter with a few pages
pages := []*manga.Page{
createTestPage(t, 1, 800, 1200, "jpeg"),
createTestPage(t, 2, 800, 1200, "jpeg"),
createTestPage(t, 3, 800, 1200, "jpeg"),
}
chapter := &manga.Chapter{
FilePath: "/test/chapter.cbz",
Pages: pages,
}
var progressMutex sync.Mutex
var lastProgress uint32
progress := func(message string, current uint32, total uint32) {
progressMutex.Lock()
defer progressMutex.Unlock()
assert.GreaterOrEqual(t, current, lastProgress, "Progress should never decrease")
lastProgress = current
assert.LessOrEqual(t, current, total, "Current progress should not exceed total")
}
// Test with very short timeout (1 nanosecond)
ctx, cancel := context.WithTimeout(context.Background(), 1)
defer cancel()
convertedChapter, err := converter.ConvertChapter(ctx, chapter, 80, false, progress)
// Should return context error due to timeout
assert.Error(t, err)
assert.Nil(t, convertedChapter)
assert.Equal(t, context.DeadlineExceeded, err)
}

View File

@@ -6,7 +6,7 @@ import (
"io"
)
const libwebpVersion = "1.4.0"
const libwebpVersion = "1.6.0"
func PrepareEncoder() error {
webpbin.SetLibVersion(libwebpVersion)

View File

@@ -2,5 +2,16 @@
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended"
],
"packageRules": [
{
"matchUpdateTypes": [
"minor",
"patch",
"digest"
],
"matchCurrentVersion": "!/^0/",
"automerge": true
}
]
}

BIN
testdata/Chapter 1.cbr vendored Normal file

Binary file not shown.

View File

@@ -1,48 +0,0 @@
package utils
import (
"fmt"
"github.com/belphemur/CBZOptimizer/cbz"
"github.com/belphemur/CBZOptimizer/converter"
"log"
"strings"
)
// Optimize optimizes a CBZ file using the specified converter.
func Optimize(chapterConverter converter.Converter, path string, quality uint8, override bool) error {
log.Printf("Processing file: %s\n", path)
// Load the chapter
chapter, err := cbz.LoadChapter(path)
if err != nil {
return fmt.Errorf("failed to load chapter: %v", err)
}
if chapter.IsConverted {
log.Printf("Chapter already converted: %s", path)
return nil
}
// Convert the chapter
convertedChapter, err := chapterConverter.ConvertChapter(chapter, quality, func(msg string) {
log.Printf("[%s]%s", path, msg)
})
if err != nil {
return fmt.Errorf("failed to convert chapter: %v", err)
}
convertedChapter.SetConverted()
// Write the converted chapter back to a CBZ file
outputPath := path
if !override {
outputPath = strings.TrimSuffix(path, ".cbz") + "_converted.cbz"
}
err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
if err != nil {
return fmt.Errorf("failed to write converted chapter: %v", err)
}
log.Printf("Converted file written to: %s\n", outputPath)
return nil
}