285 Commits
v1.0.4 ... main

Author SHA1 Message Date
renovate[bot]
03479c8772 chore(deps): update anchore/sbom-action action to v0.21.1 (#155)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-08 17:14:58 +00:00
renovate[bot]
ee47432721 fix(deps): update module github.com/thediveo/enumflag/v2 to v2.1.0 (#154)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-01-03 17:42:02 +00:00
renovate[bot]
5a0fe68e68 chore(deps): update anchore/sbom-action action to v0.21.0 (#153)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-23 17:47:47 +00:00
Copilot
e535809e74 fix: Fix deadlock in ConvertChapter when context cancelled during page processing (#152)
* Initial plan

* Fix deadlock in ConvertChapter when context is cancelled after wgConvertedPages.Add

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>

* Fix test comments to remove placeholder issue references

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>

---------

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-20 14:26:33 -05:00
renovate[bot]
af5bfe8000 fix(deps): update golang.org/x/exp digest to 944ab1f (#151)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-19 21:52:44 +00:00
renovate[bot]
9ac9901990 chore(deps): update actions/upload-artifact action to v6 (#149)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-14 14:31:21 -05:00
Copilot
8c52010dfe Remove cosign signing in favor of GitHub native attestation (#148)
* Initial plan

* Remove cosign signing and improve GoReleaser configuration

- Removed cosign-installer step from release workflow
- Removed cosign signing sections (signs and docker_signs) from .goreleaser.yml
- Added include_meta: true to release configuration
- Added use: github and format to changelog configuration
- Added before hooks section for go mod tidy and go generate
- Improved comments and structure following best practices
- Added proper step names to workflow for better readability
- Kept attestation steps for checksums.txt and digests.txt using GitHub's native attestation

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>

---------

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 16:39:25 -05:00
Antoine Aflalo
aefadafc7d Merge pull request #146 from Belphemur/copilot/update-readme-docker-compose
docs: add Docker Compose examples and update copilot workflow
2025-12-10 16:19:50 -05:00
copilot-swe-agent[bot]
ba1ab20697 fix: improve format flag flexibility and usability
The format flag now supports multiple input syntaxes for better user experience:
- Space-separated: --format webp or -f webp
- Equals syntax: --format=webp
- Case-insensitive: webp, WEBP, and WebP are all valid

This change centralizes format flag setup in setupFormatFlag() function,
making it consistent across optimize and watch commands while supporting
both command-line usage and viper configuration binding.

The improvements enhance CLI usability without breaking existing usage patterns.
2025-12-10 21:16:16 +00:00
copilot-swe-agent[bot]
43593c37fc ci: remove application build from copilot-setup-steps workflow
Only build and run encoder-setup utility for WebP configuration.
Application building is not required for Copilot development environment setup.
2025-12-10 21:16:16 +00:00
copilot-swe-agent[bot]
44a4726258 ci: rename copilot-setup workflow to copilot-setup-steps and follow standard pattern
Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 21:14:57 +00:00
copilot-swe-agent[bot]
e71a3d7693 docs: add Docker Compose examples and usage instructions
Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 21:11:33 +00:00
copilot-swe-agent[bot]
992e37f9af Initial plan 2025-12-10 21:08:27 +00:00
Antoine Aflalo
a2f6805d47 Merge pull request #145 from Belphemur/copilot/add-copilot-instructions
Add GitHub Copilot instructions and setup workflow
2025-12-10 16:06:00 -05:00
copilot-swe-agent[bot]
552364f69c Fix: Use correct command name 'cbzconverter' and make workflow manual-only
Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:59:54 +00:00
Antoine Aflalo
da65eeecba Merge branch 'main' into copilot/add-copilot-instructions 2025-12-10 15:49:41 -05:00
copilot-swe-agent[bot]
5d35a2e3fa Fix: Add explicit permissions block to copilot-setup workflow for security
Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:49:17 +00:00
Antoine Aflalo
1568334c03 Potential fix for code scanning alert no. 5: Workflow does not contain permissions
Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>
2025-12-10 15:48:56 -05:00
Antoine Aflalo
31ef12bb17 Merge pull request #144 from Belphemur/copilot/fix-format-flag-crash
Fix format flag crash with space-separated values
2025-12-10 15:47:25 -05:00
copilot-swe-agent[bot]
9529004554 Add GitHub Copilot instructions and setup workflow
Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:45:23 +00:00
copilot-swe-agent[bot]
6a2efc42ac Initial plan 2025-12-10 20:39:42 +00:00
copilot-swe-agent[bot]
44e2469e34 Consolidate all common flags into flags.go
- Create individual setup functions for each common flag (quality, override, split, timeout)
- Create setupCommonFlags function that sets up all common flags in one call
- Simplify optimize_command.go and watch_command.go by using setupCommonFlags
- All flags now centralized in flags.go for better maintainability
- All tests continue to pass

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:38:58 +00:00
copilot-swe-agent[bot]
9b6a733012 Complete format flag fix with all requirements met
All changes completed and verified

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:29:34 +00:00
copilot-swe-agent[bot]
b80535d211 Fix formatting in setupTestCommand function
- Remove extra blank line after t.Helper() for Go formatting consistency

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:28:36 +00:00
copilot-swe-agent[bot]
3a2fb2a97e Add comprehensive documentation to shared functions
- Add detailed docstring to setupFormatFlag explaining parameters and usage
- Add detailed docstring to setupTestCommand explaining return values
- Use constant.DefaultConversion instead of hard-coded constant.WebP for better maintainability
- Clarify when bindViper should be true vs false

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:27:28 +00:00
copilot-swe-agent[bot]
c5de49a310 Refactor tests to use shared setupFormatFlag and helper function
- Create setupTestCommand helper function to reduce test duplication
- Update all format flag tests to use shared setupFormatFlag function
- Remove unused enumflag import from test file
- Ensures test consistency with production code
- All tests continue to pass

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:25:59 +00:00
copilot-swe-agent[bot]
cd0f056648 Eliminate code duplication with shared setupFormatFlag function
- Extract format flag setup logic into shared flags.go file
- Create setupFormatFlag function to eliminate duplication between optimize and watch commands
- Add bindViper parameter to support different flag binding strategies
- Remove duplicate enumflag imports from command files
- All tests continue to pass

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:22:58 +00:00
copilot-swe-agent[bot]
a2feca6cca Fix format flag crash and add comprehensive tests
- Remove NoOptDefVal which caused the format flag to fail with space-separated values
- Add 5 comprehensive unit tests for format flag: space syntax, short form, equals syntax, default value, and case-insensitive
- Update README with detailed format flag documentation and examples
- Format flag now works with all syntaxes: --format webp, -f webp, --format=webp
- Default value (webp) is preserved and shown in help text

Co-authored-by: Belphemur <197810+Belphemur@users.noreply.github.com>
2025-12-10 20:19:10 +00:00
copilot-swe-agent[bot]
1fa54e1936 Initial plan 2025-12-10 20:07:44 +00:00
renovate[bot]
ce8aaba165 chore(deps): update anchore/sbom-action action to v0.20.11 (#142)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-10 03:49:38 +00:00
renovate[bot]
647b139ea0 fix(deps): update golang.org/x/exp digest to 8475f28 (#141)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-09 19:54:13 +00:00
renovate[bot]
16b3ce3c9b fix(deps): update module golang.org/x/image to v0.34.0 (#140)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-08 23:57:17 +00:00
renovate[bot]
8d359aa575 fix(deps): update module github.com/spf13/cobra to v1.10.2 (#139)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-12-04 02:18:16 +00:00
renovate[bot]
97f89a51c6 fix(deps): update golang.org/x/exp digest to 87e1e73 (#137)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-25 21:51:06 +00:00
Antoine Aflalo
6840de3a89 Merge pull request #136 from Belphemur/renovate/actions-checkout-6.x 2025-11-23 20:46:44 -05:00
renovate[bot]
117b55eeaf chore(deps): update actions/checkout action to v6 2025-11-20 16:41:02 +00:00
renovate[bot]
287ae8df8b chore(deps): update anchore/sbom-action action to v0.20.10 (#135)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-17 23:47:37 +00:00
renovate[bot]
481da7c769 fix(deps): update golang.org/x/exp digest to e25ba8c (#134)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-14 02:37:56 +00:00
renovate[bot]
e269537049 fix(deps): update module golang.org/x/image to v0.33.0 (#133)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-11 23:51:57 +00:00
Antoine Aflalo
cc4829cb39 Merge pull request #132 from Belphemur/renovate/major-github-artifact-actions 2025-10-24 15:36:44 -04:00
renovate[bot]
65747d35c0 chore(deps): update actions/upload-artifact action to v5 2025-10-24 19:35:39 +00:00
renovate[bot]
eb8803302c fix(deps): update golang.org/x/exp digest to a4bb9ff (#131)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-23 20:41:51 +00:00
renovate[bot]
e60e30f5a0 chore(deps): update anchore/sbom-action action to v0.20.9 (#130)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-23 14:38:00 +00:00
renovate[bot]
7f5f690e66 fix(deps): update golang.org/x/exp digest to 90e834f (#129)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-18 04:16:25 +00:00
Antoine Aflalo
f752586432 Merge pull request #128 from Belphemur/renovate/sigstore-cosign-installer-4.x 2025-10-17 09:27:18 -04:00
renovate[bot]
9a72d64a38 chore(deps): update sigstore/cosign-installer action to v4 2025-10-17 01:13:10 +00:00
renovate[bot]
09655e225c chore(deps): update sigstore/cosign-installer action to v3.10.1 (#127)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-16 22:52:31 +00:00
renovate[bot]
90d75361a7 chore(deps): update anchore/sbom-action action to v0.20.8 (#126)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-16 16:00:36 +00:00
renovate[bot]
503fad46a6 chore(deps): update anchore/sbom-action action to v0.20.7 (#125)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-15 21:56:44 +00:00
renovate[bot]
e842b49535 fix(deps): update module github.com/mholt/archives to v0.1.5 (#124)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-13 20:42:51 +00:00
renovate[bot]
86d20e14b1 fix(deps): update golang.org/x/exp digest to d2f985d (#122)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-09 18:34:12 +00:00
renovate[bot]
7081f4aa1c fix(deps): update module golang.org/x/image to v0.32.0 (#121)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-08 18:40:16 +00:00
renovate[bot]
6d8e1e2f5e fix(deps): update module github.com/samber/lo to v1.52.0 (#120)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-08 15:59:33 +00:00
renovate[bot]
77279cb0c5 fix(deps): update golang.org/x/exp digest to 27f1f14 (#119)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-02 19:07:05 +00:00
renovate[bot]
82ab972c2e fix(deps): update module github.com/mholt/archives to v0.1.4 (#118)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-17 01:45:17 +00:00
renovate[bot]
ae754ae5d8 chore(deps): update anchore/sbom-action action to v0.20.6 (#117)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-16 14:49:14 +00:00
renovate[bot]
507d8df103 chore(deps): update sigstore/cosign-installer action to v3.10.0 (#115)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-13 23:45:35 +00:00
Antoine Aflalo
545382c887 Merge pull request #114 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to df92998
2025-09-11 08:43:10 -04:00
renovate[bot]
255b158778 fix(deps): update golang.org/x/exp digest to df92998 2025-09-11 10:32:41 +00:00
Antoine Aflalo
4f9dacdaf6 chore: remove requirements 2025-09-10 09:13:59 -04:00
Antoine Aflalo
3e62ab40e3 fix(docker): add proper annotation for multi-arch 2025-09-10 09:06:19 -04:00
Antoine Aflalo
51af843432 chore: typo fix 2025-09-10 09:02:42 -04:00
Antoine Aflalo
6b92336ba1 fix(docker): be sure we have the encoder installed in the right user folder 2025-09-10 09:00:35 -04:00
Antoine Aflalo
a6ad1dada3 fix(release): fix not having archive in the release 2025-09-10 08:49:50 -04:00
Antoine Aflalo
17fe01f27c fix(docker): remove unnecessary volume declaration and clean up bashrc setup 2025-09-09 22:58:24 -04:00
Antoine Aflalo
4fa3014d80 fix(docker): when we set the volume
To be sure we have the cache downloaded
2025-09-09 22:55:27 -04:00
Antoine Aflalo
a47af5a7a8 ci: fix cosign 2025-09-09 22:32:09 -04:00
Antoine Aflalo
d7f13132f4 ci: fix test workflow 2025-09-09 22:23:22 -04:00
Antoine Aflalo
a8587f3f1f fix(docker): have already the converter in the docker image 2025-09-09 22:21:50 -04:00
Antoine Aflalo
12817b1bff ci: full upgrade 2025-09-09 21:48:26 -04:00
Antoine Aflalo
19dcf9d40b fix(docker): missing ca-certificate 2025-09-09 21:47:47 -04:00
Antoine Aflalo
a7fa5bd0c7 ci: no need for interactive for docker image 2025-09-09 21:44:30 -04:00
Antoine Aflalo
9bde56d6c1 ci: have buildx setup 2025-09-09 21:38:11 -04:00
Antoine Aflalo
9c28923c35 ci: add verbosity 2025-09-09 21:25:28 -04:00
Antoine Aflalo
b878390b46 ci: goreleaser fixes 2025-09-09 21:17:27 -04:00
Antoine Aflalo
41ff843a80 ci(docker-sign): remove signing docker image 2025-09-09 20:53:49 -04:00
Antoine Aflalo
221945cb66 fix(docker): fix docker image 2025-09-09 20:47:46 -04:00
Antoine Aflalo
35bba7c088 ci: improve goreleaser config for docker image 2025-09-09 20:46:08 -04:00
Antoine Aflalo
b5a894deba fix(docker): fix issue with docker image not downloading the right webp converter 2025-09-09 20:36:42 -04:00
Antoine Aflalo
7ad0256b46 chore: cleanup deps 2025-09-09 20:15:35 -04:00
Antoine Aflalo
f08e8dad7b fix: releasing app 2025-09-09 19:36:05 -04:00
Antoine Aflalo
54de9bcdeb perf: use default to unlock the mutex 2025-09-09 16:59:11 -04:00
Antoine Aflalo
0a7cc506fd fix: possible race condition 2025-09-09 16:58:42 -04:00
Antoine Aflalo
fe8c5606fc chore: update deps 2025-09-09 16:49:24 -04:00
Antoine Aflalo
9a8a9693fb fix(webp): fix installing newer version of webp 2025-09-09 11:37:13 -04:00
Antoine Aflalo
7047710fdd test: no need for timeout for integration test 2025-09-09 10:13:56 -04:00
Antoine Aflalo
88786d4e53 fix(webp): fix using the right version 2025-09-09 10:12:38 -04:00
Antoine Aflalo
e0c8bf340b ci: add test preparation 2025-09-09 09:58:50 -04:00
Antoine Aflalo
36b9ddc80f fix(webp): fix issue with concurrent preparation of encoder. 2025-09-09 09:31:31 -04:00
Antoine Aflalo
a380de3fe5 ci: improve testing workflow 2025-09-09 09:19:07 -04:00
Antoine Aflalo
e47e21386f tests: update testing suite with more use cases
Actually try to convert existing chapters.
2025-09-09 09:14:40 -04:00
renovate[bot]
1b1be3a83a fix(deps): update module github.com/spf13/viper to v1.21.0 (#112)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-08 21:54:12 +00:00
Antoine Aflalo
44a919e4f3 test: add test with webp 2025-09-08 17:21:41 -04:00
renovate[bot]
1b9d83d2ff fix(deps): update module golang.org/x/image to v0.31.0 (#111)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-08 16:27:05 +00:00
Antoine Aflalo
ddc5121216 Merge pull request #110 from Belphemur/renovate/actions-setup-go-6.x
chore(deps): update actions/setup-go action to v6
2025-09-04 08:12:50 -04:00
renovate[bot]
a361f22951 chore(deps): update actions/setup-go action to v6 2025-09-04 05:07:02 +00:00
Antoine Aflalo
d245b80c65 fix: naming issue 2025-09-03 22:21:37 -04:00
Antoine Aflalo
011f7a7a7f Merge pull request #109 from Belphemur/feat/gif
Feat/gif
2025-09-03 22:19:07 -04:00
Antoine Aflalo
f159d3d0d0 fix: keep the error 2025-09-03 22:17:41 -04:00
Antoine Aflalo
ede8d62572 fix: Keep page as they are if we can't decode them and disable conversion 2025-09-03 22:15:10 -04:00
Antoine Aflalo
a151a1d4f8 tests(corruption): add test for corrupt pages 2025-09-03 21:38:21 -04:00
Antoine Aflalo
30ea3d4583 test: add test for page type 2025-09-03 21:34:39 -04:00
Antoine Aflalo
6205e3ea28 feat(gif): support gif file
See .gif file extension support and more exception handling
Fixes #105
2025-09-03 21:04:51 -04:00
Antoine Aflalo
f6bdc3cd86 Merge pull request #106 from Belphemur/dependabot/go_modules/go_modules-004c5295e3 2025-09-03 11:01:04 -04:00
Antoine Aflalo
70257a0439 Merge pull request #107 from Belphemur/renovate/actions-attest-build-provenance-3.x
chore(deps): update actions/attest-build-provenance action to v3
2025-09-03 08:40:01 -04:00
dependabot[bot]
41108514d9 chore(deps): bump github.com/ulikunitz/xz
Bumps the go_modules group with 1 update in the / directory: [github.com/ulikunitz/xz](https://github.com/ulikunitz/xz).


Updates `github.com/ulikunitz/xz` from 0.5.12 to 0.5.14
- [Commits](https://github.com/ulikunitz/xz/compare/v0.5.12...v0.5.14)

---
updated-dependencies:
- dependency-name: github.com/ulikunitz/xz
  dependency-version: 0.5.14
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 23:13:21 +00:00
renovate[bot]
7e2bb7cf90 fix(deps): update module github.com/spf13/cobra to v1.10.1 (#108)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-01 23:10:00 +00:00
renovate[bot]
8ab75421b1 chore(deps): update actions/attest-build-provenance action to v3 2025-08-31 08:55:43 +00:00
renovate[bot]
4894b14b90 fix(deps): update module github.com/stretchr/testify to v1.11.1 (#104)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-27 13:34:53 +00:00
Antoine Aflalo
9a29b6b45c fix: rollback dockerfile changes 2025-08-27 09:27:11 -04:00
Antoine Aflalo
fcc4ac57ca fix(ci): rollback to docker config instead of docker_v2 2025-08-26 23:08:08 -04:00
Antoine Aflalo
4cc33db553 fix(goreleaser): fix ci 2025-08-26 23:03:47 -04:00
Antoine Aflalo
d36c5cf0f1 fix: ci issue with goreleaser 2025-08-26 23:00:53 -04:00
Antoine Aflalo
ed70eb81cd ci: update to new setup for docker images 2025-08-26 22:59:13 -04:00
Antoine Aflalo
419edbce7b fix: ci config for goreleaser 2025-08-26 22:50:14 -04:00
Antoine Aflalo
4524e94b17 ci: fix goreleaser 2025-08-26 22:47:36 -04:00
Antoine Aflalo
c6823168af fix: add attestations 2025-08-26 22:45:35 -04:00
Antoine Aflalo
9bca0ceaf4 fix: add autocomplete definition for log level 2025-08-26 22:39:23 -04:00
Antoine Aflalo
c2a6220fde fix(logging): fix logging parameter not taken into account 2025-08-26 22:36:23 -04:00
Antoine Aflalo
e26cf7a26a fix: test 2025-08-26 21:37:51 -04:00
Antoine Aflalo
4e5180f658 feat: add timeout option for chapter conversion to prevent hanging on problematic files
fixes #102
2025-08-26 21:34:52 -04:00
Antoine Aflalo
e7bbae1c25 chore: bump webp 2025-08-26 21:20:56 -04:00
Antoine Aflalo
32c009ed9b feat: integrate zerolog for enhanced logging across multiple components 2025-08-26 21:16:54 -04:00
Antoine Aflalo
94fb60c5c6 feat: enhance logging capabilities with zerolog integration and command-line support 2025-08-26 21:07:48 -04:00
Antoine Aflalo
dfee46812d feat: use Zerolog for logging. 2025-08-26 20:55:34 -04:00
Antoine Aflalo
d0e4037e15 Merge pull request #101 from Belphemur/dependabot/go_modules/go_modules-e1b2e84e8b 2025-08-26 20:38:53 -04:00
renovate[bot]
8539abe99e fix(deps): update module github.com/stretchr/testify to v1.11.0 (#103)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-24 17:46:08 +00:00
dependabot[bot]
f1151435e1 chore(deps): bump github.com/go-viper/mapstructure/v2
Bumps the go_modules group with 1 update in the / directory: [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure).


Updates `github.com/go-viper/mapstructure/v2` from 2.3.0 to 2.4.0
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.3.0...v2.4.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.4.0
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-08-21 15:25:11 +00:00
renovate[bot]
c6e00fda5d fix(deps): update golang.org/x/exp digest to 8b4c13b (#100)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-19 23:02:35 +00:00
renovate[bot]
2f37936a72 chore(deps): update anchore/sbom-action action to v0.20.5 (#99)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-14 18:05:44 +00:00
renovate[bot]
f0d5c254a6 fix(deps): update golang.org/x/exp digest to 42675ad (#98)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-13 16:28:31 +00:00
renovate[bot]
e35b7b3ae8 chore(deps): update dependency go to v1.25.0 (#97)
* chore(deps): update dependency go to v1.25.0

* chore: move ci/cd to 1.25

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Antoine Aflalo <197810+Belphemur@users.noreply.github.com>
2025-08-13 01:04:05 +00:00
Antoine Aflalo
43d9550e6e Merge pull request #95 from Belphemur/renovate/actions-checkout-5.x 2025-08-11 20:33:18 -04:00
renovate[bot]
e7fa06f4d3 fix(deps): update golang.org/x/exp digest to 51f8813 (#96)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-11 22:47:50 +00:00
renovate[bot]
8b48da1b25 chore(deps): update actions/checkout action to v5 2025-08-11 16:24:11 +00:00
renovate[bot]
fdcc9bf076 fix(deps): update golang.org/x/exp digest to a408d31 (#94)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-08 17:00:38 +00:00
renovate[bot]
38b9d4f1bd fix(deps): update module golang.org/x/image to v0.30.0 (#93)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-07 21:44:51 +00:00
renovate[bot]
fbc1ec7d75 chore(deps): update dependency go to v1.24.6 (#92)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-06 20:48:02 +00:00
renovate[bot]
e7b566ff63 chore(deps): update anchore/sbom-action action to v0.20.4 (#91)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-22 03:11:33 +00:00
renovate[bot]
d73d0347b1 fix(deps): update golang.org/x/exp digest to 645b1fa (#90)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-18 20:32:01 +00:00
renovate[bot]
04b9dbb2dd chore(deps): update sigstore/cosign-installer action to v3.9.2 (#89)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-17 22:49:46 +00:00
renovate[bot]
5d767470a8 fix(deps): update golang.org/x/exp digest to 542afb5 (#88)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-17 22:49:33 +00:00
renovate[bot]
473c6f40e8 fix(deps): update golang.org/x/exp digest to 6ae5c78 (#87)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-11 23:47:39 +00:00
renovate[bot]
403f43a417 fix(deps): update module golang.org/x/image to v0.29.0 (#86)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-10 03:11:22 +00:00
renovate[bot]
1bfe755dd9 chore(deps): update dependency go to v1.24.5 (#85)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-08 22:53:26 +00:00
renovate[bot]
3cd6a4ab1f chore(deps): update anchore/sbom-action action to v0.20.2 (#84)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-02 22:27:30 +00:00
Antoine Aflalo
117206e0ee Merge pull request #83 from Belphemur/dependabot/go_modules/go_modules-3464edad9a 2025-06-27 15:48:52 -04:00
dependabot[bot]
1e43f9d8a0 chore(deps): bump github.com/go-viper/mapstructure/v2
Bumps the go_modules group with 1 update in the / directory: [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure).


Updates `github.com/go-viper/mapstructure/v2` from 2.2.1 to 2.3.0
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.2.1...v2.3.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.3.0
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-06-27 16:49:44 +00:00
renovate[bot]
6f8b525a96 fix(deps): update module github.com/mholt/archives to v0.1.3 (#82)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-26 22:52:14 +00:00
renovate[bot]
9480cc0e36 chore(deps): update sigstore/cosign-installer action to v3.9.1 (#81)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-23 17:36:00 +00:00
renovate[bot]
a72cd3f84f fix(deps): update golang.org/x/exp digest to b7579e2 (#80)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-20 07:16:04 +00:00
renovate[bot]
a3424494cc chore(deps): update sigstore/cosign-installer action to v3.9.0 (#79)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-17 13:39:58 +00:00
renovate[bot]
85d0b8bbca chore(deps): update anchore/sbom-action action to v0.20.1 (#78)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-13 21:55:26 +00:00
Antoine Aflalo
29f7fbbc0d Merge pull request #77 from Belphemur/Belphemur/issue75 2025-06-12 09:26:34 -04:00
Antoine Aflalo
1258b06210 docs: update README to include CBR file support and clarify features 2025-06-12 09:24:02 -04:00
Antoine Aflalo
8a6ddc668e feat: enhance optimization logic for CBR/CBZ file handling and add tests 2025-06-12 09:23:00 -04:00
Antoine Aflalo
989ca2450d feat: support CBR files in optimize and watch commands
Fixes #75
2025-06-12 09:18:06 -04:00
Antoine Aflalo
970b9019df feat: load CBR files 2025-06-12 09:11:22 -04:00
renovate[bot]
a5f88fe0e9 fix(deps): update module github.com/samber/lo to v1.51.0 (#76)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-11 08:58:04 +00:00
renovate[bot]
c46700d0e5 fix(deps): update module golang.org/x/image to v0.28.0 (#74)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-07 00:59:04 +00:00
renovate[bot]
3d98fe036b chore(deps): update dependency go to v1.24.4 (#73)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-06 19:09:31 +00:00
renovate[bot]
00d7ec0ba9 fix(deps): update golang.org/x/exp digest to dcc06ee (#72)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-06 19:08:57 +00:00
renovate[bot]
8c09db9a9e fix(deps): update golang.org/x/exp digest to b6e5de4 (#71)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-31 02:07:12 +00:00
renovate[bot]
0390f1119f fix(deps): update golang.org/x/exp digest to 65e9200 (#70)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-30 18:45:36 +00:00
renovate[bot]
b62485de3b chore(deps): update anchore/sbom-action action to v0.20.0 (#69)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-14 20:21:27 +00:00
renovate[bot]
8e11eca719 chore(deps): update dependency go to v1.24.3 (#68)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-06 19:38:28 +00:00
renovate[bot]
841bdce097 fix(deps): update golang.org/x/exp digest to ce4c2cf (#67)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-06 03:55:49 +00:00
renovate[bot]
74c0954118 fix(deps): update module golang.org/x/image to v0.27.0 (#66)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-05-05 22:49:16 +00:00
renovate[bot]
7478f0b71c fix(deps): update module github.com/samber/lo to v1.50.0 (#65)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-26 18:28:12 +00:00
renovate[bot]
a03eba5400 chore(deps): update anchore/sbom-action action to v0.19.0 (#64)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-24 23:25:23 +00:00
renovate[bot]
7546e516cd chore(deps): update sigstore/cosign-installer action to v3.8.2 (#63)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-22 21:53:05 +00:00
renovate[bot]
bef7052163 fix(deps): update golang.org/x/exp digest to 7e4ce0a (#62)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-08 18:03:02 +00:00
renovate[bot]
e04b213fa4 fix(deps): update module golang.org/x/image to v0.26.0 (#61)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-06 17:53:06 +00:00
renovate[bot]
92fa3a54e7 chore(deps): update dependency go to v1.24.2 (#60)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-04-01 17:06:22 +00:00
renovate[bot]
bc92d36df2 fix(deps): update module github.com/spf13/viper to v1.20.1 (#59)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-26 19:06:44 +00:00
renovate[bot]
9863dd5d98 fix(deps): update module github.com/spf13/viper to v1.20.0 (#58)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-15 17:12:41 +00:00
renovate[bot]
ddd19292d5 fix(deps): update golang.org/x/exp digest to 054e65f (#57)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-06 02:41:33 +00:00
renovate[bot]
6a7914bd83 fix(deps): update module golang.org/x/image to v0.25.0 (#56)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-05 19:37:10 +00:00
renovate[bot]
005d2d35c3 chore(deps): update dependency go to v1.24.1 (#55)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-04 22:46:43 +00:00
renovate[bot]
abcce332e5 fix(deps): update golang.org/x/exp digest to dead583 (#54)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-28 22:01:17 +00:00
renovate[bot]
376656ba2c chore(deps): update sigstore/cosign-installer action to v3.8.1 (#53)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-20 15:55:10 +00:00
renovate[bot]
34288e6bbe fix(deps): update golang.org/x/exp digest to aa4b98e (#52)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-18 18:49:14 +00:00
renovate[bot]
d32ea3e8a9 fix(deps): update module github.com/spf13/cobra to v1.9.1 (#51)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-17 02:47:58 +00:00
renovate[bot]
23256013f5 fix(deps): update golang.org/x/exp digest to eff6e97 (#50)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-15 21:41:43 +00:00
Antoine Aflalo
c87fde31c4 chore: Removes unused import
Removes the `image/jpeg` import, as it is already imported via blank identifier.
2025-02-14 10:55:39 -05:00
Antoine Aflalo
23eb43c691 fix(chapter): fix chapter conversion.
Still need to figure out the memory issues

Consolidates image conversion logic into a dedicated method.

This change streamlines the conversion process by centralizing the
setting of converted image data, extension, and size. It also
introduces a flag to track whether an image has been converted.

The old resource cleanup has been removed since it is not needed anymore.
2025-02-14 10:03:35 -05:00
Antoine Aflalo
4d3391273c ci: Sets up QEMU for cross-platform builds
Configures QEMU to enable emulation of different architectures,
allowing for cross-platform builds and testing in the release workflow.
2025-02-13 20:18:14 -05:00
Antoine Aflalo
2da3bae04a Updates build configuration for multi-platform support
Configures the build process to support multiple platforms (Linux, Darwin, Windows) and architectures (amd64, arm64).

Disables CGO to simplify cross-compilation.

Updates Docker image creation to produce separate images for amd64 and arm64, and creates manifest lists for `latest` and versioned tags.
2025-02-13 20:12:49 -05:00
Antoine Aflalo
a3dfec642c test: add webp converter test 2025-02-13 20:05:08 -05:00
Antoine Aflalo
0303c80feb test: fix path 2025-02-13 20:04:33 -05:00
Antoine Aflalo
efe1696bfa fix(memory): fix possible memory leak and add better tests 2025-02-13 20:02:45 -05:00
Antoine Aflalo
25cd4585b7 feat: revert to use webp executable 2025-02-13 19:47:13 -05:00
Antoine Aflalo
dd7b6a332c refactor: update import paths to use internal package 2025-02-13 19:43:18 -05:00
renovate[bot]
5428134d15 chore(deps): update dependency go to v1.24.0 (#49)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-11 20:04:36 +00:00
renovate[bot]
8d59530234 fix(deps): update golang.org/x/exp digest to 939b2ce (#48)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-10 20:23:31 +00:00
renovate[bot]
af30f34aa6 fix(deps): update golang.org/x/exp digest to f9890c6 (#47)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-07 05:00:13 +00:00
renovate[bot]
b3c412c09d chore(deps): update sigstore/cosign-installer action to v3.8.0 (#46)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-05 00:35:33 +00:00
renovate[bot]
16ba484f28 fix(deps): update module golang.org/x/image to v0.24.0 (#45)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-04 20:39:34 +00:00
renovate[bot]
2de8a81137 chore(deps): update dependency go to v1.23.6 (#44)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-04 17:36:25 +00:00
renovate[bot]
c223c9dca6 fix(deps): update golang.org/x/exp digest to e0ece0d (#43)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 21:04:20 +00:00
renovate[bot]
dcf57c7646 fix(deps): update golang.org/x/exp digest to 3edf0e9 (#42)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 17:46:34 +00:00
renovate[bot]
77e7724de2 fix(deps): update module github.com/samber/lo to v1.49.1 (#41)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 13:22:17 +00:00
renovate[bot]
ea8fd55cc2 fix(deps): update module github.com/samber/lo to v1.49.0 (#40)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-27 04:30:54 +00:00
renovate[bot]
709c53d647 fix(deps): update module github.com/pablodz/inotifywaitgo to v0.0.9 (#39)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-26 20:57:06 +00:00
renovate[bot]
919a53fec7 fix(deps): update module github.com/samber/lo to v1.48.0 (#38)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-26 10:22:50 +00:00
renovate[bot]
d3b3a73b8f chore(deps): update anchore/sbom-action action to v0.18.0 (#37)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-23 20:47:16 +00:00
renovate[bot]
188211e26d fix(deps): update golang.org/x/exp digest to 7588d65 (#36)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-06 21:12:16 +00:00
renovate[bot]
f57a88eaf4 fix(deps): update module github.com/thediveo/enumflag/v2 to v2.0.7 (#35)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-05 19:29:38 +00:00
renovate[bot]
6e6b66b5eb fix(deps): update golang.org/x/exp digest to 7d7fa50 (#34)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-03 21:05:57 +00:00
renovate[bot]
1ff1bed3cc fix(deps): update golang.org/x/exp digest to dd03c70 (#33)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-03 18:34:17 +00:00
renovate[bot]
196938718c fix(deps): update golang.org/x/exp digest to b2144cd (#32)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-17 20:15:57 +00:00
renovate[bot]
9972709d32 fix(deps): update golang.org/x/exp digest to 4a55095 (#31)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-15 18:36:01 +00:00
renovate[bot]
152fa85577 chore(deps): update anchore/sbom-action action to v0.17.9 (#30)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-13 23:01:28 +00:00
renovate[bot]
554fce5d1e fix(deps): update golang.org/x/exp digest to 1829a12 (#29)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-10 23:09:05 +00:00
renovate[bot]
25357e9ec6 fix(deps): update golang.org/x/exp digest to 1443442 (#28)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-10 17:58:23 +00:00
Antoine Aflalo
ecc561263f fix: move image to debian 2024-12-06 18:00:24 -05:00
Antoine Aflalo
fb1056e5e7 ci: remove bash completion 2024-12-06 17:40:42 -05:00
Antoine Aflalo
07bc88bb04 fix: v2 versioning 2024-12-06 17:28:09 -05:00
Antoine Aflalo
8c3665fa53 ci: fix dockerfile 2024-12-06 17:26:25 -05:00
Antoine Aflalo
8dce346997 ci: debug release 2 2024-12-06 17:22:27 -05:00
Antoine Aflalo
4646789e4e ci: debug release 2024-12-06 17:21:12 -05:00
Antoine Aflalo
22ca56c98b ci: fix building 2024-12-06 17:18:28 -05:00
Antoine Aflalo
f45a1d4ed0 ci: remove arm64 2024-12-06 17:10:52 -05:00
Antoine Aflalo
ee53fddf02 ci: fix version 2024-12-06 17:03:42 -05:00
Antoine Aflalo
f416f1ff32 feat: replace webp lib by C libwebp
Avoid having to download anything
2024-12-06 17:01:00 -05:00
renovate[bot]
969993161f fix(deps): update golang.org/x/exp digest to 43b7b7c (#27)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-05 01:57:59 +00:00
renovate[bot]
f6b41f6391 fix(deps): update module golang.org/x/image to v0.23.0 (#26)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-04 18:33:59 +00:00
renovate[bot]
0bb9e4320c chore(deps): update anchore/sbom-action action to v0.17.8 (#25)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-21 18:47:50 +00:00
Antoine Aflalo
35cfe41aa6 Merge pull request #24 from Belphemur/renovate/codecov-codecov-action-5.x
chore(deps): update codecov/codecov-action action to v5
2024-11-18 14:37:12 -05:00
renovate[bot]
021c647a6e chore(deps): update codecov/codecov-action action to v5 2024-11-14 19:14:51 +00:00
renovate[bot]
6217254305 fix(deps): update golang.org/x/exp digest to 2d47ceb (#23)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 23:01:56 +00:00
renovate[bot]
0ad711a24d fix(deps): update golang.org/x/exp digest to 04b2079 (#22)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 19:25:44 +00:00
renovate[bot]
f24e4cc26e fix(deps): update module golang.org/x/image to v0.22.0 (#21)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 00:06:54 +00:00
Antoine Aflalo
1d3a8396f2 Merge pull request #20 from Belphemur/renovate/anchore-sbom-action-0.x 2024-11-05 09:49:07 -05:00
renovate[bot]
497f206c50 chore(deps): update anchore/sbom-action action to v0.17.7 2024-11-05 14:37:06 +00:00
renovate[bot]
9ade876952 chore(deps): update anchore/sbom-action action to v0.17.6 (#19)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-29 16:46:36 +00:00
Antoine Aflalo
103d38c74b Merge pull request #18 from Belphemur/renovate/anchore-sbom-action-0.x 2024-10-22 10:23:05 -04:00
renovate[bot]
80a1afe7c3 chore(deps): update anchore/sbom-action action to v0.17.5 2024-10-21 20:34:41 +00:00
Antoine Aflalo
2de7bc7a04 Merge pull request #17 from Belphemur/renovate/anchore-sbom-action-0.x 2024-10-15 14:29:56 -04:00
renovate[bot]
bccf7a7029 chore(deps): update anchore/sbom-action action to v0.17.4 2024-10-15 17:14:00 +00:00
renovate[bot]
4e80ddfb3a chore(deps): update anchore/sbom-action action to v0.17.3 (#16)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-12 00:29:35 +00:00
renovate[bot]
090bbac593 fix(deps): update golang.org/x/exp digest to f66d83c (#15)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-09 22:19:26 +00:00
renovate[bot]
c8b0f11784 fix(deps): update golang.org/x/exp digest to 225e2ab (#14)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-05 00:22:41 +00:00
Antoine Aflalo
449b57b14e Merge pull request #13 from Belphemur/renovate/golang.org-x-image-0.x
fix(deps): update module golang.org/x/image to v0.21.0
2024-10-04 14:25:05 -04:00
renovate[bot]
0dcb55f06d fix(deps): update module golang.org/x/image to v0.21.0 2024-10-04 16:40:30 +00:00
Antoine Aflalo
33ae460caf Merge pull request #12 from Belphemur/renovate/sigstore-cosign-installer-3.x
chore(deps): update sigstore/cosign-installer action to v3.7.0
2024-10-04 09:57:48 -04:00
renovate[bot]
1af484aea8 chore(deps): update sigstore/cosign-installer action to v3.7.0 2024-10-04 13:31:05 +00:00
Antoine Aflalo
e798a59a43 perf: fix any unhandled errors 2024-09-10 15:10:40 -04:00
Antoine Aflalo
72086d658e refactor: clean up 2024-09-10 13:58:52 -04:00
Antoine Aflalo
a7bca7ee05 ci(qodana): add qodana 2024-09-10 13:54:08 -04:00
Antoine Aflalo
ba82003b53 Merge pull request #11 from Belphemur/renovate
perf(error): better deal with deferred errors
2024-09-09 14:47:12 -04:00
Antoine Aflalo
5f7e7de644 ci(tests): fix possible error with tests 2024-09-09 14:45:30 -04:00
Antoine Aflalo
5b183cca29 perf(error): better deal with deferred errors 2024-09-09 14:45:30 -04:00
Antoine Aflalo
d901be14fa Merge pull request #9 from Belphemur/renovatebot
ci(renovate): auto merge digest
2024-09-09 14:11:56 -04:00
Antoine Aflalo
a80997835a chore(deps): update deps 2024-09-09 14:09:39 -04:00
Antoine Aflalo
37bb12fd61 ci(renovate): auto merge digest 2024-09-09 14:09:23 -04:00
Antoine Aflalo
c19afb9f40 Merge pull request #7 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to 701f63a
2024-09-09 14:06:57 -04:00
renovate[bot]
911e1041ff fix(deps): update golang.org/x/exp digest to 701f63a 2024-09-09 18:06:01 +00:00
Antoine Aflalo
a10d589b67 Merge pull request #8 from Belphemur/fix-ci
ci: always generate and upload test results
2024-09-09 14:05:05 -04:00
Antoine Aflalo
da508fcb3f ci: always generate and upload test results 2024-09-09 14:03:28 -04:00
Antoine Aflalo
57f5282032 ci(renovate): add automerge 2024-09-09 09:27:33 -04:00
Antoine Aflalo
d4f8d8b5ff ci(test): fix the report xml file 2024-09-09 09:25:46 -04:00
Antoine Aflalo
1b026b9dbd fix(watch): add missing split option in log 2024-09-09 09:11:45 -04:00
Antoine Aflalo
12cc8d4e25 Merge pull request #6 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to e7e105d
2024-09-06 17:10:42 -04:00
renovate[bot]
3442b2a845 fix(deps): update golang.org/x/exp digest to e7e105d 2024-09-06 21:08:46 +00:00
Antoine Aflalo
b9a1fb213a Merge pull request #5 from Belphemur/renovate/golang.org-x-image-0.x
fix(deps): update module golang.org/x/image to v0.20.0
2024-09-06 17:07:40 -04:00
renovate[bot]
278ee130e3 fix(deps): update module golang.org/x/image to v0.20.0 2024-09-04 19:30:08 +00:00
Antoine Aflalo
5357ece2b7 perf: use comment of the zip to know if it's converted instead of txt file 2024-08-29 09:38:39 -04:00
Antoine Aflalo
dbef43d376 fix(watch): fix watch command not using proper path 2024-08-28 15:06:47 -04:00
Antoine Aflalo
7c63ea49c0 fix(docker): fix docker image config folder 2024-08-28 14:36:14 -04:00
Antoine Aflalo
8a067939af Merge pull request #4 from Belphemur/renovate/major-github-artifact-actions
chore(deps): update actions/upload-artifact action to v4
2024-08-28 14:24:57 -04:00
Antoine Aflalo
f89974ac79 ci: Another attempt at reducing 2024-08-28 14:22:25 -04:00
Antoine Aflalo
ce365a6bdf ci: reduce size of page to pass tests
Fix failing test
2024-08-28 14:16:51 -04:00
renovate[bot]
9e61ff4634 chore(deps): update actions/upload-artifact action to v4 2024-08-28 17:56:03 +00:00
Antoine Aflalo
63a1b592c3 ci: add test result to pipeline 2024-08-28 13:55:33 -04:00
Antoine Aflalo
673484692b perf(webp): improve the error message for page too tall 2024-08-28 13:52:27 -04:00
Antoine Aflalo
ad35e2655f feat(webp): add partial success to conversion
So we only keep images that couldn't be optimized and return the chapter
2024-08-28 13:49:14 -04:00
Antoine Aflalo
d7f55fa886 fix(webp): improve error message in page not convertible 2024-08-28 12:09:40 -04:00
Antoine Aflalo
62638517e4 test: improve testing suite for expected failure 2024-08-28 12:03:33 -04:00
Antoine Aflalo
dbf7f6c262 fix(webp): be sure we split big page when requested 2024-08-28 11:55:53 -04:00
Antoine Aflalo
9ecd5ff3a5 fix(webp): fix the actual maximum limit 2024-08-28 11:53:26 -04:00
Antoine Aflalo
a63d2395f0 fix(webp): better handling of error for page too big for webp 2024-08-28 11:51:06 -04:00
Antoine Aflalo
839ad9ed9d fix(cbz): make pages be the first in the cbz by only be number 2024-08-28 09:16:19 -04:00
Antoine Aflalo
c8879349e1 feat(split): Make the split configurable for the watch command 2024-08-28 09:10:08 -04:00
Antoine Aflalo
5ac59a93c5 feat(split): Make the split configurable for the optimize command 2024-08-28 09:06:49 -04:00
58 changed files with 5227 additions and 1294 deletions

309
.github/copilot-instructions.md vendored Normal file
View File

@@ -0,0 +1,309 @@
# CBZOptimizer - GitHub Copilot Instructions
## Project Overview
CBZOptimizer is a Go-based command-line tool designed to optimize CBZ (Comic Book Zip) and CBR (Comic Book RAR) files by converting images to modern formats (primarily WebP) with configurable quality settings. The tool reduces the size of comic book archives while maintaining acceptable image quality.
**Key Features:**
- Convert CBZ/CBR files to optimized CBZ format
- WebP image encoding with quality control
- Parallel chapter processing
- File watching for automatic optimization
- Optional page splitting for large images
- Timeout handling for problematic files
## Technology Stack
- **Language:** Go 1.25+
- **CLI Framework:** Cobra + Viper
- **Logging:** zerolog (structured logging)
- **Image Processing:** go-webpbin/v2 for WebP encoding
- **Archive Handling:** mholt/archives for CBZ/CBR processing
- **Testing:** testify + gotestsum
## Project Structure
```
.
├── cmd/
│ ├── cbzoptimizer/ # Main CLI application
│ │ ├── commands/ # Cobra commands (optimize, watch)
│ │ └── main.go # Entry point
│ └── encoder-setup/ # WebP encoder setup utility
│ └── main.go # Encoder initialization (build tag: encoder_setup)
├── internal/
│ ├── cbz/ # CBZ/CBR file operations
│ │ ├── cbz_loader.go # Load and parse comic archives
│ │ └── cbz_creator.go # Create optimized archives
│ ├── manga/ # Domain models
│ │ ├── chapter.go # Chapter representation
│ │ ├── page.go # Page image handling
│ │ └── page_container.go # Page collection management
│ └── utils/ # Utility functions
│ ├── optimize.go # Core optimization logic
│ └── errs/ # Error handling utilities
└── pkg/
└── converter/ # Image conversion abstractions
├── converter.go # Converter interface
├── webp/ # WebP implementation
│ ├── webp_converter.go # WebP conversion logic
│ └── webp_provider.go # WebP encoder provider
├── errors/ # Conversion error types
└── constant/ # Shared constants
```
## Building and Testing
### Prerequisites
Before building or testing, the WebP encoder must be set up:
```bash
# Build the encoder-setup utility
go build -tags encoder_setup -o encoder-setup ./cmd/encoder-setup
# Run encoder setup (downloads and configures libwebp 1.6.0)
./encoder-setup
```
This step is **required** before running tests or building the main application.
### Build Commands
```bash
# Build the main application
go build -o cbzconverter ./cmd/cbzoptimizer
# Build with version information
go build -ldflags "-s -w -X main.version=1.0.0 -X main.commit=abc123 -X main.date=2024-01-01" -o cbzconverter ./cmd/cbzoptimizer
```
### Testing
```bash
# Install test runner
go install gotest.tools/gotestsum@latest
# Run all tests with coverage
gotestsum --format testname -- -race -coverprofile=coverage.txt -covermode=atomic ./...
# Run specific package tests
go test -v ./internal/cbz/...
go test -v ./pkg/converter/...
# Run integration tests
go test -v ./internal/utils/...
```
### Linting
```bash
# Install golangci-lint if not available
go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
# Run linter
golangci-lint run
```
## Code Conventions
### Go Style
- **Follow standard Go conventions:** Use `gofmt` and `goimports`
- **Package naming:** Short, lowercase, single-word names
- **Error handling:** Always check errors explicitly; use structured error wrapping with `fmt.Errorf("context: %w", err)`
- **Context usage:** Pass `context.Context` as first parameter for operations that may be cancelled
### Logging
Use **zerolog** for all logging:
```go
import "github.com/rs/zerolog/log"
// Info level with structured fields
log.Info().Str("file", path).Int("pages", count).Msg("Processing file")
// Debug level for detailed diagnostics
log.Debug().Str("file", path).Uint8("quality", quality).Msg("Optimization parameters")
// Error level with error wrapping
log.Error().Str("file", path).Err(err).Msg("Failed to load chapter")
```
**Log Levels (in order of verbosity):**
- `panic` - System panic conditions
- `fatal` - Fatal errors requiring exit
- `error` - Error conditions
- `warn` - Warning conditions
- `info` - General information (default)
- `debug` - Debug-level messages
- `trace` - Trace-level messages
### Error Handling
- Use the custom `errs` package for deferred error handling:
```go
import "github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
func processFile() (err error) {
defer errs.Wrap(&err, "failed to process file")
// ... implementation
}
```
- Define custom error types in `pkg/converter/errors/` for specific error conditions
- Always provide context when wrapping errors
### Testing
- Use **testify** for assertions:
```go
import "github.com/stretchr/testify/assert"
func TestSomething(t *testing.T) {
result, err := DoSomething()
assert.NoError(t, err)
assert.Equal(t, expected, result)
}
```
- Use table-driven tests for multiple scenarios:
```go
testCases := []struct {
name string
input string
expected string
expectError bool
}{
{"case1", "input1", "output1", false},
{"case2", "input2", "output2", true},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
// test implementation
})
}
```
- Integration tests should be in `*_integration_test.go` files
- Use temporary directories for file operations in tests
### Command Structure (Cobra)
- Commands are in `cmd/cbzoptimizer/commands/`
- Each command is in its own file (e.g., `optimize_command.go`, `watch_command.go`)
- Use Cobra's persistent flags for global options
- Use Viper for configuration management
### Dependencies
**Key external packages:**
- `github.com/belphemur/go-webpbin/v2` - WebP encoding (libwebp wrapper)
- `github.com/mholt/archives` - Archive format handling
- `github.com/spf13/cobra` - CLI framework
- `github.com/spf13/viper` - Configuration management
- `github.com/rs/zerolog` - Structured logging
- `github.com/oliamb/cutter` - Image cropping for page splitting
- `golang.org/x/image` - Extended image format support
## Docker Considerations
The Dockerfile uses a multi-stage build and requires:
1. The compiled `CBZOptimizer` binary (from goreleaser)
2. The `encoder-setup` binary (built with `-tags encoder_setup`)
3. The encoder-setup is run during image build to configure WebP encoder
The encoder must be set up in the container before the application runs.
## Common Tasks
### Adding a New Command
1. Create `cmd/cbzoptimizer/commands/newcommand_command.go`
2. Define the command using Cobra:
```go
var newCmd = &cobra.Command{
Use: "new",
Short: "Description",
RunE: func(cmd *cobra.Command, args []string) error {
// implementation
},
}
func init() {
rootCmd.AddCommand(newCmd)
}
```
3. Add tests in `newcommand_command_test.go`
### Adding a New Image Format Converter
1. Create a new package under `pkg/converter/` (e.g., `avif/`)
2. Implement the `Converter` interface from `pkg/converter/converter.go`
3. Add tests following existing patterns in `pkg/converter/webp/`
4. Update command flags to support the new format
### Modifying Optimization Logic
The core optimization logic is in `internal/utils/optimize.go`:
- Uses the `OptimizeOptions` struct for parameters
- Handles chapter loading, conversion, and saving
- Implements timeout handling with context
- Provides structured logging at each step
## CI/CD
### GitHub Actions Workflows
1. **test.yml** - Runs on every push/PR
- Sets up Go environment
- Runs encoder-setup
- Executes tests with coverage
- Uploads results to Codecov
2. **release.yml** - Runs on version tags
- Uses goreleaser for multi-platform builds
- Builds Docker images for linux/amd64 and linux/arm64
- Signs releases with cosign
- Generates SBOMs with syft
3. **qodana.yml** - Code quality analysis
### Release Process
Releases are automated via goreleaser:
- Tag format: `v*` (e.g., `v2.1.0`)
- Builds for: linux, darwin, windows (amd64, arm64)
- Creates Docker images and pushes to ghcr.io
- Generates checksums and SBOMs
## Performance Considerations
- **Parallelism:** Use `--parallelism` flag to control concurrent chapter processing
- **Memory:** Large images are processed in-memory; consider system RAM when setting parallelism
- **Timeouts:** Use `--timeout` flag to prevent hanging on problematic files
- **WebP Quality:** Balance quality (0-100) vs file size; default is 85
## Security
- No credentials or secrets should be committed
- Archive extraction includes path traversal protection
- File permissions are preserved during operations
- Docker images run as non-root user (`abc`, UID 99)
## Additional Notes
- CBR files are always converted to CBZ format (RAR is read-only)
- The `--override` flag deletes the original file after successful conversion
- Page splitting is useful for double-page spreads or very tall images
- Watch mode uses inotify on Linux for efficient file monitoring
- Bash completion is available via `cbzconverter completion bash`
## Getting Help
- Use `--help` flag for command documentation
- Use `--log debug` for detailed diagnostic output
- Check GitHub Issues for known problems
- Review test files for usage examples

View File

@@ -0,0 +1,69 @@
name: Copilot Setup Steps
permissions:
contents: read
on:
workflow_dispatch:
jobs:
copilot-setup-steps:
name: Setup Go and gopls
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Set up Go
uses: actions/setup-go@v6
with:
go-version-file: go.mod
cache: true
- name: Verify Go installation
run: |
go version
go env
- name: Install gopls
run: |
go install golang.org/x/tools/gopls@latest
- name: Verify gopls installation
run: |
gopls version
- name: Install golangci-lint
uses: golangci/golangci-lint-action@v9
with:
version: latest
- name: Download Go dependencies
run: |
go mod download
go mod verify
- name: Build encoder-setup utility
run: |
go build -tags encoder_setup -o encoder-setup ./cmd/encoder-setup
ls -lh encoder-setup
- name: Run encoder-setup
run: |
./encoder-setup
- name: Install gotestsum
run: |
go install gotest.tools/gotestsum@latest
- name: Verify gotestsum installation
run: |
gotestsum --version
- name: Setup complete
run: |
echo "✅ Go environment setup complete"
echo "✅ gopls (Go language server) installed"
echo "✅ golangci-lint installed"
echo "✅ Dependencies downloaded and verified"
echo "✅ WebP encoder configured (libwebp 1.6.0)"
echo "✅ gotestsum (test runner) installed"

23
.github/workflows/qodana.yml vendored Normal file
View File

@@ -0,0 +1,23 @@
name: Qodana
on:
workflow_dispatch:
pull_request:
push:
branches:
jobs:
qodana:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
checks: write
steps:
- uses: actions/checkout@v6
with:
ref: ${{ github.event.pull_request.head.sha }} # to check out the actual pull request commit, not the merge commit
fetch-depth: 0 # a full history is required for pull request analysis
- name: 'Qodana Scan'
uses: JetBrains/qodana-action@v2024.1
env:
QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }}

View File

@@ -8,34 +8,52 @@ name: release
on:
push:
tags:
- 'v*'
- "v*"
permissions:
contents: write # needed to write releases
id-token: write # needed for keyless signing
packages: write # needed for ghcr access
attestations: write # needed for attestations
jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v6
with:
fetch-depth: 0 # this is important, otherwise it won't checkout the full tree (i.e. no previous tags)
- uses: actions/setup-go@v5
- name: Set up Go
uses: actions/setup-go@v6
with:
go-version: 1.23
go-version-file: go.mod
cache: true
- uses: sigstore/cosign-installer@v3.6.0 # installs cosign
- uses: anchore/sbom-action/download-syft@v0.17.2 # installs syft
- uses: docker/login-action@v3 # login to ghcr
- name: Install Syft
uses: anchore/sbom-action/download-syft@v0.21.1 # installs syft
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GHCR
uses: docker/login-action@v3 # login to ghcr
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: goreleaser/goreleaser-action@v6 # run goreleaser
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v6 # run goreleaser
with:
version: latest
args: release --clean
version: nightly
args: release --clean --verbose
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# After GoReleaser runs, attest all the files in ./dist/checksums.txt:
- name: Attest Build Provenance for Archives
uses: actions/attest-build-provenance@v3
with:
subject-checksums: ./dist/checksums.txt
# After GoReleaser runs, attest all the images in ./dist/digests.txt:
- name: Attest Build Provenance for Docker Images
uses: actions/attest-build-provenance@v3
with:
subject-checksums: ./dist/digests.txt

View File

@@ -6,35 +6,46 @@ on:
jobs:
test:
name: Test
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- uses: actions/checkout@v6
- name: Set up Go
uses: actions/setup-go@v5
uses: actions/setup-go@v6
with:
go-version: '1.23'
go-version-file: go.mod
cache: true
- name: Install gotestsum
run: go install gotest.tools/gotestsum@latest
- name: Install dependencies
run: go mod tidy
- name: Install Junit reporter
- name: Setup test environment
run: |
wget https://github.com/jstemmer/go-junit-report/releases/download/v2.1.0/go-junit-report-v2.1.0-linux-amd64.tar.gz && \
tar -xzf go-junit-report-v2.1.0-linux-amd64.tar.gz && \
chmod +x go-junit-report && \
mv go-junit-report /usr/local/bin/
go build -tags encoder_setup -o encoder-setup ./cmd/encoder-setup
./encoder-setup
- name: Run tests
run: go test -v 2>&1 ./... -coverprofile=coverage.txt | go-junit-report -set-exit-code > junit.xml
- name: Upload results to Codecov
uses: codecov/codecov-action@v4
run: |
mkdir -p test-results
gotestsum --junitfile test-results/junit.xml --format testname -- -race -coverprofile=coverage.txt -covermode=atomic ./...
- name: Upload test result artifact
if: ${{ !cancelled() }}
uses: actions/upload-artifact@v6
with:
token: ${{ secrets.CODECOV_TOKEN }}
name: test-results
path: |
test-results/junit.xml
test-results/coverage.txt
retention-days: 7
- name: Upload test results to Codecov
if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: test-results/junit.xml
- name: Upload coverage reports to Codecov
if: ${{ !cancelled() }}
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}

1
.gitignore vendored
View File

@@ -103,3 +103,4 @@ fabric.properties
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
*__debug_bin*

View File

@@ -1,17 +1,40 @@
# .goreleaser.yml
version: 2
project_name: CBZOptimizer
# Configures the release process on GitHub
# https://goreleaser.com/customization/release/
release:
github:
owner: belphemur
name: CBZOptimizer
include_meta: true
# draft: false # Default is false
# prerelease: auto # Default is auto
# mode: replace # Default is append
# Configures the binary archive generation
# https://goreleaser.com/customization/archive/
archives:
- ids:
- cbzoptimizer
formats: ["tar.zst"]
format_overrides:
- # Which GOOS to override the format for.
goos: windows
formats: ["zip"] # Plural form, multiple formats. Since: v2.6
# Configures the changelog generation
# https://goreleaser.com/customization/changelog/
changelog:
use: github
format: "{{.SHA}}: {{.Message}} (@{{.AuthorUsername}})"
sort: asc
filters:
exclude:
- '^docs:'
- '^test:'
- '^chore:'
- "^docs:"
- "^test:"
- "^chore:"
groups:
- title: Features
regexp: '^.*?feat(\([[:word:]]+\))??!?:.+$'
@@ -22,9 +45,41 @@ changelog:
- title: "Performance"
regexp: '^.*?perf(\([[:word:]]+\))??!?:.+$'
order: 2
# Hooks to run before the build process starts
# https://goreleaser.com/customization/hooks/
before:
hooks:
- go mod tidy
- go generate ./...
# Configures the Go build process
# https://goreleaser.com/customization/build/
builds:
- id: cbzoptimizer
main: main.go
main: cmd/cbzoptimizer/main.go
goos:
- linux
- darwin
- windows
goarch:
- amd64
- arm64
ignore:
- goos: windows
goarch: arm64
# ensures mod timestamp to be the commit timestamp
mod_timestamp: "{{ .CommitTimestamp }}"
flags:
# trims path
- -trimpath
ldflags:
- -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{ .CommitDate }}
env:
- CGO_ENABLED=0
- id: encoder-setup
main: cmd/encoder-setup/main.go
binary: encoder-setup
goos:
- linux
goarch:
@@ -35,67 +90,67 @@ builds:
flags:
# trims path
- -trimpath
tags:
- encoder_setup
ldflags:
- -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{ .CommitDate }}
env:
- CGO_ENABLED=0
# config the checksum filename
# https://goreleaser.com/customization/checksum
# Configures the checksum file generation
# https://goreleaser.com/customization/checksum/
checksum:
name_template: "checksums.txt"
# create a source tarball
# Change the digests filename for attestation
# https://goreleaser.com/customization/docker_digest/
docker_digest:
name_template: "digests.txt"
# Creates a source code archive (tar.gz and zip)
# https://goreleaser.com/customization/source/
source:
enabled: true
# proxies from the go mod proxy before building
# https://goreleaser.com/customization/gomod
# Configures Go Modules settings
# https://goreleaser.com/customization/gomod/
gomod:
proxy: true
# creates SBOMs of all archives and the source tarball using syft
# https://goreleaser.com/customization/sbom
# Creates SBOMs (Software Bill of Materials)
# https://goreleaser.com/customization/sbom/
sboms:
- artifacts: archive
- id: source # Two different sbom configurations need two different IDs
artifacts: source
# create a docker image
# https://goreleaser.com/customization/docker
dockers:
- image_templates:
- "ghcr.io/belphemur/cbzoptimizer:latest"
- "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}"
dockerfile: Dockerfile
build_flag_templates:
- "--pull"
- "--label=org.opencontainers.image.created={{.Date}}"
- "--label=org.opencontainers.image.name={{.ProjectName}}"
- "--label=org.opencontainers.image.revision={{.FullCommit}}"
- "--label=org.opencontainers.image.version={{.Version}}"
- "--label=org.opencontainers.image.source={{.GitURL}}"
# signs the checksum file
# all files (including the sboms) are included in the checksum, so we don't need to sign each one if we don't want to
# https://goreleaser.com/customization/sign
signs:
- cmd: cosign
env:
- COSIGN_EXPERIMENTAL=1
certificate: "${artifact}.pem"
args:
- sign-blob
- "--output-certificate=${certificate}"
- "--output-signature=${signature}"
- "${artifact}"
- "--yes" # needed on cosign 2.0.0+
artifacts: checksum
output: true
# signs our docker image
# https://goreleaser.com/customization/docker_sign
docker_signs:
- cmd: cosign
env:
- COSIGN_EXPERIMENTAL=1
artifacts: images
output: true
args:
- "sign"
- "${artifact}"
- "--yes" # needed on cosign 2.0.0+
- id: archive # Default ID for archive SBOMs
artifacts: archive # Generate SBOMs for binary archives using Syft
- id: source # Unique ID for source SBOM
artifacts: source # Generate SBOM for the source code archive
# Creates Docker images and pushes them to registries using Docker v2 API
# https://goreleaser.com/customization/docker/
dockers_v2:
- id: cbzoptimizer-image
ids:
- cbzoptimizer
- encoder-setup
platforms:
- linux/amd64
- linux/arm64
images:
- "ghcr.io/belphemur/cbzoptimizer"
tags:
- "{{ .Version }}"
- latest
annotations:
"org.opencontainers.image.description": "CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) and CBR (Comic Book RAR) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality."
"org.opencontainers.image.created": "{{.Date}}"
"org.opencontainers.image.name": "{{.ProjectName}}"
"org.opencontainers.image.revision": "{{.FullCommit}}"
"org.opencontainers.image.version": "{{.Version}}"
"org.opencontainers.image.source": "{{.GitURL}}"
labels:
"org.opencontainers.image.created": "{{.Date}}"
"org.opencontainers.image.name": "{{.ProjectName}}"
"org.opencontainers.image.revision": "{{.FullCommit}}"
"org.opencontainers.image.version": "{{.Version}}"
"org.opencontainers.image.source": "{{.GitURL}}"
"org.opencontainers.image.description": "CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) and CBR (Comic Book RAR) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality."

View File

@@ -0,0 +1,10 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="GoDfaErrorMayBeNotNil" enabled="true" level="WARNING" enabled_by_default="true">
<methods>
<method importPath="github.com/belphemur/CBZOptimizer/converter" receiver="Converter" name="ConvertChapter" />
</methods>
</inspection_tool>
</profile>
</component>

24
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,24 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch file",
"type": "go",
"request": "launch",
"mode": "debug",
"program": "${file}"
},
{
"name": "Optimize Testdata",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/cmd/cbzoptimizer",
"args": ["optimize", "${workspaceFolder}/testdata", "-l", "debug"],
"cwd": "${workspaceFolder}"
}
]
}

View File

@@ -1,21 +1,44 @@
FROM alpine:latest
FROM debian:trixie-slim
LABEL authors="Belphemur"
ARG TARGETPLATFORM
ARG APP_PATH=/usr/local/bin/CBZOptimizer
ENV USER=abc
ENV CONFIG_FOLDER=/config
ENV PUID=99
RUN mkdir -p "${CONFIG_FOLDER}" && adduser \
--disabled-password \
--gecos "" \
--home "$(pwd)" \
--ingroup "users" \
--no-create-home \
ENV DEBIAN_FRONTEND=noninteractive
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update && apt-get install -y --no-install-recommends adduser && \
addgroup --system users && \
adduser \
--system \
--home "${CONFIG_FOLDER}" \
--uid "${PUID}" \
--ingroup users \
--disabled-password \
"${USER}" && \
chown ${PUID}:${GUID} "${CONFIG_FOLDER}"
apt-get purge -y --auto-remove adduser
COPY CBZOptimizer /usr/local/bin/CBZOptimizer
COPY ${TARGETPLATFORM}/CBZOptimizer ${APP_PATH}
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update && \
apt-get full-upgrade -y && \
apt-get install -y --no-install-recommends \
inotify-tools \
bash \
ca-certificates \
bash-completion && \
chmod +x ${APP_PATH} && \
${APP_PATH} completion bash > /etc/bash_completion.d/CBZOptimizer.bash
RUN apk add --no-cache inotify-tools bash-completion && chmod +x /usr/local/bin/CBZOptimizer && /usr/local/bin/CBZOptimizer completion bash > /etc/bash_completion.d/CBZOptimizer
USER ${USER}
# Need to run as the user to have the right config folder created
RUN --mount=type=bind,source=${TARGETPLATFORM},target=/tmp/target \
/tmp/target/encoder-setup
ENTRYPOINT ["/usr/local/bin/CBZOptimizer"]

239
README.md
View File

@@ -1,62 +1,249 @@
# CBZOptimizer
CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality.
CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) and CBR (Comic Book RAR) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality.
**Note**: CBR files are supported as input but are always converted to CBZ format for output.
## Features
- Convert images within CBZ files to different formats (e.g., WebP).
- Convert images within CBZ and CBR files to different formats (e.g., WebP).
- Support for multiple archive formats including CBZ and CBR (CBR files are converted to CBZ format).
- Adjust the quality of the converted images.
- Process multiple chapters in parallel.
- Option to override the original CBZ files.
- Option to override the original files (CBR files are converted to CBZ and original CBR is deleted).
- Watch a folder for new CBZ/CBR files and optimize them automatically.
- Set time limits for chapter conversion to avoid hanging on problematic files.
## Installation
1. Clone the repository:
```sh
git clone https://github.com/belphemur/CBZOptimizer.git
cd CBZOptimizer
```
### Download Binary
2. Install dependencies:
```sh
go mod tidy
```
Download the latest release from [GitHub Releases](https://github.com/belphemur/CBZOptimizer/releases).
### Docker
Pull the Docker image:
```sh
docker pull ghcr.io/belphemur/cbzoptimizer:latest
```
## Usage
### Command Line Interface
The tool provides a CLI command to optimize CBZ files. Below is an example of how to use it:
The tool provides CLI commands to optimize and watch CBZ/CBR files. Below are examples of how to use them:
#### Optimize Command
Optimize all CBZ/CBR files in a folder recursively:
```sh
go run main.go optimize --quality 85 --parallelism 2 --override /path/to/cbz/files
cbzconverter optimize [folder] --quality 85 --parallelism 2 --override --format webp --split
```
The format flag can be specified in multiple ways:
```sh
# Using space-separated syntax
cbzconverter optimize [folder] --format webp
# Using short form with space
cbzconverter optimize [folder] -f webp
# Using equals syntax
cbzconverter optimize [folder] --format=webp
# Format is case-insensitive
cbzconverter optimize [folder] --format WEBP
```
With timeout to avoid hanging on problematic chapters:
```sh
cbzconverter optimize [folder] --timeout 10m --quality 85
```
Or with Docker:
```sh
docker run -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics --quality 85 --parallelism 2 --override --format webp --split
```
#### Watch Command
Watch a folder for new CBZ/CBR files and optimize them automatically:
```sh
cbzconverter watch [folder] --quality 85 --override --format webp --split
```
Or with Docker:
```sh
docker run -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest watch /comics --quality 85 --override --format webp --split
```
### Flags
- `--quality`, `-q`: Quality for conversion (0-100). Default is 85.
- `--parallelism`, `-n`: Number of chapters to convert in parallel. Default is 2.
- `--override`, `-o`: Override the original CBZ files. Default is false.
- `--override`, `-o`: Override the original files. For CBZ files, overwrites the original. For CBR files, deletes the original CBR and creates a new CBZ. Default is false.
- `--split`, `-s`: Split long pages into smaller chunks. Default is false.
- `--format`, `-f`: Format to convert the images to (currently supports: webp). Default is webp.
- Can be specified as: `--format webp`, `-f webp`, or `--format=webp`
- Case-insensitive: `webp`, `WEBP`, and `WebP` are all valid
- `--timeout`, `-t`: Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout. Default is 0.
- `--log`, `-l`: Set log level; can be 'panic', 'fatal', 'error', 'warn', 'info', 'debug', or 'trace'. Default is info.
## Testing
## Logging
To run the tests, use the following command:
CBZOptimizer uses structured logging with [zerolog](https://github.com/rs/zerolog) for consistent and performant logging output.
### Log Levels
You can control the verbosity of logging using either command-line flags or environment variables:
**Command Line:**
```sh
go test ./... -v
# Set log level to debug for detailed output
cbzconverter --log debug optimize [folder]
# Set log level to error for minimal output
cbzconverter --log error optimize [folder]
```
## GitHub Actions
**Environment Variable:**
The project includes a GitHub Actions workflow to run tests on every push and pull request to the `main` branch. The workflow is defined in `.github/workflows/go.yml`.
```sh
# Set log level via environment variable
LOG_LEVEL=debug cbzconverter optimize [folder]
```
## Contributing
**Docker:**
1. Fork the repository.
2. Create a new branch (`git checkout -b feature-branch`).
3. Commit your changes (`git commit -am 'Add new feature'`).
4. Push to the branch (`git push origin feature-branch`).
5. Create a new Pull Request.
```sh
# Set log level via environment variable in Docker
docker run -e LOG_LEVEL=debug -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics
```
### Available Log Levels
- `panic`: Logs panic level messages and above
- `fatal`: Logs fatal level messages and above
- `error`: Logs error level messages and above
- `warn`: Logs warning level messages and above
- `info`: Logs info level messages and above (default)
- `debug`: Logs debug level messages and above
- `trace`: Logs all messages including trace level
### Examples
```sh
# Default info level logging
cbzconverter optimize comics/
# Debug level for troubleshooting
cbzconverter --log debug optimize comics/
# Quiet operation (only errors and above)
cbzconverter --log error optimize comics/
# Using environment variable
LOG_LEVEL=warn cbzconverter optimize comics/
# Docker with debug logging
docker run -e LOG_LEVEL=debug -v /path/to/comics:/comics ghcr.io/belphemur/cbzoptimizer:latest optimize /comics
```
## Docker Image
The official Docker image is available at: `ghcr.io/belphemur/cbzoptimizer:latest`
### Docker Compose
You can use Docker Compose to run CBZOptimizer with persistent configuration. Create a `docker-compose.yml` file:
```yaml
version: '3.8'
services:
cbzoptimizer:
image: ghcr.io/belphemur/cbzoptimizer:latest
container_name: cbzoptimizer
environment:
# Set log level (panic, fatal, error, warn, info, debug, trace)
- LOG_LEVEL=info
# User and Group ID for file permissions
- PUID=99
- PGID=100
volumes:
# Mount your comics directory
- /path/to/your/comics:/comics
# Optional: Mount a config directory for persistent settings
- ./config:/config
# Example: Optimize all comics in the /comics directory
command: optimize /comics --quality 85 --parallelism 2 --override --format webp --split
restart: unless-stopped
```
For watch mode, you can create a separate service:
```yaml
cbzoptimizer-watch:
image: ghcr.io/belphemur/cbzoptimizer:latest
container_name: cbzoptimizer-watch
environment:
- LOG_LEVEL=info
- PUID=99
- PGID=100
volumes:
- /path/to/watch/directory:/watch
- ./config:/config
# Watch for new files and automatically optimize them
command: watch /watch --quality 85 --override --format webp --split
restart: unless-stopped
```
**Important Notes:**
- Replace `/path/to/your/comics` and `/path/to/watch/directory` with your actual directory paths
- The `PUID` and `PGID` environment variables control file permissions (default: 99/100)
- The `LOG_LEVEL` environment variable sets the logging verbosity
- For one-time optimization, remove the `restart: unless-stopped` line
- Watch mode only works on Linux systems
#### Running with Docker Compose
```sh
# Start the service (one-time optimization)
docker-compose up cbzoptimizer
# Start in detached mode
docker-compose up -d cbzoptimizer
# Start watch mode service
docker-compose up -d cbzoptimizer-watch
# View logs
docker-compose logs -f cbzoptimizer
# Stop services
docker-compose down
```
## Troubleshooting
If you encounter issues:
1. Use `--log debug` for detailed logging output
2. Check that all required dependencies are installed
3. Ensure proper file permissions for input/output directories
4. For Docker usage, verify volume mounts are correct
## Support
For issues and questions, please use [GitHub Issues](https://github.com/belphemur/CBZOptimizer/issues).
## License

View File

@@ -1,90 +0,0 @@
package cbz
import (
"archive/zip"
"fmt"
"github.com/belphemur/CBZOptimizer/manga"
"os"
"time"
)
// WriteChapterToCBZ writes a chapter to a .cbz (ZIP) archive at outputFilePath.
//
// Pages are stored uncompressed (zip.Store) since image data is already
// compressed; metadata entries use zip.Deflate. Split pages are named
// "page_%04d-%02d<ext>" and regular pages "page_%04d<ext>". When present,
// ComicInfo.xml is written verbatim, and converted chapters get a
// Converted.txt marker containing the conversion timestamp.
func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
	// Create a new ZIP file
	zipFile, err := os.Create(outputFilePath)
	if err != nil {
		return fmt.Errorf("failed to create .cbz file: %w", err)
	}
	// Deferred close is only a safety net for early error returns; the
	// success path closes explicitly below so close errors are reported.
	defer zipFile.Close()

	// Create a new ZIP writer
	zipWriter := zip.NewWriter(zipFile)
	if err := zipWriter.SetComment("Created by CBZOptimizer"); err != nil {
		return err
	}

	// Write each page to the ZIP archive
	for _, page := range chapter.Pages {
		// Construct the file name for the page
		var fileName string
		if page.IsSplitted {
			// Use the format page_%04d-%02d for split pages
			fileName = fmt.Sprintf("page_%04d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
		} else {
			// Use the format page_%04d for non-split pages
			fileName = fmt.Sprintf("page_%04d%s", page.Index, page.Extension)
		}

		// Create a new file in the ZIP archive
		fileWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
			Name:     fileName,
			Method:   zip.Store,
			Modified: time.Now(),
		})
		if err != nil {
			return fmt.Errorf("failed to create file in .cbz: %w", err)
		}

		// Write the page contents to the file
		if _, err = fileWriter.Write(page.Contents.Bytes()); err != nil {
			return fmt.Errorf("failed to write page contents: %w", err)
		}
	}

	// Optionally, write the ComicInfo.xml file if present
	if chapter.ComicInfoXml != "" {
		comicInfoWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
			Name:     "ComicInfo.xml",
			Method:   zip.Deflate,
			Modified: time.Now(),
		})
		if err != nil {
			return fmt.Errorf("failed to create ComicInfo.xml in .cbz: %w", err)
		}
		if _, err = comicInfoWriter.Write([]byte(chapter.ComicInfoXml)); err != nil {
			return fmt.Errorf("failed to write ComicInfo.xml contents: %w", err)
		}
	}

	// Mark converted chapters so they can be recognized on a later run.
	if chapter.IsConverted {
		convertedWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
			Name:     "Converted.txt",
			Method:   zip.Deflate,
			Modified: time.Now(),
		})
		if err != nil {
			return fmt.Errorf("failed to create Converted.txt in .cbz: %w", err)
		}
		if _, err = convertedWriter.Write([]byte(fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime))); err != nil {
			return fmt.Errorf("failed to write Converted.txt contents: %w", err)
		}
	}

	// Close explicitly and check the errors: zipWriter.Close flushes the
	// central directory and zipFile.Close flushes to disk. The previous
	// deferred closes swallowed these errors, so a corrupt or truncated
	// archive could be reported as success.
	if err := zipWriter.Close(); err != nil {
		return fmt.Errorf("failed to finalize .cbz archive: %w", err)
	}
	if err := zipFile.Close(); err != nil {
		return fmt.Errorf("failed to close .cbz file: %w", err)
	}
	return nil
}

View File

@@ -1,90 +0,0 @@
package cbz
import (
"archive/zip"
"bufio"
"bytes"
"fmt"
"github.com/araddon/dateparse"
"github.com/belphemur/CBZOptimizer/manga"
"io"
"path/filepath"
"strings"
)
// LoadChapter reads a .cbz archive at filePath into a manga.Chapter.
//
// Image entries become Pages, indexed in archive order. A ComicInfo.xml
// entry is captured verbatim into ComicInfoXml, and a Converted.txt marker
// (written by WriteChapterToCBZ) sets IsConverted and ConvertedTime.
func LoadChapter(filePath string) (*manga.Chapter, error) {
	// Open the .cbz file
	r, err := zip.OpenReader(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to open .cbz file: %w", err)
	}
	defer r.Close()

	chapter := &manga.Chapter{
		FilePath: filePath,
	}

	for _, f := range r.File {
		if f.FileInfo().IsDir() {
			continue
		}
		if err := loadEntry(chapter, f); err != nil {
			return nil, err
		}
	}
	return chapter, nil
}

// loadEntry reads a single archive entry into the chapter. Extracting it into
// its own function lets a deferred Close handle the entry reader on every
// return path (the original repeated rc.Close() before each error return).
func loadEntry(chapter *manga.Chapter, f *zip.File) error {
	rc, err := f.Open()
	if err != nil {
		return fmt.Errorf("failed to open file inside .cbz: %w", err)
	}
	defer rc.Close()

	// Determine the file extension and base name (both lowercased).
	ext := strings.ToLower(filepath.Ext(f.Name))
	base := strings.ToLower(filepath.Base(f.Name))

	switch {
	case ext == ".xml" && base == "comicinfo.xml":
		// Read the ComicInfo.xml file content
		xmlContent, err := io.ReadAll(rc)
		if err != nil {
			return fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
		}
		chapter.ComicInfoXml = string(xmlContent)
	case ext == ".txt" && base == "converted.txt":
		textContent, err := io.ReadAll(rc)
		if err != nil {
			// Fixed: the original message wrongly said "Converted.xml".
			return fmt.Errorf("failed to read Converted.txt content: %w", err)
		}
		// The first line of the marker is the conversion timestamp.
		scanner := bufio.NewScanner(bytes.NewReader(textContent))
		if scanner.Scan() {
			convertedTime := scanner.Text()
			chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
			if err != nil {
				return fmt.Errorf("failed to parse converted time: %w", err)
			}
			chapter.IsConverted = true
		}
	default:
		// Any other entry is treated as a page image.
		buf := new(bytes.Buffer)
		if _, err := io.Copy(buf, rc); err != nil {
			return fmt.Errorf("failed to read file contents: %w", err)
		}
		chapter.Pages = append(chapter.Pages, &manga.Page{
			Index:      uint16(len(chapter.Pages)), // Simple index based on order
			Extension:  ext,
			Size:       uint64(buf.Len()),
			Contents:   buf,
			IsSplitted: false,
		})
	}
	return nil
}

View File

@@ -0,0 +1,100 @@
package commands
import (
"fmt"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/thediveo/enumflag/v2"
)
// setupFormatFlag registers the --format/-f flag on cmd, backed by the given
// ConversionFormat enum value. The flag is case-insensitive and gets shell
// completion for the known formats. When bindViper is true the flag is also
// bound to viper so it can be set from a configuration file (used by the
// watch command; the optimize command passes false).
func setupFormatFlag(cmd *cobra.Command, converterType *constant.ConversionFormat, bindViper bool) {
	enumFlag := enumflag.New(converterType, "format", constant.CommandValue, enumflag.EnumCaseInsensitive)
	_ = enumFlag.RegisterCompletion(cmd, "format", constant.HelpText)
	usage := fmt.Sprintf("Format to convert the images to: %s", constant.ListAll())
	cmd.Flags().VarP(enumFlag, "format", "f", usage)
	if !bindViper {
		return
	}
	_ = viper.BindPFlag("format", cmd.Flags().Lookup("format"))
}
// setupQualityFlag registers the --quality/-q flag (0-100) on cmd with the
// given default. When bindViper is true the flag is also bound to viper for
// configuration-file support.
func setupQualityFlag(cmd *cobra.Command, defaultValue uint8, bindViper bool) {
	cmd.Flags().Uint8P("quality", "q", defaultValue, "Quality for conversion (0-100)")
	if !bindViper {
		return
	}
	_ = viper.BindPFlag("quality", cmd.Flags().Lookup("quality"))
}
// setupOverrideFlag registers the --override/-o flag on cmd with the given
// default. When bindViper is true the flag is also bound to viper for
// configuration-file support.
func setupOverrideFlag(cmd *cobra.Command, defaultValue bool, bindViper bool) {
	cmd.Flags().BoolP("override", "o", defaultValue, "Override the original CBZ/CBR files")
	if !bindViper {
		return
	}
	_ = viper.BindPFlag("override", cmd.Flags().Lookup("override"))
}
// setupSplitFlag registers the --split/-s flag on cmd with the given default.
// When bindViper is true the flag is also bound to viper for
// configuration-file support.
func setupSplitFlag(cmd *cobra.Command, defaultValue bool, bindViper bool) {
	cmd.Flags().BoolP("split", "s", defaultValue, "Split long pages into smaller chunks")
	if !bindViper {
		return
	}
	_ = viper.BindPFlag("split", cmd.Flags().Lookup("split"))
}
// setupTimeoutFlag registers the --timeout/-t duration flag on cmd
// (default 0, meaning no timeout). When bindViper is true the flag is also
// bound to viper for configuration-file support.
func setupTimeoutFlag(cmd *cobra.Command, bindViper bool) {
	cmd.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout")
	if !bindViper {
		return
	}
	_ = viper.BindPFlag("timeout", cmd.Flags().Lookup("timeout"))
}
// setupCommonFlags registers the flags shared by the optimize and watch
// commands: format, quality, override, split, and timeout.
//
//   - converterType receives the parsed --format value
//   - qualityDefault, overrideDefault, splitDefault set the flag defaults
//   - bindViper, when true, binds every flag to viper so values can also
//     come from a configuration file
func setupCommonFlags(cmd *cobra.Command, converterType *constant.ConversionFormat, qualityDefault uint8, overrideDefault bool, splitDefault bool, bindViper bool) {
	setupFormatFlag(cmd, converterType, bindViper)
	setupQualityFlag(cmd, qualityDefault, bindViper)
	setupOverrideFlag(cmd, overrideDefault, bindViper)
	setupSplitFlag(cmd, splitDefault, bindViper)
	setupTimeoutFlag(cmd, bindViper)
}

View File

@@ -0,0 +1,183 @@
package commands
import (
"fmt"
"os"
"path/filepath"
"strings"
"sync"
utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
// converterType holds the image format selected via the --format flag.
var converterType constant.ConversionFormat

// init registers the "optimize" subcommand and its flags with the root command.
func init() {
	cmd := &cobra.Command{
		Use:   "optimize [folder]",
		Short: "Optimize all CBZ/CBR files in a folder recursively",
		Long:  "Optimize all CBZ/CBR files in a folder recursively.\nIt will take all the different pages in the CBZ/CBR files and convert them to the given format.\nThe original CBZ/CBR files will be kept intact depending if you choose to override or not.",
		RunE:  ConvertCbzCommand,
		Args:  cobra.ExactArgs(1),
	}

	// Flags shared with the watch command (format, quality, override, split, timeout).
	setupCommonFlags(cmd, &converterType, 85, false, false, false)

	// Flag specific to optimize: how many chapters to convert at once.
	cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")

	AddCommand(cmd)
}
// ConvertCbzCommand implements the "optimize" subcommand: it walks the given
// folder recursively and converts every .cbz/.cbr archive found, using a
// bounded pool of worker goroutines.
//
// Flags read from cmd: quality (1-100), override, split, timeout, and
// parallelism (>= 1). It returns an error if any flag is invalid, the walk
// fails, or any file fails to convert; per-file conversion errors are
// collected and reported together after all workers finish.
func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
	log.Info().Str("command", "optimize").Msg("Starting optimize command")
	path := args[0]
	if path == "" {
		log.Error().Msg("Path argument is required but empty")
		return fmt.Errorf("path is required")
	}
	log.Debug().Str("input_path", path).Msg("Validating input path")
	if !utils2.IsValidFolder(path) {
		log.Error().Str("input_path", path).Msg("Path validation failed - not a valid folder")
		return fmt.Errorf("the path needs to be a folder")
	}
	log.Debug().Str("input_path", path).Msg("Input path validated successfully")
	log.Debug().Msg("Parsing command-line flags")
	quality, err := cmd.Flags().GetUint8("quality")
	if err != nil || quality <= 0 || quality > 100 {
		log.Error().Err(err).Uint8("quality", quality).Msg("Invalid quality value")
		return fmt.Errorf("invalid quality value")
	}
	log.Debug().Uint8("quality", quality).Msg("Quality parameter validated")
	override, err := cmd.Flags().GetBool("override")
	if err != nil {
		log.Error().Err(err).Msg("Failed to parse override flag")
		// Fixed: previously reported "invalid quality value" (copy-paste error).
		return fmt.Errorf("invalid override value")
	}
	log.Debug().Bool("override", override).Msg("Override parameter parsed")
	split, err := cmd.Flags().GetBool("split")
	if err != nil {
		log.Error().Err(err).Msg("Failed to parse split flag")
		return fmt.Errorf("invalid split value")
	}
	log.Debug().Bool("split", split).Msg("Split parameter parsed")
	timeout, err := cmd.Flags().GetDuration("timeout")
	if err != nil {
		log.Error().Err(err).Msg("Failed to parse timeout flag")
		return fmt.Errorf("invalid timeout value")
	}
	log.Debug().Dur("timeout", timeout).Msg("Timeout parameter parsed")
	parallelism, err := cmd.Flags().GetInt("parallelism")
	if err != nil || parallelism < 1 {
		log.Error().Err(err).Int("parallelism", parallelism).Msg("Invalid parallelism value")
		return fmt.Errorf("invalid parallelism value")
	}
	log.Debug().Int("parallelism", parallelism).Msg("Parallelism parameter validated")
	log.Debug().Str("converter_format", converterType.String()).Msg("Initializing converter")
	chapterConverter, err := converter.Get(converterType)
	if err != nil {
		log.Error().Str("converter_format", converterType.String()).Err(err).Msg("Failed to get chapter converter")
		return fmt.Errorf("failed to get chapterConverter: %v", err)
	}
	log.Debug().Str("converter_format", converterType.String()).Msg("Converter initialized successfully")
	log.Debug().Msg("Preparing converter")
	err = chapterConverter.PrepareConverter()
	if err != nil {
		log.Error().Err(err).Msg("Failed to prepare converter")
		return fmt.Errorf("failed to prepare converter: %v", err)
	}
	log.Debug().Msg("Converter prepared successfully")
	// Channel feeding discovered archive paths to the workers.
	fileChan := make(chan string)
	// Per-file errors, guarded by errMutex since workers append concurrently.
	var errs []error
	var errMutex sync.Mutex
	// WaitGroup to wait for all worker goroutines to finish.
	var wg sync.WaitGroup
	// Start worker goroutines.
	log.Debug().Int("worker_count", parallelism).Msg("Starting worker goroutines")
	for i := 0; i < parallelism; i++ {
		wg.Add(1)
		go func(workerID int) {
			defer wg.Done()
			log.Debug().Int("worker_id", workerID).Msg("Worker started")
			for path := range fileChan {
				log.Debug().Int("worker_id", workerID).Str("file_path", path).Msg("Worker processing file")
				err := utils2.Optimize(&utils2.OptimizeOptions{
					ChapterConverter: chapterConverter,
					Path:             path,
					Quality:          quality,
					Override:         override,
					Split:            split,
					Timeout:          timeout,
				})
				if err != nil {
					log.Error().Int("worker_id", workerID).Str("file_path", path).Err(err).Msg("Worker encountered error")
					errMutex.Lock()
					errs = append(errs, fmt.Errorf("error processing file %s: %w", path, err))
					errMutex.Unlock()
				} else {
					log.Debug().Int("worker_id", workerID).Str("file_path", path).Msg("Worker completed file successfully")
				}
			}
			log.Debug().Int("worker_id", workerID).Msg("Worker finished")
		}(i)
	}
	log.Debug().Int("worker_count", parallelism).Msg("All worker goroutines started")
	// Walk the path and send files to the channel.
	log.Debug().Str("search_path", path).Msg("Starting filesystem walk for CBZ/CBR files")
	walkErr := filepath.WalkDir(path, func(filePath string, info os.DirEntry, err error) error {
		if err != nil {
			log.Error().Str("file_path", filePath).Err(err).Msg("Error during filesystem walk")
			return err
		}
		if !info.IsDir() {
			fileName := strings.ToLower(info.Name())
			if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
				log.Debug().Str("file_path", filePath).Str("file_name", fileName).Msg("Found CBZ/CBR file")
				fileChan <- filePath
			}
		}
		return nil
	})
	// Always close the channel and drain the workers before handling the
	// walk error. Fixed: the original returned early on a walk error without
	// closing fileChan, leaking every worker goroutine blocked on the channel.
	close(fileChan)
	log.Debug().Msg("File channel closed, waiting for workers to complete")
	wg.Wait()
	log.Debug().Msg("All workers completed")
	if walkErr != nil {
		log.Error().Str("search_path", path).Err(walkErr).Msg("Filesystem walk failed")
		return fmt.Errorf("error walking the path: %w", walkErr)
	}
	log.Debug().Str("search_path", path).Msg("Filesystem walk completed")
	if len(errs) > 0 {
		log.Error().Int("error_count", len(errs)).Msg("Command completed with errors")
		return fmt.Errorf("encountered errors: %v", errs)
	}
	log.Info().Str("search_path", path).Msg("Optimize command completed successfully")
	return nil
}

View File

@@ -0,0 +1,546 @@
package commands
import (
"context"
"fmt"
"log"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/spf13/cobra"
)
// MockConverter is a mock implementation of the Converter interface used by
// the tests; it marks chapters as converted without touching any page data.
type MockConverter struct{}

// ConvertChapter stamps the chapter with the current time and flags it as
// converted, returning it unchanged otherwise.
func (m *MockConverter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
	chapter.ConvertedTime = time.Now()
	chapter.IsConverted = true
	return chapter, nil
}

// Format reports the conversion format this mock pretends to produce.
func (m *MockConverter) Format() constant.ConversionFormat {
	return constant.WebP
}

// PrepareConverter is a no-op for the mock.
func (m *MockConverter) PrepareConverter() error {
	return nil
}
func TestConvertCbzCommand(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_cbz")
if err != nil {
log.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Locate the testdata directory
testdataDir := filepath.Join("../../../testdata")
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Fatalf("testdata directory not found")
}
// Copy sample CBZ/CBR files from testdata to the temporary directory
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() {
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
}
return os.WriteFile(destPath, data, info.Mode())
}
}
return nil
})
if err != nil {
t.Fatalf("Failed to copy sample files: %v", err)
}
// Mock the converter.Get function
originalGet := converter.Get
converter.Get = func(format constant.ConversionFormat) (converter.Converter, error) {
return &MockConverter{}, nil
}
defer func() { converter.Get = originalGet }()
// Set up the command
cmd := &cobra.Command{
Use: "optimize",
}
cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
cmd.Flags().BoolP("override", "o", false, "Override the original CBZ/CBR files")
cmd.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
cmd.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter (e.g., 30s, 5m, 1h). 0 means no timeout")
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed: %v", err)
}
// Track expected converted files for verification
expectedFiles := make(map[string]bool)
convertedFiles := make(map[string]bool)
// First pass: identify original files and expected converted filenames
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
fileName := strings.ToLower(info.Name())
if strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr") {
if !strings.Contains(fileName, "_converted") {
// This is an original file, determine expected converted filename
baseName := strings.TrimSuffix(info.Name(), filepath.Ext(info.Name()))
expectedConverted := baseName + "_converted.cbz"
expectedFiles[expectedConverted] = false // false means not yet found
}
}
return nil
})
if err != nil {
t.Fatalf("Error identifying original files: %v", err)
}
// Second pass: verify converted files exist and are properly converted
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
fileName := info.Name()
// Check if this is a converted file (should only be .cbz, never .cbr)
if strings.HasSuffix(fileName, "_converted.cbz") {
convertedFiles[fileName] = true
expectedFiles[fileName] = true // Mark as found
t.Logf("Archive file found: %s", path)
// Load the converted chapter
chapter, err := cbz.LoadChapter(path)
if err != nil {
return err
}
// Check if the chapter is marked as converted
if !chapter.IsConverted {
t.Errorf("Chapter is not marked as converted: %s", path)
}
// Check if the ConvertedTime is set
if chapter.ConvertedTime.IsZero() {
t.Errorf("ConvertedTime is not set for chapter: %s", path)
}
t.Logf("Archive file [%s] is converted: %s", path, chapter.ConvertedTime)
} else if strings.HasSuffix(fileName, "_converted.cbr") {
t.Errorf("Found incorrectly named converted file: %s (should be .cbz, not .cbr)", fileName)
}
return nil
})
if err != nil {
t.Fatalf("Error verifying converted files: %v", err)
}
// Verify all expected files were found
for expectedFile, found := range expectedFiles {
if !found {
t.Errorf("Expected converted file not found: %s", expectedFile)
}
}
// Log summary
t.Logf("Found %d converted files", len(convertedFiles))
}
// setupTestCommand creates a test command with all required flags for testing.
// It mocks the converter.Get function and sets up a complete command with all flags.
//
// Returns:
//   - *cobra.Command: A configured command ready for testing
//   - func(): A cleanup function that must be deferred to restore the original converter.Get
func setupTestCommand(t *testing.T) (*cobra.Command, func()) {
	t.Helper()

	// Swap converter.Get for a stub; the returned cleanup restores it.
	previousGet := converter.Get
	converter.Get = func(format constant.ConversionFormat) (converter.Converter, error) {
		return &MockConverter{}, nil
	}
	restore := func() {
		converter.Get = previousGet
	}

	// Build the command and register every flag the optimize command reads.
	testCmd := &cobra.Command{Use: "optimize"}
	testCmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
	testCmd.Flags().IntP("parallelism", "n", 1, "Number of chapters to convert in parallel")
	testCmd.Flags().BoolP("override", "o", false, "Override the original CBZ/CBR files")
	testCmd.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
	testCmd.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter")

	// Reset converterType to its default so state does not leak between tests.
	converterType = constant.DefaultConversion
	setupFormatFlag(testCmd, &converterType, false)

	return testCmd, restore
}
// TestFormatFlagWithSpace tests that the format flag works with space-separated values
func TestFormatFlagWithSpace(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_format_space")
if err != nil {
t.Fatalf("Failed to create temp directory: %v", err)
}
defer os.RemoveAll(tempDir)
cmd, cleanup := setupTestCommand(t)
defer cleanup()
// Test with space-separated format flag (--format webp)
cmd.ParseFlags([]string{"--format", "webp"})
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed with --format webp: %v", err)
}
// Verify the format was set correctly
if converterType != constant.WebP {
t.Errorf("Expected format to be WebP, got %v", converterType)
}
}
// TestFormatFlagWithShortForm tests that the short form of format flag works with space-separated values
func TestFormatFlagWithShortForm(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_format_short")
if err != nil {
t.Fatalf("Failed to create temp directory: %v", err)
}
defer os.RemoveAll(tempDir)
cmd, cleanup := setupTestCommand(t)
defer cleanup()
// Test with short form and space (-f webp)
cmd.ParseFlags([]string{"-f", "webp"})
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed with -f webp: %v", err)
}
// Verify the format was set correctly
if converterType != constant.WebP {
t.Errorf("Expected format to be WebP, got %v", converterType)
}
}
// TestFormatFlagWithEquals tests that the format flag works with equals syntax
func TestFormatFlagWithEquals(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_format_equals")
if err != nil {
t.Fatalf("Failed to create temp directory: %v", err)
}
defer os.RemoveAll(tempDir)
cmd, cleanup := setupTestCommand(t)
defer cleanup()
// Test with equals syntax (--format=webp)
cmd.ParseFlags([]string{"--format=webp"})
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed with --format=webp: %v", err)
}
// Verify the format was set correctly
if converterType != constant.WebP {
t.Errorf("Expected format to be WebP, got %v", converterType)
}
}
// TestFormatFlagDefaultValue tests that the default format is used when flag is not provided
func TestFormatFlagDefaultValue(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_format_default")
if err != nil {
t.Fatalf("Failed to create temp directory: %v", err)
}
defer os.RemoveAll(tempDir)
cmd, cleanup := setupTestCommand(t)
defer cleanup()
// Don't set format flag - should use default
cmd.ParseFlags([]string{})
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed with default format: %v", err)
}
// Verify the default format is used
if converterType != constant.DefaultConversion {
t.Errorf("Expected format to be default (%v), got %v", constant.DefaultConversion, converterType)
}
}
// TestFormatFlagCaseInsensitive tests that the format flag is case-insensitive
func TestFormatFlagCaseInsensitive(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_format_case")
if err != nil {
t.Fatalf("Failed to create temp directory: %v", err)
}
defer os.RemoveAll(tempDir)
testCases := []string{"webp", "WEBP", "WebP", "WeBp"}
for _, formatValue := range testCases {
t.Run(formatValue, func(t *testing.T) {
cmd, cleanup := setupTestCommand(t)
defer cleanup()
// Test with different case variations
cmd.ParseFlags([]string{"--format", formatValue})
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed with format '%s': %v", formatValue, err)
}
// Verify the format was set correctly
if converterType != constant.WebP {
t.Errorf("Expected format to be WebP for input '%s', got %v", formatValue, converterType)
}
})
}
}
// TestConvertCbzCommand_ManyFiles_NoDeadlock tests that processing many files in parallel
// does not cause a deadlock. This reproduces the scenario where processing
// recursive folders of CBZ files with parallelism > 1 could cause a "all goroutines are asleep - deadlock!" error.
func TestConvertCbzCommand_ManyFiles_NoDeadlock(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_cbz_many_files")
if err != nil {
log.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Locate the testdata directory
testdataDir := filepath.Join("../../../testdata")
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Fatalf("testdata directory not found")
}
// Create subdirectories to simulate the recursive folder structure from the bug report
subdirs := []string{"author1/book1", "author2/book2", "author3/book3", "author4/book4"}
for _, subdir := range subdirs {
err := os.MkdirAll(filepath.Join(tempDir, subdir), 0755)
if err != nil {
t.Fatalf("Failed to create subdirectory: %v", err)
}
}
// Find a sample CBZ file to copy
var sampleCBZ string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") && !strings.Contains(info.Name(), "converted") {
sampleCBZ = path
return filepath.SkipDir
}
return nil
})
if err != nil || sampleCBZ == "" {
t.Fatalf("Failed to find sample CBZ file: %v", err)
}
// Copy the sample file to multiple locations (simulating many files to process)
numFilesPerDir := 5
totalFiles := 0
for _, subdir := range subdirs {
for i := 0; i < numFilesPerDir; i++ {
destPath := filepath.Join(tempDir, subdir, fmt.Sprintf("Chapter_%d.cbz", i+1))
data, err := os.ReadFile(sampleCBZ)
if err != nil {
t.Fatalf("Failed to read sample file: %v", err)
}
err = os.WriteFile(destPath, data, 0644)
if err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
totalFiles++
}
}
t.Logf("Created %d test files across %d directories", totalFiles, len(subdirs))
// Mock the converter.Get function
originalGet := converter.Get
converter.Get = func(format constant.ConversionFormat) (converter.Converter, error) {
return &MockConverter{}, nil
}
defer func() { converter.Get = originalGet }()
// Set up the command with parallelism = 2 (same as the bug report)
cmd := &cobra.Command{
Use: "optimize",
}
cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
cmd.Flags().BoolP("override", "o", false, "Override the original CBZ/CBR files")
cmd.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
cmd.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter")
converterType = constant.DefaultConversion
setupFormatFlag(cmd, &converterType, false)
// Run the command with a timeout to detect deadlocks
done := make(chan error, 1)
go func() {
done <- ConvertCbzCommand(cmd, []string{tempDir})
}()
select {
case err := <-done:
if err != nil {
t.Fatalf("Command execution failed: %v", err)
}
t.Logf("Command completed successfully without deadlock")
case <-time.After(60 * time.Second):
t.Fatal("Deadlock detected: Command did not complete within 60 seconds")
}
// Verify that converted files were created
var convertedCount int
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(info.Name(), "_converted.cbz") {
convertedCount++
}
return nil
})
if err != nil {
t.Fatalf("Error counting converted files: %v", err)
}
if convertedCount != totalFiles {
t.Errorf("Expected %d converted files, found %d", totalFiles, convertedCount)
}
t.Logf("Found %d converted files as expected", convertedCount)
}
// TestConvertCbzCommand_HighParallelism_NoDeadlock tests processing with high parallelism setting.
func TestConvertCbzCommand_HighParallelism_NoDeadlock(t *testing.T) {
// Create a temporary directory
tempDir, err := os.MkdirTemp("", "test_cbz_high_parallel")
if err != nil {
log.Fatal(err)
}
defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
// Locate the testdata directory
testdataDir := filepath.Join("../../../testdata")
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Fatalf("testdata directory not found")
}
// Find and copy sample CBZ files
var sampleCBZ string
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") && !strings.Contains(info.Name(), "converted") {
sampleCBZ = path
return filepath.SkipDir
}
return nil
})
if err != nil || sampleCBZ == "" {
t.Fatalf("Failed to find sample CBZ file: %v", err)
}
// Create many test files
numFiles := 15
for i := 0; i < numFiles; i++ {
destPath := filepath.Join(tempDir, fmt.Sprintf("test_file_%d.cbz", i+1))
data, err := os.ReadFile(sampleCBZ)
if err != nil {
t.Fatalf("Failed to read sample file: %v", err)
}
err = os.WriteFile(destPath, data, 0644)
if err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
}
// Mock the converter
originalGet := converter.Get
converter.Get = func(format constant.ConversionFormat) (converter.Converter, error) {
return &MockConverter{}, nil
}
defer func() { converter.Get = originalGet }()
// Test with high parallelism (8)
cmd := &cobra.Command{
Use: "optimize",
}
cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
cmd.Flags().IntP("parallelism", "n", 8, "Number of chapters to convert in parallel")
cmd.Flags().BoolP("override", "o", false, "Override the original CBZ/CBR files")
cmd.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
cmd.Flags().DurationP("timeout", "t", 0, "Maximum time allowed for converting a single chapter")
converterType = constant.DefaultConversion
setupFormatFlag(cmd, &converterType, false)
done := make(chan error, 1)
go func() {
done <- ConvertCbzCommand(cmd, []string{tempDir})
}()
select {
case err := <-done:
if err != nil {
t.Fatalf("Command execution failed: %v", err)
}
case <-time.After(60 * time.Second):
t.Fatal("Deadlock detected with high parallelism")
}
}

View File

@@ -0,0 +1,135 @@
package commands
import (
"fmt"
"os"
"path/filepath"
"runtime"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/thediveo/enumflag/v2"
)
// Map zerolog levels to their textual representations accepted by the --log
// enum flag; where a level has aliases (warn/warning) the first entry is the
// canonical spelling.
var LogLevelIds = map[zerolog.Level][]string{
	zerolog.PanicLevel: {"panic"},
	zerolog.FatalLevel: {"fatal"},
	zerolog.ErrorLevel: {"error"},
	zerolog.WarnLevel:  {"warn", "warning"},
	zerolog.InfoLevel:  {"info"},
	zerolog.DebugLevel: {"debug"},
	zerolog.TraceLevel: {"trace"},
}

// Global log level variable with default (info); written by the --log flag
// registered in init and read by ConfigureLogging.
var logLevel zerolog.Level = zerolog.InfoLevel

// rootCmd is the top-level CLI command every subcommand attaches to via AddCommand.
var rootCmd = &cobra.Command{
	Use:   "cbzconverter",
	Short: "Convert CBZ files using a specified converter",
}
// SetVersionInfo records the build metadata shown by the root command's
// --version output. Note the argument order in the message: version, date, commit.
func SetVersionInfo(version, commit, date string) {
	versionString := fmt.Sprintf("%s (Built on %s from Git SHA %s)", version, date, commit)
	rootCmd.Version = versionString
}
// getPath returns the per-OS configuration directory for CBZOptimizer:
// %APPDATA% on Windows, $HOME/.config on darwin and linux. On any other
// GOOS the base is empty, yielding a relative "CBZOptimizer" path, exactly
// as the previous map-based lookup did.
func getPath() string {
	var base string
	switch runtime.GOOS {
	case "windows":
		base = os.Getenv("APPDATA")
	case "darwin", "linux":
		base = filepath.Join(os.Getenv("HOME"), ".config")
	}
	return filepath.Join(base, "CBZOptimizer")
}
// init wires up viper configuration, the --log enum flag, and the logging
// hook before any command runs.
func init() {
	configFolder := getPath()
	// Config file lives at <configFolder>/config.yaml; CBZ_* environment
	// variables override file values.
	viper.SetConfigName("config")
	viper.SetConfigType("yaml")
	viper.AddConfigPath(configFolder)
	viper.SetEnvPrefix("CBZ")
	viper.AutomaticEnv()
	// Add log level flag (accepts zerolog levels: panic, fatal, error, warn, info, debug, trace)
	ef := enumflag.New(&logLevel, "log", LogLevelIds, enumflag.EnumCaseInsensitive)
	rootCmd.PersistentFlags().VarP(
		ef,
		"log", "l",
		"Set log level; can be 'panic', 'fatal', 'error', 'warn', 'info', 'debug', or 'trace'")
	// Shell-completion help text for each accepted level.
	ef.RegisterCompletion(rootCmd, "log", enumflag.Help[zerolog.Level]{
		zerolog.PanicLevel: "Only log panic messages",
		zerolog.FatalLevel: "Log fatal and panic messages",
		zerolog.ErrorLevel: "Log error, fatal, and panic messages",
		zerolog.WarnLevel:  "Log warn, error, fatal, and panic messages",
		zerolog.InfoLevel:  "Log info, warn, error, fatal, and panic messages",
		zerolog.DebugLevel: "Log debug, info, warn, error, fatal, and panic messages",
		zerolog.TraceLevel: "Log all messages including trace",
	})
	// Add log level environment variable support.
	// NOTE(review): BindEnv/BindPFlag return errors that are discarded here —
	// consider capturing them, even if they can only fail on empty input.
	viper.BindEnv("log", "LOG_LEVEL")
	viper.BindPFlag("log", rootCmd.PersistentFlags().Lookup("log"))
	// Apply the chosen level before every subcommand's Run/RunE executes.
	rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
		ConfigureLogging()
	}
	// Ensure the configuration directory exists.
	err := os.MkdirAll(configFolder, os.ModePerm)
	if err != nil {
		panic(fmt.Errorf("fatal error config file: %w", err))
	}
	// Create a default config file on first run; any other read failure is fatal.
	if err := viper.ReadInConfig(); err != nil {
		if _, ok := err.(viper.ConfigFileNotFoundError); ok {
			err := viper.SafeWriteConfig()
			if err != nil {
				panic(fmt.Errorf("fatal error config file: %w", err))
			}
		} else {
			panic(fmt.Errorf("fatal error config file: %w", err))
		}
	}
}
// Execute runs the root command tree and terminates the process with a fatal
// log entry when command execution reports an error.
func Execute() {
	err := rootCmd.Execute()
	if err != nil {
		log.Fatal().Err(err).Msg("Command execution failed")
	}
}
// AddCommand registers a subcommand on the root command; called from the
// init functions of the individual command files.
func AddCommand(cmd *cobra.Command) {
	rootCmd.AddCommand(cmd)
}
// ConfigureLogging sets up zerolog based on command-line flags and environment variables.
// Precedence as implemented: default info < LOG_LEVEL/config value < --log flag
// (but see the caveat below).
func ConfigureLogging() {
	// Start with default log level (info).
	level := zerolog.InfoLevel
	// Check LOG_LEVEL environment variable first (viper also surfaces the
	// bound flag/config value under the same key).
	envLogLevel := viper.GetString("log")
	if envLogLevel != "" {
		if parsedLevel, err := zerolog.ParseLevel(envLogLevel); err == nil {
			level = parsedLevel
		}
	}
	// Command-line log flag takes precedence over environment variable.
	// The logLevel variable will be set by the flag parsing, so if it's different from default, use it.
	// NOTE(review): an explicit `--log info` is indistinguishable from the
	// default here, so an env/config value would still win in that one case —
	// confirm whether that is intended (checking flag.Changed would disambiguate).
	if logLevel != zerolog.InfoLevel {
		level = logLevel
	}
	// Set the global log level.
	zerolog.SetGlobalLevel(level)
	// Configure console writer for human-readable output on stderr.
	log.Logger = log.Output(zerolog.ConsoleWriter{
		Out:     os.Stderr,
		NoColor: false,
	})
}

View File

@@ -1,18 +1,18 @@
package cmd
package commands
import (
"fmt"
"github.com/belphemur/CBZOptimizer/converter"
"github.com/belphemur/CBZOptimizer/converter/constant"
"github.com/belphemur/CBZOptimizer/utils"
"github.com/pablodz/inotifywaitgo/inotifywaitgo"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/thediveo/enumflag/v2"
"log"
"runtime"
"strings"
"sync"
utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/pablodz/inotifywaitgo/inotifywaitgo"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func init() {
@@ -21,26 +21,14 @@ func init() {
}
command := &cobra.Command{
Use: "watch [folder]",
Short: "Watch a folder for new CBZ files",
Long: "Watch a folder for new CBZ files.\nIt will watch a folder for new CBZ files and optimize them.",
Short: "Watch a folder for new CBZ/CBR files",
Long: "Watch a folder for new CBZ/CBR files.\nIt will watch a folder for new CBZ/CBR files and optimize them.",
RunE: WatchCommand,
Args: cobra.ExactArgs(1),
}
formatFlag := enumflag.New(&converterType, "format", constant.CommandValue, enumflag.EnumCaseInsensitive)
_ = formatFlag.RegisterCompletion(command, "format", constant.HelpText)
command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
_ = viper.BindPFlag("quality", command.Flags().Lookup("quality"))
command.Flags().BoolP("override", "o", true, "Override the original CBZ files")
_ = viper.BindPFlag("override", command.Flags().Lookup("override"))
command.PersistentFlags().VarP(
formatFlag,
"format", "f",
fmt.Sprintf("Format to convert the images to: %s", constant.ListAll()))
command.PersistentFlags().Lookup("format").NoOptDefVal = constant.DefaultConversion.String()
_ = viper.BindPFlag("format", command.PersistentFlags().Lookup("format"))
// Setup common flags (format, quality, override, split, timeout) with viper binding
setupCommonFlags(command, &converterType, 85, true, false, true)
AddCommand(command)
}
@@ -50,7 +38,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
return fmt.Errorf("path is required")
}
if !utils.IsValidFolder(path) {
if !utils2.IsValidFolder(path) {
return fmt.Errorf("the path needs to be a folder")
}
@@ -61,6 +49,10 @@ func WatchCommand(_ *cobra.Command, args []string) error {
override := viper.GetBool("override")
split := viper.GetBool("split")
timeout := viper.GetDuration("timeout")
converterType := constant.FindConversionFormat(viper.GetString("format"))
chapterConverter, err := converter.Get(converterType)
if err != nil {
@@ -71,7 +63,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
if err != nil {
return fmt.Errorf("failed to prepare converter: %v", err)
}
log.Printf("Watching [%s] with [override: %t, quality: %d, format: %s]", path, override, quality, converterType.String())
log.Info().Str("path", path).Bool("override", override).Uint8("quality", quality).Str("format", converterType.String()).Bool("split", split).Msg("Watching directory")
events := make(chan inotifywaitgo.FileEvent)
errors := make(chan error)
@@ -100,16 +92,24 @@ func WatchCommand(_ *cobra.Command, args []string) error {
go func() {
defer wg.Done()
for event := range events {
log.Printf("[Event]%s, %v\n", event.Filename, event.Events)
log.Debug().Str("file", event.Filename).Interface("events", event.Events).Msg("File event")
if !strings.HasSuffix(strings.ToLower(event.Filename), ".cbz") {
filename := strings.ToLower(event.Filename)
if !strings.HasSuffix(filename, ".cbz") && !strings.HasSuffix(filename, ".cbr") {
continue
}
for _, e := range event.Events {
switch e {
case inotifywaitgo.CLOSE_WRITE, inotifywaitgo.MOVE:
err := utils.Optimize(chapterConverter, event.Filename, quality, override)
err := utils2.Optimize(&utils2.OptimizeOptions{
ChapterConverter: chapterConverter,
Path: event.Filename,
Quality: quality,
Override: override,
Split: split,
Timeout: timeout,
})
if err != nil {
errors <- fmt.Errorf("error processing file %s: %w", event.Filename, err)
}
@@ -124,7 +124,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
go func() {
defer wg.Done()
for err := range errors {
log.Printf("Error: %v\n", err)
log.Error().Err(err).Msg("Watch error")
}
}()

17
cmd/cbzoptimizer/main.go Normal file
View File

@@ -0,0 +1,17 @@
package main
import (
"github.com/belphemur/CBZOptimizer/v2/cmd/cbzoptimizer/commands"
)
// Build-time metadata; the release pipeline overrides these defaults via
// -ldflags at link time.
var (
	version = "dev"
	commit  = "none"
	date    = "unknown"
)

// main injects the build metadata into the CLI and dispatches to the root command.
func main() {
	commands.SetVersionInfo(version, commit, date)
	commands.Execute()
}

19
cmd/encoder-setup/main.go Normal file
View File

@@ -0,0 +1,19 @@
//go:build encoder_setup
// +build encoder_setup
package main
import (
"fmt"
"log"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/webp"
)
// main downloads/prepares the WebP encoder ahead of time. This binary is only
// built with the encoder_setup build tag (see the constraint at the top of
// the file).
func main() {
	fmt.Println("Setting up WebP encoder ...")
	if err := webp.PrepareEncoder(); err != nil {
		log.Fatalf("Failed to prepare WebP encoder: %v", err)
	}
	fmt.Println("WebP encoder setup complete.")
}

View File

@@ -1,128 +0,0 @@
package cmd
import (
"fmt"
"github.com/belphemur/CBZOptimizer/converter"
"github.com/belphemur/CBZOptimizer/converter/constant"
"github.com/belphemur/CBZOptimizer/utils"
"github.com/spf13/cobra"
"github.com/thediveo/enumflag/v2"
"os"
"path/filepath"
"strings"
"sync"
)
// converterType holds the image format selected via the --format flag.
var converterType constant.ConversionFormat

// init registers the "optimize" subcommand and its flags on the root command.
func init() {
	command := &cobra.Command{
		Use:   "optimize [folder]",
		Short: "Optimize all CBZ files in a folder recursively",
		Long:  "Optimize all CBZ files in a folder recursively.\nIt will take all the different pages in the CBZ files and convert them to the given format.\nThe original CBZ files will be kept intact depending if you choose to override or not.",
		RunE:  ConvertCbzCommand,
		Args:  cobra.ExactArgs(1),
	}
	// Case-insensitive enum flag mapping format names to ConversionFormat values.
	formatFlag := enumflag.New(&converterType, "format", constant.CommandValue, enumflag.EnumCaseInsensitive)
	_ = formatFlag.RegisterCompletion(command, "format", constant.HelpText)
	command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
	command.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
	command.Flags().BoolP("override", "o", false, "Override the original CBZ files")
	command.PersistentFlags().VarP(
		formatFlag,
		"format", "f",
		fmt.Sprintf("Format to convert the images to: %s", constant.ListAll()))
	// Bare --format (no value) falls back to the default conversion format.
	command.PersistentFlags().Lookup("format").NoOptDefVal = constant.DefaultConversion.String()
	AddCommand(command)
}
// ConvertCbzCommand optimizes every CBZ file found under the given folder.
// Files are discovered with a filesystem walk and converted by a bounded pool
// of worker goroutines sized by the --parallelism flag.
func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
	path := args[0]
	if path == "" {
		return fmt.Errorf("path is required")
	}

	if !utils.IsValidFolder(path) {
		return fmt.Errorf("the path needs to be a folder")
	}

	quality, err := cmd.Flags().GetUint8("quality")
	if err != nil || quality <= 0 || quality > 100 {
		return fmt.Errorf("invalid quality value")
	}

	override, err := cmd.Flags().GetBool("override")
	if err != nil {
		// Was "invalid quality value" — a copy/paste slip that misreported
		// which flag failed.
		return fmt.Errorf("invalid override value")
	}

	parallelism, err := cmd.Flags().GetInt("parallelism")
	if err != nil || parallelism < 1 {
		return fmt.Errorf("invalid parallelism value")
	}

	chapterConverter, err := converter.Get(converterType)
	if err != nil {
		return fmt.Errorf("failed to get chapterConverter: %v", err)
	}

	err = chapterConverter.PrepareConverter()
	if err != nil {
		return fmt.Errorf("failed to prepare converter: %v", err)
	}

	// Channel feeding discovered file paths to the workers.
	fileChan := make(chan string)

	// Collect worker errors under a mutex instead of a channel buffered to
	// `parallelism`: with the buffered channel, once more errors occurred
	// than the buffer could hold, workers blocked on the send, the walker
	// blocked on fileChan, and the whole command deadlocked.
	var (
		errMu      sync.Mutex
		workerErrs []error
	)

	// WaitGroup to wait for all worker goroutines to finish.
	var wg sync.WaitGroup
	for i := 0; i < parallelism; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for filePath := range fileChan {
				if err := utils.Optimize(chapterConverter, filePath, quality, override); err != nil {
					errMu.Lock()
					workerErrs = append(workerErrs, fmt.Errorf("error processing file %s: %w", filePath, err))
					errMu.Unlock()
				}
			}
		}()
	}

	// Walk the path and send .cbz files to the workers.
	walkErr := filepath.WalkDir(path, func(path string, info os.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") {
			fileChan <- path
		}
		return nil
	})

	// Always close the channel and drain the workers — even when the walk
	// failed — otherwise the worker goroutines leak.
	close(fileChan)
	wg.Wait()

	if walkErr != nil {
		return fmt.Errorf("error walking the path: %w", walkErr)
	}
	if len(workerErrs) > 0 {
		return fmt.Errorf("encountered errors: %v", workerErrs)
	}
	return nil
}

View File

@@ -1,121 +0,0 @@
package cmd
import (
"github.com/belphemur/CBZOptimizer/cbz"
"github.com/belphemur/CBZOptimizer/converter"
"github.com/belphemur/CBZOptimizer/converter/constant"
"github.com/belphemur/CBZOptimizer/manga"
"github.com/spf13/cobra"
"log"
"os"
"path/filepath"
"strings"
"testing"
"time"
)
// MockConverter is a mock implementation of the Converter interface used to
// exercise the optimize command without performing any real image work.
type MockConverter struct{}

// Format reports WebP as the target conversion format.
func (m *MockConverter) Format() constant.ConversionFormat {
	return constant.WebP
}

// ConvertChapter marks the chapter as converted and stamps the conversion
// time; the pages themselves are left untouched.
func (m *MockConverter) ConvertChapter(chapter *manga.Chapter, quality uint8, progress func(string)) (*manga.Chapter, error) {
	// Simulate conversion by setting the IsConverted flag
	chapter.IsConverted = true
	chapter.ConvertedTime = time.Now()
	return chapter, nil
}

// PrepareConverter is a no-op for the mock.
func (m *MockConverter) PrepareConverter() error {
	return nil
}
func TestConvertCbzCommand(t *testing.T) {
// Create a temporary directory for testing
tempDir, err := os.MkdirTemp("", "test_cbz")
if err != nil {
log.Fatal(err)
}
defer os.RemoveAll(tempDir) // Clean up the temp directory when done
// Locate the testdata directory
testdataDir := filepath.Join("../testdata")
if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
t.Fatalf("testdata directory not found")
}
// Copy sample CBZ files from testdata to the temporary directory
err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") {
destPath := filepath.Join(tempDir, info.Name())
data, err := os.ReadFile(path)
if err != nil {
return err
}
return os.WriteFile(destPath, data, info.Mode())
}
return nil
})
if err != nil {
t.Fatalf("Failed to copy sample files: %v", err)
}
// Mock the converter.Get function
originalGet := converter.Get
converter.Get = func(format constant.ConversionFormat) (converter.Converter, error) {
return &MockConverter{}, nil
}
defer func() { converter.Get = originalGet }()
// Set up the command
cmd := &cobra.Command{
Use: "optimize",
}
cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
cmd.Flags().BoolP("override", "o", false, "Override the original CBZ files")
// Execute the command
err = ConvertCbzCommand(cmd, []string{tempDir})
if err != nil {
t.Fatalf("Command execution failed: %v", err)
}
// Verify the results
err = filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() || !strings.HasSuffix(info.Name(), "_converted.cbz") {
return nil
}
t.Logf("CBZ file found: %s", path)
// Load the converted chapter
chapter, err := cbz.LoadChapter(path)
if err != nil {
return err
}
// Check if the chapter is marked as converted
if !chapter.IsConverted {
t.Errorf("Chapter is not marked as converted: %s", path)
}
// Check if the ConvertedTime is set
if chapter.ConvertedTime.IsZero() {
t.Errorf("ConvertedTime is not set for chapter: %s", path)
}
t.Logf("CBZ file [%s] is converted: %s", path, chapter.ConvertedTime)
return nil
})
if err != nil {
t.Fatalf("Error verifying converted files: %v", err)
}
}

View File

@@ -1,62 +0,0 @@
package cmd
import (
"fmt"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"os"
"path/filepath"
"runtime"
)
// rootCmd is the top-level CLI command every subcommand attaches to via AddCommand.
var rootCmd = &cobra.Command{
	Use:   "cbzconverter",
	Short: "Convert CBZ files using a specified converter",
}

// SetVersionInfo records the build metadata shown by the root command's
// --version output. Note the argument order in the message: version, date, commit.
func SetVersionInfo(version, commit, date string) {
	rootCmd.Version = fmt.Sprintf("%s (Built on %s from Git SHA %s)", version, date, commit)
}

// getPath returns the per-OS configuration directory for CBZOptimizer.
// NOTE(review): on an unrecognized GOOS the map lookup yields "", producing a
// relative "CBZOptimizer" path — confirm only windows/darwin/linux are supported.
func getPath() string {
	return filepath.Join(map[string]string{
		"windows": filepath.Join(os.Getenv("APPDATA")),
		"darwin":  filepath.Join(os.Getenv("HOME"), ".config"),
		"linux":   filepath.Join(os.Getenv("HOME"), ".config"),
	}[runtime.GOOS], "CBZOptimizer")
}
// init configures viper: config file location, CBZ_* environment overrides,
// and first-run creation of a default config file.
func init() {
	configFolder := getPath()
	// Config file lives at <configFolder>/config.yaml.
	viper.SetConfigName("config")
	viper.SetConfigType("yaml")
	viper.AddConfigPath(configFolder)
	viper.SetEnvPrefix("CBZ")
	viper.AutomaticEnv()
	// Ensure the configuration directory exists before reading/writing.
	err := os.MkdirAll(configFolder, os.ModePerm)
	if err != nil {
		panic(fmt.Errorf("fatal error config file: %w", err))
	}
	// Create a default config file on first run; any other read failure is fatal.
	if err := viper.ReadInConfig(); err != nil {
		if _, ok := err.(viper.ConfigFileNotFoundError); ok {
			err := viper.SafeWriteConfig()
			if err != nil {
				panic(fmt.Errorf("fatal error config file: %w", err))
			}
		} else {
			panic(fmt.Errorf("fatal error config file: %w", err))
		}
	}
}
// Execute executes the root command, printing any error and exiting with a
// non-zero status on failure.
func Execute() {
	if err := rootCmd.Execute(); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
}

// AddCommand registers a subcommand on the root command; called from the
// init functions of the individual command files.
func AddCommand(cmd *cobra.Command) {
	rootCmd.AddCommand(cmd)
}

View File

@@ -1,162 +0,0 @@
package converter
import (
"bytes"
"github.com/belphemur/CBZOptimizer/manga"
"image"
"image/jpeg"
"os"
"testing"
)
// TestConvertChapter runs every registered converter against three chapter
// shapes (all tall pages, all short pages, and a mix) and verifies that
// conversion succeeds and every output page ends up in WebP format.
func TestConvertChapter(t *testing.T) {
	testCases := []struct {
		name           string
		genTestChapter func(path string) (*manga.Chapter, error)
	}{
		{name: "All split pages", genTestChapter: genBigPages},
		{name: "No split pages", genTestChapter: genSmallPages},
		{name: "Mix of split and no split pages", genTestChapter: genMixSmallBig},
	}

	// Shared scratch CBZ file used by every chapter generator.
	temp, err := os.CreateTemp("", "test_chapter_*.cbz")
	if err != nil {
		t.Fatalf("failed to create temporary file: %v", err)
	}
	defer os.Remove(temp.Name())

	for _, converterName := range Available() {
		conv, err := Get(converterName)
		if err != nil {
			t.Fatalf("failed to get converter: %v", err)
		}
		t.Run(conv.Format().String(), func(t *testing.T) {
			for _, tc := range testCases {
				t.Run(tc.name, func(t *testing.T) {
					chapter, err := tc.genTestChapter(temp.Name())
					if err != nil {
						t.Fatalf("failed to load test genTestChapter: %v", err)
					}
					progress := func(msg string) {
						t.Log(msg)
					}
					converted, err := conv.ConvertChapter(chapter, uint8(80), progress)
					if err != nil {
						t.Fatalf("failed to convert genTestChapter: %v", err)
					}
					if len(converted.Pages) == 0 {
						t.Fatalf("no pages were converted")
					}
					for _, page := range converted.Pages {
						if page.Extension != ".webp" {
							t.Errorf("page %d was not converted to webp format", page.Index)
						}
					}
				})
			}
		})
	}
}
// genBigPages builds a 5-page test chapter in which every page is a tall
// 300x10000 JPEG, so every page should trigger splitting.
func genBigPages(path string) (*manga.Chapter, error) {
	// Verify the backing file exists and is readable before building pages.
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	const pageCount = 5
	pages := make([]*manga.Page, 0, pageCount)
	for i := 0; i < pageCount; i++ {
		buf := new(bytes.Buffer)
		if err := jpeg.Encode(buf, image.NewRGBA(image.Rect(0, 0, 300, 10000)), nil); err != nil {
			return nil, err
		}
		pages = append(pages, &manga.Page{
			Index:     uint16(i),
			Contents:  buf,
			Extension: ".jpg",
		})
	}
	return &manga.Chapter{FilePath: path, Pages: pages}, nil
}
// genSmallPages builds a 5-page test chapter of short 300x1000 JPEGs, none
// of which should need splitting.
func genSmallPages(path string) (*manga.Chapter, error) {
	// Verify the backing file exists and is readable before building pages.
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	const pageCount = 5
	pages := make([]*manga.Page, 0, pageCount)
	for i := 0; i < pageCount; i++ {
		buf := new(bytes.Buffer)
		if err := jpeg.Encode(buf, image.NewRGBA(image.Rect(0, 0, 300, 1000)), nil); err != nil {
			return nil, err
		}
		pages = append(pages, &manga.Page{
			Index:     uint16(i),
			Contents:  buf,
			Extension: ".jpg",
		})
	}
	return &manga.Chapter{FilePath: path, Pages: pages}, nil
}
// genMixSmallBig builds a 5-page test chapter whose page heights grow from
// 1000 to 5000 pixels, mixing pages below and above the split threshold.
func genMixSmallBig(path string) (*manga.Chapter, error) {
	// Verify the backing file exists and is readable before building pages.
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	const pageCount = 5
	pages := make([]*manga.Page, 0, pageCount)
	for i := 0; i < pageCount; i++ {
		buf := new(bytes.Buffer)
		if err := jpeg.Encode(buf, image.NewRGBA(image.Rect(0, 0, 300, 1000*(i+1))), nil); err != nil {
			return nil, err
		}
		pages = append(pages, &manga.Page{
			Index:     uint16(i),
			Contents:  buf,
			Extension: ".jpg",
		})
	}
	return &manga.Chapter{FilePath: path, Pages: pages}, nil
}

View File

@@ -1,231 +0,0 @@
package webp
import (
"bytes"
"fmt"
"github.com/belphemur/CBZOptimizer/converter/constant"
packer2 "github.com/belphemur/CBZOptimizer/manga"
"github.com/oliamb/cutter"
"golang.org/x/exp/slices"
_ "golang.org/x/image/webp"
"image"
_ "image/jpeg"
"image/png"
"io"
"runtime"
"sync"
"sync/atomic"
)
// Converter converts chapter pages to WebP, splitting overly tall pages
// into multiple parts before encoding.
type Converter struct {
	maxHeight  int  // pages at least this tall get split before conversion
	cropHeight int  // height of each split part (the last part may be shorter)
	isPrepared bool // whether PrepareConverter has already run successfully
}
// Format reports the conversion format this converter produces (WebP).
func (converter *Converter) Format() constant.ConversionFormat {
	return constant.WebP
}
// New builds a WebP converter with its default height limits. The encoder
// binary itself is set up lazily via PrepareConverter.
func New() *Converter {
	c := &Converter{isPrepared: false}
	// Original note kept the option of 16383/2 (half the WebP dimension cap);
	// the shipped defaults are lower.
	c.maxHeight = 4000
	c.cropHeight = 2000
	return c
}
// PrepareConverter makes sure the WebP encoder is ready to use. It is
// idempotent: after the first successful call it returns nil immediately.
func (converter *Converter) PrepareConverter() error {
	if converter.isPrepared {
		return nil
	}
	if err := PrepareEncoder(); err != nil {
		return err
	}
	converter.isPrepared = true
	return nil
}
// ConvertChapter converts every page of the chapter to WebP, splitting pages
// taller than maxHeight into multiple parts first.
//
// quality is the WebP encoding quality (0-100). progress is invoked after
// each finished page with a human-readable message plus current/total counts.
// On success the chapter is returned with its Pages slice replaced by the
// converted (and possibly split) pages, sorted by Index then SplitPartIndex.
func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uint8, progress func(message string, current uint32, total uint32)) (*packer2.Chapter, error) {
	err := converter.PrepareConverter()
	if err != nil {
		return nil, err
	}

	maxGoroutines := runtime.NumCPU()
	pagesChan := make(chan *packer2.PageContainer, maxGoroutines)

	// Errors are collected under a mutex instead of a bounded channel: the
	// previous errChan (capacity maxGoroutines) could fill up, blocking
	// producer goroutines on send and deadlocking wgPages.Wait().
	var errsMu sync.Mutex
	var errs []error
	addErr := func(err error) {
		errsMu.Lock()
		errs = append(errs, err)
		errsMu.Unlock()
	}

	var wgConvertedPages sync.WaitGroup
	var wgPages sync.WaitGroup
	wgPages.Add(len(chapter.Pages))

	guard := make(chan struct{}, maxGoroutines) // bounds in-flight conversions
	pagesMutex := sync.Mutex{}
	var pages []*packer2.Page
	var totalPages = uint32(len(chapter.Pages))

	// Consumer: converts queued page containers with bounded parallelism.
	go func() {
		for page := range pagesChan {
			guard <- struct{}{} // blocks once maxGoroutines conversions are running
			go func(pageToConvert *packer2.PageContainer) {
				defer wgConvertedPages.Done()
				defer func() { <-guard }()
				convertedPage, err := converter.convertPage(pageToConvert, quality)
				if err != nil {
					if convertedPage == nil {
						addErr(err)
						return
					}
					// WebP encoding failed but the decoded image survived:
					// fall back to PNG so the page isn't lost.
					buffer := new(bytes.Buffer)
					err := png.Encode(buffer, convertedPage.Image)
					if err != nil {
						addErr(err)
						return
					}
					convertedPage.Page.Contents = buffer
					convertedPage.Page.Extension = ".png"
					convertedPage.Page.Size = uint64(buffer.Len())
				}
				pagesMutex.Lock()
				pages = append(pages, convertedPage.Page)
				progress(fmt.Sprintf("Converted %d/%d pages to %s format", len(pages), totalPages, converter.Format()), uint32(len(pages)), totalPages)
				pagesMutex.Unlock()
			}(page)
		}
	}()

	// Producers: decide per source page whether it needs splitting, then queue
	// the resulting container(s) for conversion.
	for _, page := range chapter.Pages {
		go func(page *packer2.Page) {
			defer wgPages.Done()
			splitNeeded, img, format, err := converter.checkPageNeedsSplit(page)
			if err != nil {
				addErr(fmt.Errorf("error checking if page %d of genTestChapter %s needs split: %v", page.Index, chapter.FilePath, err))
				return
			}
			if !splitNeeded {
				wgConvertedPages.Add(1)
				pagesChan <- packer2.NewContainer(page, img, format)
				return
			}
			images, err := converter.cropImage(img)
			if err != nil {
				addErr(fmt.Errorf("error converting page %d of genTestChapter %s to webp: %v", page.Index, chapter.FilePath, err))
				return
			}
			// Splitting turns one source page into len(images) output pages.
			atomic.AddUint32(&totalPages, uint32(len(images)-1))
			for i, img := range images {
				page := &packer2.Page{Index: page.Index, IsSplitted: true, SplitPartIndex: uint16(i)}
				wgConvertedPages.Add(1)
				pagesChan <- packer2.NewContainer(page, img, "N/A")
			}
		}(page)
	}
	wgPages.Wait()
	wgConvertedPages.Wait()
	close(pagesChan)

	if len(errs) > 0 {
		return nil, fmt.Errorf("encountered errors: %v", errs)
	}

	// Sort ascending by page index, then by split part, so the chapter reads
	// in order. The previous comparator subtracted uint16 values directly
	// (wrapping on underflow) and sorted descending, reversing the chapter.
	slices.SortFunc(pages, func(a, b *packer2.Page) int {
		if a.Index == b.Index {
			return int(a.SplitPartIndex) - int(b.SplitPartIndex)
		}
		return int(a.Index) - int(b.Index)
	})
	chapter.Pages = pages
	runtime.GC()
	return chapter, nil
}
// cropImage slices img horizontally into strips of cropHeight pixels; the
// final strip holds whatever remains. Strips are returned top-to-bottom.
func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
	bounds := img.Bounds()
	height := bounds.Dy()
	// Ceiling division: a partial final strip still counts as a part.
	numParts := (height + converter.cropHeight - 1) / converter.cropHeight
	parts := make([]image.Image, numParts)
	for i := range parts {
		top := i * converter.cropHeight
		partHeight := converter.cropHeight
		if remaining := height - top; remaining < partHeight {
			partHeight = remaining
		}
		part, err := cutter.Crop(img, cutter.Config{
			Width:  bounds.Dx(),
			Height: partHeight,
			Anchor: image.Point{Y: top},
			Mode:   cutter.TopLeft,
		})
		if err != nil {
			return nil, fmt.Errorf("error cropping part %d: %v", i+1, err)
		}
		parts[i] = part
	}
	return parts, nil
}
// checkPageNeedsSplit decodes the page image and reports whether its height
// reaches the split threshold. It also returns the decoded image and its
// detected format so callers don't have to decode a second time.
func (converter *Converter) checkPageNeedsSplit(page *packer2.Page) (bool, image.Image, string, error) {
	var reader io.Reader = bytes.NewReader(page.Contents.Bytes())
	img, format, err := image.Decode(reader)
	if err != nil {
		return false, nil, format, err
	}
	needsSplit := img.Bounds().Dy() >= converter.maxHeight
	return needsSplit, img, format, nil
}
// convertPage encodes the container's image as WebP and updates the page's
// contents, extension, and size in place. Pages already in webp format pass
// through untouched.
func (converter *Converter) convertPage(container *packer2.PageContainer, quality uint8) (*packer2.PageContainer, error) {
	if container.Format == "webp" {
		// Nothing to do; keep the original bytes.
		return container, nil
	}
	encoded, err := converter.convert(container.Image, uint(quality))
	if err != nil {
		return nil, err
	}
	page := container.Page
	page.Contents = encoded
	page.Extension = ".webp"
	page.Size = uint64(encoded.Len())
	return container, nil
}
// convert encodes an already-decoded image as WebP at the given quality and
// returns the encoded bytes. (Despite the old comment, nothing is decoded
// here: the input is an image.Image, not raw bytes.) Note the parameter name
// shadows the image package inside this function.
func (converter *Converter) convert(image image.Image, quality uint) (*bytes.Buffer, error) {
	var buf bytes.Buffer
	err := Encode(&buf, image, quality)
	if err != nil {
		return nil, err
	}
	return &buf, nil
}

View File

@@ -1,22 +0,0 @@
package webp
import (
"github.com/belphemur/go-webpbin/v2"
"image"
"io"
)
// libwebpVersion pins the libwebp release used by the webpbin wrapper.
const libwebpVersion = "1.4.0"

// PrepareEncoder pins the libwebp version and runs the cwebp bin wrapper
// once with no input. NOTE(review): the setup semantics (e.g. fetching the
// binary) live inside webpbin's BinWrapper.Run — confirm against webpbin docs.
func PrepareEncoder() error {
	webpbin.SetLibVersion(libwebpVersion)
	container := webpbin.NewCWebP()
	return container.BinWrapper.Run()
}
// Encode writes m to w as a WebP image at the given quality via the cwebp
// binary wrapper.
func Encode(w io.Writer, m image.Image, quality uint) error {
	enc := webpbin.NewCWebP()
	enc.Quality(quality)
	enc.InputImage(m)
	enc.Output(w)
	return enc.Run()
}

35
docker-compose.yml Normal file
View File

@@ -0,0 +1,35 @@
version: '3.8'
services:
cbzoptimizer:
image: ghcr.io/belphemur/cbzoptimizer:latest
container_name: cbzoptimizer
environment:
# Set log level (panic, fatal, error, warn, info, debug, trace)
- LOG_LEVEL=info
# User and Group ID for file permissions
- PUID=99
- PGID=100
volumes:
# Mount your comics directory
- /path/to/your/comics:/comics
# Optional: Mount a config directory for persistent settings
- ./config:/config
# Example: Optimize all comics in the /comics directory
command: optimize /comics --quality 85 --parallelism 2 --override --format webp --split
restart: unless-stopped
# Example: Watch mode service
cbzoptimizer-watch:
image: ghcr.io/belphemur/cbzoptimizer:latest
container_name: cbzoptimizer-watch
environment:
- LOG_LEVEL=info
- PUID=99
- PGID=100
volumes:
- /path/to/watch/directory:/watch
- ./config:/config
# Watch for new files and automatically optimize them
command: watch /watch --quality 85 --override --format webp --split
restart: unless-stopped

80
go.mod
View File

@@ -1,49 +1,57 @@
module github.com/belphemur/CBZOptimizer
module github.com/belphemur/CBZOptimizer/v2
go 1.23.0
go 1.25
require (
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de
github.com/belphemur/go-webpbin/v2 v2.0.0
github.com/belphemur/go-webpbin/v2 v2.1.0
github.com/mholt/archives v0.1.5
github.com/oliamb/cutter v0.2.2
github.com/pablodz/inotifywaitgo v0.0.7
github.com/samber/lo v1.47.0
github.com/spf13/cobra v1.8.1
github.com/spf13/viper v1.19.0
github.com/thediveo/enumflag/v2 v2.0.5
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948
golang.org/x/image v0.19.0
github.com/pablodz/inotifywaitgo v0.0.9
github.com/rs/zerolog v1.34.0
github.com/samber/lo v1.52.0
github.com/spf13/cobra v1.10.2
github.com/spf13/viper v1.21.0
github.com/stretchr/testify v1.11.1
github.com/thediveo/enumflag/v2 v2.1.0
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93
golang.org/x/image v0.34.0
)
require (
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0 // indirect
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/STARRY-S/zip v0.2.3 // indirect
github.com/andybalholm/brotli v1.2.0 // indirect
github.com/belphemur/go-binwrapper v1.0.0 // indirect
github.com/bodgit/plumbing v1.3.0 // indirect
github.com/bodgit/sevenzip v1.6.1 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jfrog/archiver/v3 v3.6.1 // indirect
github.com/klauspost/compress v1.17.9 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/sagikazarmark/locafero v0.4.0 // indirect
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.11.0 // indirect
github.com/spf13/cast v1.6.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mikelolasagasti/xz v1.0.1 // indirect
github.com/minio/minlz v1.0.1 // indirect
github.com/nwaples/rardecode/v2 v2.2.0 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/sagikazarmark/locafero v0.11.0 // indirect
github.com/sorairolake/lzip-go v0.3.8 // indirect
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.10.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/ulikunitz/xz v0.5.12 // indirect
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/sys v0.18.0 // indirect
golang.org/x/text v0.17.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
github.com/ulikunitz/xz v0.5.15 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/sys v0.36.0 // indirect
golang.org/x/text v0.32.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

438
go.sum
View File

@@ -1,141 +1,399 @@
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/STARRY-S/zip v0.2.3 h1:luE4dMvRPDOWQdeDdUxUoZkzUIpTccdKdhHHsQJ1fm4=
github.com/STARRY-S/zip v0.2.3/go.mod h1:lqJ9JdeRipyOQJrYSOtpNAiaesFO6zVDsE8GIGFaoSk=
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0 h1:EzKgPYK90TyAOmytK7bvapqlkG/m7KWKK28mOAdQEaM=
github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0/go.mod h1:s2Dv+CfgVbNM9ucqvE5qCCC0AkI1PE2OZb7N8PPlOh4=
github.com/belphemur/go-webpbin/v2 v2.0.0 h1:Do0TTTJ6cS6lgi+R67De+jXRYe+ZOwxFqTiFggyX5p8=
github.com/belphemur/go-webpbin/v2 v2.0.0/go.mod h1:VIHXZQaIwaIYDn08w0qeJFPj1MuYt5pyJnkQALPYc5g=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/belphemur/go-binwrapper v1.0.0 h1:kXNRqO3vrqex4O0Q1pfD9w5kKwrQT1Mg9CJOd/IWbtI=
github.com/belphemur/go-binwrapper v1.0.0/go.mod h1:PNID1xFdXpkAwjr7gCidIiC/JA8tpYl3zzNSIK9lCjc=
github.com/belphemur/go-webpbin/v2 v2.1.0 h1:SvdjLz/9wb7kqD7jYDjlbTA2xRwwQRo3L/a5Ee+Br5E=
github.com/belphemur/go-webpbin/v2 v2.1.0/go.mod h1:jRdjIZYdSkW6DM9pfiH2fjSYgX/jshRooDI03f6o658=
github.com/bodgit/plumbing v1.3.0 h1:pf9Itz1JOQgn7vEOE7v7nlEfBykYqvUYioC61TwWCFU=
github.com/bodgit/plumbing v1.3.0/go.mod h1:JOTb4XiRu5xfnmdnDJo6GmSbSbtSyufrsyZFByMtKEs=
github.com/bodgit/sevenzip v1.6.1 h1:kikg2pUMYC9ljU7W9SaqHXhym5HyKm8/M/jd31fYan4=
github.com/bodgit/sevenzip v1.6.1/go.mod h1:GVoYQbEVbOGT8n2pfqCIMRUaRjQ8F9oSqoBEqZh5fQ8=
github.com/bodgit/windows v1.0.1 h1:tF7K6KOluPYygXa3Z2594zxlkbKPAOvqr97etrGNIz4=
github.com/bodgit/windows v1.0.1/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY=
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 h1:2tV76y6Q9BB+NEBasnqvs7e49aEBFI8ejC89PSnWH+4=
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad h1:a6HEuzUHeKH6hwfN/ZoQgRgVIWFJljSWa/zetS2WTvg=
github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jfrog/archiver/v3 v3.6.1 h1:LOxnkw9pOn45DzCbZNFV6K0+6dCsQ0L8mR3ZcujO5eI=
github.com/jfrog/archiver/v3 v3.6.1/go.mod h1:VgR+3WZS4N+i9FaDwLZbq+jeU4B4zctXL+gL4EMzfLw=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/mholt/archives v0.1.5 h1:Fh2hl1j7VEhc6DZs2DLMgiBNChUux154a1G+2esNvzQ=
github.com/mholt/archives v0.1.5/go.mod h1:3TPMmBLPsgszL+1As5zECTuKwKvIfj6YcwWPpeTAXF4=
github.com/mikelolasagasti/xz v1.0.1 h1:Q2F2jX0RYJUG3+WsM+FJknv+6eVjsjXNDV0KJXZzkD0=
github.com/mikelolasagasti/xz v1.0.1/go.mod h1:muAirjiOUxPRXwm9HdDtB3uoRPrGnL85XHtokL9Hcgc=
github.com/minio/minlz v1.0.1 h1:OUZUzXcib8diiX+JYxyRLIdomyZYzHct6EShOKtQY2A=
github.com/minio/minlz v1.0.1/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec=
github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A=
github.com/nwaples/rardecode/v2 v2.2.0/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsRNb6IbvGVHRmw=
github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
github.com/onsi/ginkgo/v2 v2.13.0 h1:0jY9lJquiL8fcf3M4LAXN5aMlS/b2BV86HFFPCPMgE4=
github.com/onsi/ginkgo/v2 v2.13.0/go.mod h1:TE309ZR8s5FsKKpuB1YAQYBzCaAfUgatB/xlT/ETL/o=
github.com/onsi/gomega v1.28.1 h1:MijcGUbfYuznzK/5R4CPNoUP/9Xvuo20sXfEm6XxoTA=
github.com/onsi/gomega v1.28.1/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ=
github.com/pablodz/inotifywaitgo v0.0.7 h1:1ii49dGBnRn0t1Sz7RGZS6/NberPEDQprwKHN49Bv6U=
github.com/pablodz/inotifywaitgo v0.0.7/go.mod h1:OtzRCsYTJlIr+vAzlOtauTkfQ1c25ebFuXq8tbbf8cw=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk=
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
github.com/pablodz/inotifywaitgo v0.0.9 h1:njquRbBU7fuwIe5rEvtaniVBjwWzcpdUVptSgzFqZsw=
github.com/pablodz/inotifywaitgo v0.0.9/go.mod h1:hAfx2oN+WKg8miwUKPs52trySpPignlRBRxWcXVHku0=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/samber/lo v1.47.0 h1:z7RynLwP5nbyRscyvcD043DWYoOcYRv3mV8lBeqOCLc=
github.com/samber/lo v1.47.0/go.mod h1:RmDH9Ct32Qy3gduHQuKJ3gW1fMHAnE/fAzQuf6He5cU=
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
github.com/sagikazarmark/locafero v0.11.0 h1:1iurJgmM9G3PA/I+wWYIOw/5SyBtxapeHDcg+AAIFXc=
github.com/sagikazarmark/locafero v0.11.0/go.mod h1:nVIGvgyzw595SUSUE6tvCp3YYTeHs15MvlmU87WwIik=
github.com/samber/lo v1.52.0 h1:Rvi+3BFHES3A8meP33VPAxiBZX/Aws5RxrschYGjomw=
github.com/samber/lo v1.52.0/go.mod h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0=
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
github.com/sorairolake/lzip-go v0.3.8 h1:j5Q2313INdTA80ureWYRhX+1K78mUXfMoPZCw/ivWik=
github.com/sorairolake/lzip-go v0.3.8/go.mod h1:JcBqGMV0frlxwrsE9sMWXDjqn3EeVf0/54YPsw66qkU=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U=
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU=
github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/thediveo/enumflag/v2 v2.0.5 h1:VJjvlAqUb6m6mxOrB/0tfBJI0Kvi9wJ8ulh38xK87i8=
github.com/thediveo/enumflag/v2 v2.0.5/go.mod h1:0NcG67nYgwwFsAvoQCmezG0J0KaIxZ0f7skg9eLq1DA=
github.com/thediveo/success v1.0.1 h1:NVwUOwKUwaN8szjkJ+vsiM2L3sNBFscldoDJ2g2tAPg=
github.com/thediveo/success v1.0.1/go.mod h1:AZ8oUArgbIsCuDEWrzWNQHdKnPbDOLQsWOFj9ynwLt0=
github.com/thediveo/enumflag/v2 v2.0.7 h1:uxXDU+rTel7Hg4X0xdqICpG9rzuI/mzLAEYXWLflOfs=
github.com/thediveo/enumflag/v2 v2.0.7/go.mod h1:bWlnNvTJuUK+huyzf3WECFLy557Ttlc+yk3o+BPs0EA=
github.com/thediveo/enumflag/v2 v2.1.0 h1:F80w/h1U4B3/sBpFVUewzMVTfLk2m0D60+61UCuXSf8=
github.com/thediveo/enumflag/v2 v2.1.0/go.mod h1:wj2B0dHqqFOqIgnJ7mD8s97wK7/46oOZvDg93muD68g=
github.com/thediveo/success v1.0.2 h1:w+r3RbSjLmd7oiNnlCblfGqItcsaShcuAorRVh/+0xk=
github.com/thediveo/success v1.0.2/go.mod h1:hdPJB77k70w764lh8uLUZgNhgeTl3DYeZ4d4bwMO2CU=
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA=
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ=
golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ=
golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys=
golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
github.com/ulikunitz/xz v0.5.15 h1:9DNdB5s+SgV3bQ2ApL10xRc35ck0DuIX/isZvIk+ubY=
github.com/ulikunitz/xz v0.5.15/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc=
go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEbFGHD4hhz6S1ZYeaU=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 h1:fQsdNF2N+/YewlRZiricy4P1iimyPKZ/xwniHj8Q2a0=
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=

123
internal/cbz/cbz_creator.go Normal file
View File

@@ -0,0 +1,123 @@
package cbz
import (
"archive/zip"
"fmt"
"os"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/rs/zerolog/log"
)
// WriteChapterToCBZ writes all pages of the given chapter into a new CBZ
// (zip) archive at outputFilePath.
//
// Page entries are named "%04d<ext>" by page index, or "%04d-%02d<ext>"
// (index + split-part index) for split pages, so entries sort in reading
// order. Pages are stored uncompressed (zip.Store) since image payloads
// do not benefit from deflate. When chapter.ComicInfoXml is non-empty it
// is written as a deflated "ComicInfo.xml" entry. For converted chapters
// the conversion timestamp is recorded in the archive comment so later
// runs can detect an already-optimized file.
//
// The error result is named so the deferred errs.Capture calls can
// propagate Close failures of the zip writer and the underlying file to
// the caller; with an unnamed result those errors would be silently lost.
func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) (err error) {
	log.Debug().
		Str("chapter_file", chapter.FilePath).
		Str("output_path", outputFilePath).
		Int("page_count", len(chapter.Pages)).
		Bool("is_converted", chapter.IsConverted).
		Msg("Starting CBZ file creation")

	// Create the output file; a failed Close is surfaced through the
	// named return value via errs.Capture.
	log.Debug().Str("output_path", outputFilePath).Msg("Creating output CBZ file")
	zipFile, err := os.Create(outputFilePath)
	if err != nil {
		log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to create CBZ file")
		return fmt.Errorf("failed to create .cbz file: %w", err)
	}
	defer errs.Capture(&err, zipFile.Close, "failed to close .cbz file")

	// zip.NewWriter cannot fail, so no error check is needed here (the
	// previous stale-err check was dead code). Defers run LIFO, so the
	// writer is closed before the file, flushing the central directory
	// and capturing any error it reports.
	log.Debug().Str("output_path", outputFilePath).Msg("Creating ZIP writer")
	zipWriter := zip.NewWriter(zipFile)
	defer errs.Capture(&err, zipWriter.Close, "failed to close .cbz writer")

	// Write each page to the ZIP archive.
	log.Debug().Str("output_path", outputFilePath).Int("pages_to_write", len(chapter.Pages)).Msg("Writing pages to CBZ archive")
	for _, page := range chapter.Pages {
		// Construct the entry name for the page.
		var fileName string
		if page.IsSplitted {
			// Use the format %04d-%02d for split pages.
			fileName = fmt.Sprintf("%04d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
		} else {
			// Use the format %04d for non-split pages.
			fileName = fmt.Sprintf("%04d%s", page.Index, page.Extension)
		}
		log.Debug().
			Str("output_path", outputFilePath).
			Uint16("page_index", page.Index).
			Bool("is_splitted", page.IsSplitted).
			Uint16("split_part", page.SplitPartIndex).
			Str("filename", fileName).
			Int("size", len(page.Contents.Bytes())).
			Msg("Writing page to CBZ archive")

		// Create a new entry in the ZIP archive. zip.Store keeps images
		// uncompressed: they are already compressed formats, so deflating
		// them wastes CPU for no size gain.
		fileWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
			Name:     fileName,
			Method:   zip.Store,
			Modified: time.Now(),
		})
		if err != nil {
			log.Error().Str("output_path", outputFilePath).Str("filename", fileName).Err(err).Msg("Failed to create file in CBZ archive")
			return fmt.Errorf("failed to create file in .cbz: %w", err)
		}

		// Write the page contents to the entry.
		bytesWritten, err := fileWriter.Write(page.Contents.Bytes())
		if err != nil {
			log.Error().Str("output_path", outputFilePath).Str("filename", fileName).Err(err).Msg("Failed to write page contents")
			return fmt.Errorf("failed to write page contents: %w", err)
		}
		log.Debug().
			Str("output_path", outputFilePath).
			Str("filename", fileName).
			Int("bytes_written", bytesWritten).
			Msg("Page written successfully")
	}

	// Optionally write the ComicInfo.xml metadata file if present.
	if chapter.ComicInfoXml != "" {
		log.Debug().Str("output_path", outputFilePath).Int("xml_size", len(chapter.ComicInfoXml)).Msg("Writing ComicInfo.xml to CBZ archive")
		comicInfoWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
			Name:     "ComicInfo.xml",
			Method:   zip.Deflate,
			Modified: time.Now(),
		})
		if err != nil {
			log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to create ComicInfo.xml in CBZ archive")
			return fmt.Errorf("failed to create ComicInfo.xml in .cbz: %w", err)
		}
		bytesWritten, err := comicInfoWriter.Write([]byte(chapter.ComicInfoXml))
		if err != nil {
			log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to write ComicInfo.xml contents")
			return fmt.Errorf("failed to write ComicInfo.xml contents: %w", err)
		}
		log.Debug().Str("output_path", outputFilePath).Int("bytes_written", bytesWritten).Msg("ComicInfo.xml written successfully")
	} else {
		log.Debug().Str("output_path", outputFilePath).Msg("No ComicInfo.xml to write")
	}

	// Record the conversion marker as the archive comment so an
	// already-converted chapter can be recognized without re-reading pages.
	if chapter.IsConverted {
		convertedString := fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime)
		log.Debug().Str("output_path", outputFilePath).Str("comment", convertedString).Msg("Setting CBZ comment for converted chapter")
		err = zipWriter.SetComment(convertedString)
		if err != nil {
			log.Error().Str("output_path", outputFilePath).Err(err).Msg("Failed to write CBZ comment")
			return fmt.Errorf("failed to write comment: %w", err)
		}
		log.Debug().Str("output_path", outputFilePath).Msg("CBZ comment set successfully")
	}

	log.Debug().Str("output_path", outputFilePath).Msg("CBZ file creation completed successfully")
	return nil
}

View File

@@ -3,18 +3,23 @@ package cbz
import (
"archive/zip"
"bytes"
"github.com/belphemur/CBZOptimizer/manga"
"fmt"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"os"
"testing"
"time"
)
func TestWriteChapterToCBZ(t *testing.T) {
currentTime := time.Now()
// Define test cases
testCases := []struct {
name string
chapter *manga.Chapter
expectedFiles []string
expectedComment string
}{
//test case where there is only one page and ComicInfo and the chapter is converted
{
@@ -29,9 +34,10 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
ComicInfoXml: "<Series>Boundless Necromancer</Series>",
IsConverted: true,
ConvertedTime: time.Now(),
ConvertedTime: currentTime,
},
expectedFiles: []string{"page_0000.jpg", "ComicInfo.xml", "Converted.txt"},
expectedFiles: []string{"0000.jpg", "ComicInfo.xml"},
expectedComment: fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", currentTime),
},
//test case where there is only one page and no
{
@@ -45,7 +51,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
},
},
expectedFiles: []string{"page_0000.jpg"},
expectedFiles: []string{"0000.jpg"},
},
{
name: "Multiple pages with ComicInfo",
@@ -64,7 +70,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
ComicInfoXml: "<Series>Boundless Necromancer</Series>",
},
expectedFiles: []string{"page_0000.jpg", "page_0001.jpg", "ComicInfo.xml"},
expectedFiles: []string{"0000.jpg", "0001.jpg", "ComicInfo.xml"},
},
{
name: "Split page",
@@ -79,7 +85,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
},
},
expectedFiles: []string{"page_0000-01.jpg"},
expectedFiles: []string{"0000-01.jpg"},
},
}
@@ -90,7 +96,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
if err != nil {
t.Fatalf("Failed to create temporary file: %v", err)
}
defer os.Remove(tempFile.Name())
defer errs.CaptureGeneric(&err, os.Remove, tempFile.Name(), "failed to remove temporary file")
// Write the chapter to the .cbz file
err = WriteChapterToCBZ(tc.chapter, tempFile.Name())
@@ -103,7 +109,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
if err != nil {
t.Fatalf("Failed to open CBZ file: %v", err)
}
defer r.Close()
defer errs.Capture(&err, r.Close, "failed to close CBZ file")
// Collect the names of the files in the archive
var filesInArchive []string
@@ -125,6 +131,10 @@ func TestWriteChapterToCBZ(t *testing.T) {
}
}
if tc.expectedComment != "" && r.Comment != tc.expectedComment {
t.Errorf("Expected comment %s, but found %s", tc.expectedComment, r.Comment)
}
// Check if there are no unexpected files
if len(filesInArchive) != len(tc.expectedFiles) {
t.Errorf("Expected %d files, but found %d", len(tc.expectedFiles), len(filesInArchive))

166
internal/cbz/cbz_loader.go Normal file
View File

@@ -0,0 +1,166 @@
package cbz
import (
"archive/zip"
"bufio"
"bytes"
"context"
"fmt"
"io"
"io/fs"
"path/filepath"
"strings"
"github.com/araddon/dateparse"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/mholt/archives"
"github.com/rs/zerolog/log"
)
// LoadChapter reads a CBZ/CBR archive at filePath and reconstructs a
// manga.Chapter from it: every non-metadata entry becomes a page (in
// walk order), an optional ComicInfo.xml is captured verbatim, and the
// conversion marker is detected either from the CBZ archive comment or
// from a legacy "converted.txt" entry.
//
// The results are named so that the deferred errs.Capture calls can
// fold close errors into the returned error; with unnamed results those
// close failures were silently discarded.
func LoadChapter(filePath string) (chapter *manga.Chapter, err error) {
	log.Debug().Str("file_path", filePath).Msg("Starting chapter loading")
	ctx := context.Background()

	chapter = &manga.Chapter{
		FilePath: filePath,
	}

	// First, try to read the comment using zip.OpenReader for CBZ files.
	// A comment whose first line parses as a timestamp marks the chapter
	// as already converted. Comment reading is best-effort: failures only
	// log and loading continues.
	if strings.ToLower(filepath.Ext(filePath)) == ".cbz" {
		log.Debug().Str("file_path", filePath).Msg("Checking CBZ comment for conversion status")
		r, openErr := zip.OpenReader(filePath)
		if openErr == nil {
			defer errs.Capture(&err, r.Close, "failed to close zip reader for comment")

			if r.Comment != "" {
				log.Debug().Str("file_path", filePath).Str("comment", r.Comment).Msg("Found CBZ comment")
				scanner := bufio.NewScanner(strings.NewReader(r.Comment))
				if scanner.Scan() {
					convertedTime := scanner.Text()
					log.Debug().Str("file_path", filePath).Str("converted_time", convertedTime).Msg("Parsing conversion timestamp")
					// Use a dedicated error variable so a parse failure does
					// not leak into the function's named result.
					var parseErr error
					chapter.ConvertedTime, parseErr = dateparse.ParseAny(convertedTime)
					if parseErr == nil {
						chapter.IsConverted = true
						log.Debug().Str("file_path", filePath).Time("converted_time", chapter.ConvertedTime).Msg("Chapter marked as previously converted")
					} else {
						log.Debug().Str("file_path", filePath).Err(parseErr).Msg("Failed to parse conversion timestamp")
					}
				}
			} else {
				log.Debug().Str("file_path", filePath).Msg("No CBZ comment found")
			}
		} else {
			log.Debug().Str("file_path", filePath).Err(openErr).Msg("Failed to open CBZ file for comment reading")
		}
		// Continue even if comment reading fails
	}

	// Open the archive using the archives library for file operations.
	log.Debug().Str("file_path", filePath).Msg("Opening archive file system")
	fsys, err := archives.FileSystem(ctx, filePath, nil)
	if err != nil {
		log.Error().Str("file_path", filePath).Err(err).Msg("Failed to open archive file system")
		return nil, fmt.Errorf("failed to open archive file: %w", err)
	}

	// Walk through all files in the filesystem.
	log.Debug().Str("file_path", filePath).Msg("Starting filesystem walk")
	err = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, walkErr error) error {
		if walkErr != nil {
			return walkErr
		}
		if d.IsDir() {
			return nil
		}

		// Each entry is processed in its own closure with a NAMED error
		// result so the deferred Capture of file.Close actually reaches
		// the returned error (and each iteration's close runs promptly).
		return func() (err error) {
			file, err := fsys.Open(path)
			if err != nil {
				return fmt.Errorf("failed to open file %s: %w", path, err)
			}
			defer errs.Capture(&err, file.Close, fmt.Sprintf("failed to close file %s", path))

			// Dispatch on extension/name: metadata entries vs. pages.
			ext := strings.ToLower(filepath.Ext(path))
			fileName := strings.ToLower(filepath.Base(path))

			if ext == ".xml" && fileName == "comicinfo.xml" {
				log.Debug().Str("file_path", filePath).Str("archive_file", path).Msg("Found ComicInfo.xml")
				xmlContent, err := io.ReadAll(file)
				if err != nil {
					log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read ComicInfo.xml")
					return fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
				}
				chapter.ComicInfoXml = string(xmlContent)
				log.Debug().Str("file_path", filePath).Int("xml_size", len(xmlContent)).Msg("ComicInfo.xml loaded")
			} else if !chapter.IsConverted && ext == ".txt" && fileName == "converted.txt" {
				// Legacy marker file: first line is the conversion timestamp.
				log.Debug().Str("file_path", filePath).Str("archive_file", path).Msg("Found converted.txt")
				textContent, err := io.ReadAll(file)
				if err != nil {
					log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read converted.txt")
					return fmt.Errorf("failed to read converted.txt content: %w", err)
				}
				scanner := bufio.NewScanner(bytes.NewReader(textContent))
				if scanner.Scan() {
					convertedTime := scanner.Text()
					log.Debug().Str("file_path", filePath).Str("converted_time", convertedTime).Msg("Parsing converted.txt timestamp")
					chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
					if err != nil {
						log.Error().Str("file_path", filePath).Err(err).Msg("Failed to parse converted time from converted.txt")
						return fmt.Errorf("failed to parse converted time: %w", err)
					}
					chapter.IsConverted = true
					log.Debug().Str("file_path", filePath).Time("converted_time", chapter.ConvertedTime).Msg("Chapter marked as converted from converted.txt")
				}
			} else {
				// Anything else is treated as a page image.
				log.Debug().Str("file_path", filePath).Str("archive_file", path).Str("extension", ext).Msg("Processing page file")
				buf := new(bytes.Buffer)
				bytesCopied, err := io.Copy(buf, file)
				if err != nil {
					log.Error().Str("file_path", filePath).Str("archive_file", path).Err(err).Msg("Failed to read page file contents")
					return fmt.Errorf("failed to read file contents: %w", err)
				}

				page := &manga.Page{
					Index:      uint16(len(chapter.Pages)), // Simple index based on order
					Extension:  ext,
					Size:       uint64(buf.Len()),
					Contents:   buf,
					IsSplitted: false,
				}
				chapter.Pages = append(chapter.Pages, page)
				log.Debug().
					Str("file_path", filePath).
					Str("archive_file", path).
					Uint16("page_index", page.Index).
					Int64("bytes_read", bytesCopied).
					Msg("Page loaded successfully")
			}
			return nil
		}()
	})
	if err != nil {
		log.Error().Str("file_path", filePath).Err(err).Msg("Failed during filesystem walk")
		return nil, err
	}

	log.Debug().
		Str("file_path", filePath).
		Int("pages_loaded", len(chapter.Pages)).
		Bool("is_converted", chapter.IsConverted).
		Bool("has_comic_info", chapter.ComicInfoXml != "").
		Msg("Chapter loading completed successfully")
	return chapter, nil
}

View File

@@ -16,15 +16,22 @@ func TestLoadChapter(t *testing.T) {
testCases := []testCase{
{
name: "Original Chapter",
filePath: "../testdata/Chapter 1.cbz",
name: "Original Chapter CBZ",
filePath: "../../testdata/Chapter 128.cbz",
expectedPages: 14,
expectedSeries: "<Series>The Knight King Who Returned with a God</Series>",
expectedConversion: false,
},
{
name: "Original Chapter CBR",
filePath: "../../testdata/Chapter 1.cbr",
expectedPages: 16,
expectedSeries: "<Series>Boundless Necromancer</Series>",
expectedConversion: false,
},
{
name: "Converted Chapter",
filePath: "../testdata/Chapter 1_converted.cbz",
filePath: "../../testdata/Chapter 10_converted.cbz",
expectedPages: 107,
expectedSeries: "<Series>Boundless Necromancer</Series>",
expectedConversion: true,

View File

@@ -0,0 +1,32 @@
package manga
import (
"bytes"
"image"
)
// PageContainer is a struct that holds a manga page, its image, and the image format.
// It tracks a page through the conversion pipeline: whether it still
// needs converting and whether conversion has already happened.
type PageContainer struct {
	// Page is a pointer to a manga page object.
	Page *Page
	// Image is the decoded image of the manga page.
	Image image.Image
	// Format is a string representing the format of the image (e.g., "png", "jpeg", "webp").
	Format string
	// IsToBeConverted is a boolean flag indicating whether the image needs to be converted to another format.
	IsToBeConverted bool
	// HasBeenConverted is a boolean flag indicating whether the image has been converted to another format.
	// It starts false (see NewContainer) and is set by SetConverted.
	HasBeenConverted bool
}
// NewContainer builds a PageContainer for page with its decoded image
// img and format string (e.g. "png", "jpeg", "webp"). The container is
// created with HasBeenConverted set to false.
//
// The parameter formerly named "Page" is renamed to "page": a
// lower-case local name that no longer shadows the Page type.
func NewContainer(page *Page, img image.Image, format string, isToBeConverted bool) *PageContainer {
	return &PageContainer{Page: page, Image: img, Format: format, IsToBeConverted: isToBeConverted, HasBeenConverted: false}
}
// SetConverted stores the converted image bytes on the underlying page,
// recording the new extension and byte size, and flags the container as
// converted.
func (pc *PageContainer) SetConverted(converted *bytes.Buffer, extension string) {
	pc.HasBeenConverted = true
	pc.Page.Extension = extension
	pc.Page.Size = uint64(converted.Len())
	pc.Page.Contents = converted
}

View File

@@ -0,0 +1,25 @@
package errs
import (
"errors"
"fmt"
)
// Capture invokes errFunc and, when it fails, records the failure in
// *errPtr with msg as a prefix. Any error already stored in *errPtr is
// preserved by combining the two with errors.Join.
func Capture(errPtr *error, errFunc func() error, msg string) {
	if err := errFunc(); err != nil {
		*errPtr = errors.Join(*errPtr, fmt.Errorf("%s: %w", msg, err))
	}
}
// CaptureGeneric invokes errFunc with value and, when it fails, records
// the failure in *errPtr with msg as a prefix, preserving any existing
// error via errors.Join.
func CaptureGeneric[K any](errPtr *error, errFunc func(value K) error, value K, msg string) {
	if err := errFunc(value); err != nil {
		*errPtr = errors.Join(*errPtr, fmt.Errorf("%s: %w", msg, err))
	}
}

View File

@@ -0,0 +1,122 @@
package errs
import (
"errors"
"fmt"
"testing"
)
// TestCapture exercises Capture across the interesting combinations of
// initial error state and errFunc outcome, verifying both the message
// prefix and the errors.Join chaining of pre-existing errors.
func TestCapture(t *testing.T) {
	cases := []struct {
		name     string
		initial  error
		errFunc  func() error
		msg      string
		expected string
	}{
		{
			name:     "No error from errFunc",
			initial:  nil,
			errFunc:  func() error { return nil },
			msg:      "test message",
			expected: "",
		},
		{
			name:     "Error from errFunc with no initial error",
			initial:  nil,
			errFunc:  func() error { return errors.New("error from func") },
			msg:      "test message",
			expected: "test message: error from func",
		},
		{
			name:     "Error from errFunc with initial error",
			initial:  errors.New("initial error"),
			errFunc:  func() error { return errors.New("error from func") },
			msg:      "test message",
			expected: "initial error\ntest message: error from func",
		},
		{
			name:     "Error from errFunc with initial wrapped error",
			initial:  fmt.Errorf("wrapped error: %w", errors.New("initial error")),
			errFunc:  func() error { return errors.New("error from func") },
			msg:      "test message",
			expected: "wrapped error: initial error\ntest message: error from func",
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			err := tc.initial
			Capture(&err, tc.errFunc, tc.msg)
			switch {
			case err == nil && tc.expected != "":
				t.Errorf("expected %q, got nil", tc.expected)
			case err != nil && err.Error() != tc.expected:
				t.Errorf("expected %q, got %q", tc.expected, err.Error())
			}
		})
	}
}
// TestCaptureGeneric mirrors TestCapture for the generic variant and
// additionally verifies that the value argument is forwarded to errFunc.
func TestCaptureGeneric(t *testing.T) {
	cases := []struct {
		name     string
		initial  error
		errFunc  func(int) error
		value    int
		msg      string
		expected string
	}{
		{
			name:     "No error from errFunc",
			initial:  nil,
			errFunc:  func(value int) error { return nil },
			value:    0,
			msg:      "test message",
			expected: "",
		},
		{
			name:     "Error from errFunc with no initial error",
			initial:  nil,
			errFunc:  func(value int) error { return errors.New("error from func") },
			value:    0,
			msg:      "test message",
			expected: "test message: error from func",
		},
		{
			name:     "Error from errFunc with initial error",
			initial:  errors.New("initial error"),
			errFunc:  func(value int) error { return errors.New("error from func") },
			value:    0,
			msg:      "test message",
			expected: "initial error\ntest message: error from func",
		},
		{
			name:     "Error from errFunc with initial wrapped error",
			initial:  fmt.Errorf("wrapped error: %w", errors.New("initial error")),
			errFunc:  func(value int) error { return errors.New("error from func") },
			value:    0,
			msg:      "test message",
			expected: "wrapped error: initial error\ntest message: error from func",
		},
		{
			name:     "Error from errFunc with initial wrapped error and value",
			initial:  fmt.Errorf("wrapped error: %w", errors.New("initial error")),
			errFunc:  func(value int) error { return fmt.Errorf("hello error:%d", value) },
			value:    1,
			msg:      "test message",
			expected: "wrapped error: initial error\ntest message: hello error:1",
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			err := tc.initial
			CaptureGeneric(&err, tc.errFunc, tc.value, tc.msg)
			switch {
			case err == nil && tc.expected != "":
				t.Errorf("expected %q, got nil", tc.expected)
			case err != nil && err.Error() != tc.expected:
				t.Errorf("expected %q, got %q", tc.expected, err.Error())
			}
		})
	}
}

170
internal/utils/optimize.go Normal file
View File

@@ -0,0 +1,170 @@
package utils
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
errors2 "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
"github.com/rs/zerolog/log"
)
// OptimizeOptions bundles the inputs required by Optimize.
type OptimizeOptions struct {
	// ChapterConverter performs the per-page image conversion.
	ChapterConverter converter.Converter
	// Path is the CBZ/CBR file to optimize.
	Path string
	// Quality is the image quality forwarded to the converter.
	Quality uint8
	// Override, when true, replaces the original file in place instead
	// of writing a sibling "_converted.cbz" (a CBR original is deleted
	// after its CBZ replacement is written).
	Override bool
	// Split is forwarded to the converter's split option
	// (presumably splitting oversized pages — confirm with converter docs).
	Split bool
	// Timeout bounds the conversion; zero or negative means no timeout.
	Timeout time.Duration
}
// Optimize optimizes a CBZ/CBR file using the specified converter.
// It loads the chapter at options.Path, returns early when the chapter
// is already converted, converts all pages (optionally under a
// timeout), and writes the result as a CBZ. Output location and
// original-file deletion follow options.Override (see
// determineOutputPath).
//
// Errors are wrapped with %w so callers can inspect the cause with
// errors.Is/errors.As (the previous %v formatting broke the chain).
func Optimize(options *OptimizeOptions) error {
	log.Info().Str("file", options.Path).Msg("Processing file")
	log.Debug().
		Str("file", options.Path).
		Uint8("quality", options.Quality).
		Bool("override", options.Override).
		Bool("split", options.Split).
		Msg("Optimization parameters")

	// Load the chapter
	log.Debug().Str("file", options.Path).Msg("Loading chapter")
	chapter, err := cbz.LoadChapter(options.Path)
	if err != nil {
		log.Error().Str("file", options.Path).Err(err).Msg("Failed to load chapter")
		return fmt.Errorf("failed to load chapter: %w", err)
	}
	log.Debug().
		Str("file", options.Path).
		Int("pages", len(chapter.Pages)).
		Bool("converted", chapter.IsConverted).
		Msg("Chapter loaded successfully")

	if chapter.IsConverted {
		log.Info().Str("file", options.Path).Msg("Chapter already converted")
		return nil
	}

	// Convert the chapter
	log.Debug().
		Str("file", chapter.FilePath).
		Int("pages", len(chapter.Pages)).
		Uint8("quality", options.Quality).
		Bool("split", options.Split).
		Msg("Starting chapter conversion")

	var ctx context.Context
	if options.Timeout > 0 {
		var cancel context.CancelFunc
		ctx, cancel = context.WithTimeout(context.Background(), options.Timeout)
		defer cancel()
		log.Debug().Str("file", chapter.FilePath).Dur("timeout", options.Timeout).Msg("Applying timeout to chapter conversion")
	} else {
		ctx = context.Background()
	}

	convertedChapter, err := options.ChapterConverter.ConvertChapter(ctx, chapter, options.Quality, options.Split, func(msg string, current uint32, total uint32) {
		// Log every 10th page (and the last) at info level, the rest at debug.
		if current%10 == 0 || current == total {
			log.Info().Str("file", chapter.FilePath).Uint32("current", current).Uint32("total", total).Msg("Converting")
		} else {
			log.Debug().Str("file", chapter.FilePath).Uint32("current", current).Uint32("total", total).Msg("Converting page")
		}
	})
	if err != nil {
		// Ignored pages are non-fatal; anything else aborts the run.
		var pageIgnoredError *errors2.PageIgnoredError
		if errors.As(err, &pageIgnoredError) {
			log.Debug().Str("file", chapter.FilePath).Err(err).Msg("Page conversion error (non-fatal)")
		} else {
			log.Error().Str("file", chapter.FilePath).Err(err).Msg("Chapter conversion failed")
			return fmt.Errorf("failed to convert chapter: %w", err)
		}
	}
	if convertedChapter == nil {
		log.Error().Str("file", chapter.FilePath).Msg("Conversion returned nil chapter")
		return errors.New("failed to convert chapter")
	}
	log.Debug().
		Str("file", chapter.FilePath).
		Int("original_pages", len(chapter.Pages)).
		Int("converted_pages", len(convertedChapter.Pages)).
		Msg("Chapter conversion completed")

	convertedChapter.SetConverted()

	// Determine output path and handle CBR override logic
	log.Debug().
		Str("input_path", options.Path).
		Bool("override", options.Override).
		Msg("Determining output path")
	outputPath, isCbrOverride := determineOutputPath(options.Path, options.Override)

	// Write the converted chapter to CBZ file
	log.Debug().Str("output_path", outputPath).Msg("Writing converted chapter to CBZ file")
	err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
	if err != nil {
		log.Error().Str("output_path", outputPath).Err(err).Msg("Failed to write converted chapter")
		return fmt.Errorf("failed to write converted chapter: %w", err)
	}
	log.Debug().Str("output_path", outputPath).Msg("Successfully wrote converted chapter")

	// If we're overriding a CBR file, delete the original CBR after successful write
	if isCbrOverride {
		log.Debug().Str("file", options.Path).Msg("Attempting to delete original CBR file")
		err = os.Remove(options.Path)
		if err != nil {
			// Log the error but don't fail the operation since conversion succeeded
			log.Warn().Str("file", options.Path).Err(err).Msg("Failed to delete original CBR file")
		} else {
			log.Info().Str("file", options.Path).Msg("Deleted original CBR file")
		}
	}

	log.Info().Str("output", outputPath).Msg("Converted file written")
	return nil
}

// determineOutputPath computes where the converted chapter is written.
// In override mode a CBR input is redirected to a sibling ".cbz" path
// and flagged for deletion of the original, while any other input is
// overwritten in place. Otherwise a "_converted.cbz" sibling path is
// produced (stripping a known .cbz/.cbr extension when present). The
// second result reports whether the original CBR must be deleted after
// a successful write.
func determineOutputPath(path string, override bool) (outputPath string, isCbrOverride bool) {
	outputPath = path
	pathLower := strings.ToLower(path)

	if override {
		if strings.HasSuffix(pathLower, ".cbr") {
			// Convert CBR to CBZ: change extension and mark for deletion
			outputPath = strings.TrimSuffix(path, filepath.Ext(path)) + ".cbz"
			isCbrOverride = true
			log.Debug().
				Str("original_path", path).
				Str("output_path", outputPath).
				Msg("CBR to CBZ conversion: will delete original after conversion")
		} else {
			log.Debug().
				Str("original_path", path).
				Str("output_path", outputPath).
				Msg("CBZ override mode: will overwrite original file")
		}
		return outputPath, isCbrOverride
	}

	// Handle both .cbz and .cbr files - strip the extension and add _converted.cbz
	switch {
	case strings.HasSuffix(pathLower, ".cbz"):
		outputPath = strings.TrimSuffix(path, ".cbz") + "_converted.cbz"
	case strings.HasSuffix(pathLower, ".cbr"):
		outputPath = strings.TrimSuffix(path, ".cbr") + "_converted.cbz"
	default:
		// Fallback for other extensions - just add _converted.cbz
		outputPath = path + "_converted.cbz"
	}
	log.Debug().
		Str("original_path", path).
		Str("output_path", outputPath).
		Msg("Non-override mode: creating converted file alongside original")
	return outputPath, isCbrOverride
}

View File

@@ -0,0 +1,402 @@
package utils
import (
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
)
// TestOptimizeIntegration runs Optimize end-to-end with the real WebP
// converter against every non-converted CBZ/CBR fixture in testdata,
// once without override and (for CBR inputs) once with override. It
// verifies the output archive exists, is marked converted, contains
// only .webp pages, and that the original is kept or deleted as the
// mode requires.
func TestOptimizeIntegration(t *testing.T) {
	// Skip integration tests if no libwebp is available or testdata doesn't exist
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	// Check if testdata directory exists
	testdataDir := "../../testdata"
	if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
		t.Skip("testdata directory not found, skipping integration tests")
	}

	// Create temporary directory for tests
	tempDir, err := os.MkdirTemp("", "test_optimize_integration")
	if err != nil {
		t.Fatal(err)
	}
	// NOTE(review): a RemoveAll failure is joined into err but err is
	// never inspected after the deferred call, so cleanup errors are
	// effectively ignored here.
	defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")

	// Get the real webp converter
	converterInstance, err := converter.Get(constant.WebP)
	if err != nil {
		t.Skip("WebP converter not available, skipping integration tests")
	}

	// Prepare the converter
	err = converterInstance.PrepareConverter()
	if err != nil {
		t.Skip("Failed to prepare WebP converter, skipping integration tests")
	}

	// Collect all test files (CBZ/CBR, excluding converted ones)
	var testFiles []string
	err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() {
			fileName := strings.ToLower(info.Name())
			if (strings.HasSuffix(fileName, ".cbz") || strings.HasSuffix(fileName, ".cbr")) && !strings.Contains(fileName, "converted") {
				testFiles = append(testFiles, path)
			}
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}

	if len(testFiles) == 0 {
		t.Skip("No test files found")
	}

	tests := []struct {
		name           string
		inputFile      string
		override       bool
		expectedOutput string
		shouldDelete   bool
		expectError    bool
	}{}

	// Generate test cases for each available test file
	for _, testFile := range testFiles {
		baseName := strings.TrimSuffix(filepath.Base(testFile), filepath.Ext(testFile))
		isCBR := strings.HasSuffix(strings.ToLower(testFile), ".cbr")

		// Test without override
		tests = append(tests, struct {
			name           string
			inputFile      string
			override       bool
			expectedOutput string
			shouldDelete   bool
			expectError    bool
		}{
			name:           fmt.Sprintf("%s file without override", strings.ToUpper(filepath.Ext(testFile)[1:])),
			inputFile:      testFile,
			override:       false,
			expectedOutput: filepath.Join(filepath.Dir(testFile), baseName+"_converted.cbz"),
			shouldDelete:   false,
			expectError:    false,
		})

		// Test with override
		if isCBR {
			tests = append(tests, struct {
				name           string
				inputFile      string
				override       bool
				expectedOutput string
				shouldDelete   bool
				expectError    bool
			}{
				name:           fmt.Sprintf("%s file with override", strings.ToUpper(filepath.Ext(testFile)[1:])),
				inputFile:      testFile,
				override:       true,
				expectedOutput: filepath.Join(filepath.Dir(testFile), baseName+".cbz"),
				shouldDelete:   true,
				expectError:    false,
			})
		}
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Create a copy of the input file for this test
			testFile := filepath.Join(tempDir, tt.name+"_"+filepath.Base(tt.inputFile))
			data, err := os.ReadFile(tt.inputFile)
			if err != nil {
				t.Fatal(err)
			}
			err = os.WriteFile(testFile, data, 0644)
			if err != nil {
				t.Fatal(err)
			}

			// Setup options with real converter
			options := &OptimizeOptions{
				ChapterConverter: converterInstance,
				Path:             testFile,
				Quality:          85,
				Override:         tt.override,
				Split:            false,
				Timeout:          0,
			}

			// Run optimization
			err = Optimize(options)

			if tt.expectError {
				if err == nil {
					t.Error("Expected error but got none")
				}
				return
			}

			if err != nil {
				t.Fatalf("Unexpected error: %v", err)
			}

			// Determine expected output path for this test.
			// Recomputed against the per-test copy, because tt.expectedOutput
			// points at the original testdata location, not tempDir.
			expectedOutput := tt.expectedOutput
			if tt.override && strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
				expectedOutput = strings.TrimSuffix(testFile, filepath.Ext(testFile)) + ".cbz"
			} else if !tt.override {
				if strings.HasSuffix(strings.ToLower(testFile), ".cbz") {
					expectedOutput = strings.TrimSuffix(testFile, ".cbz") + "_converted.cbz"
				} else if strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
					expectedOutput = strings.TrimSuffix(testFile, ".cbr") + "_converted.cbz"
				}
			} else {
				expectedOutput = testFile
			}

			// Verify output file exists
			if _, err := os.Stat(expectedOutput); os.IsNotExist(err) {
				t.Errorf("Expected output file not found: %s", expectedOutput)
			}

			// Verify output is a valid CBZ with converted content
			chapter, err := cbz.LoadChapter(expectedOutput)
			if err != nil {
				t.Errorf("Failed to load converted chapter: %v", err)
			}

			if !chapter.IsConverted {
				t.Error("Chapter is not marked as converted")
			}

			// Verify all pages are in WebP format (real conversion indicator)
			for i, page := range chapter.Pages {
				if page.Extension != ".webp" {
					t.Errorf("Page %d is not converted to WebP format (got: %s)", i, page.Extension)
				}
			}

			// Verify original file deletion for CBR override
			if tt.shouldDelete {
				if _, err := os.Stat(testFile); !os.IsNotExist(err) {
					t.Error("Original CBR file should have been deleted but still exists")
				}
			} else {
				// Verify original file still exists (unless it's the same as output)
				if testFile != expectedOutput {
					if _, err := os.Stat(testFile); os.IsNotExist(err) {
						t.Error("Original file should not have been deleted")
					}
				}
			}

			// Clean up output file
			os.Remove(expectedOutput)
		})
	}
}
// TestOptimizeIntegration_AlreadyConverted checks that Optimize is a
// no-op for a chapter whose archive is already marked as converted:
// it must succeed without producing a "_converted.cbz" sibling.
func TestOptimizeIntegration_AlreadyConverted(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	// Create temporary directory
	tempDir, err := os.MkdirTemp("", "test_optimize_integration_converted")
	if err != nil {
		t.Fatal(err)
	}
	// NOTE(review): cleanup errors joined into err are never inspected.
	defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")

	// Use a converted test file
	testdataDir := "../../testdata"
	if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
		t.Skip("testdata directory not found, skipping integration tests")
	}

	// Get the real webp converter
	converterInstance, err := converter.Get(constant.WebP)
	if err != nil {
		t.Skip("WebP converter not available, skipping integration tests")
	}

	// Prepare the converter
	err = converterInstance.PrepareConverter()
	if err != nil {
		t.Skip("Failed to prepare WebP converter, skipping integration tests")
	}

	// Copy the first fixture whose name contains "converted" into the
	// temp dir; filepath.SkipDir ends the walk after the first hit.
	var convertedFile string
	err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() && strings.Contains(strings.ToLower(info.Name()), "converted") {
			destPath := filepath.Join(tempDir, info.Name())
			data, err := os.ReadFile(path)
			if err != nil {
				return err
			}
			err = os.WriteFile(destPath, data, info.Mode())
			if err != nil {
				return err
			}
			convertedFile = destPath
			return filepath.SkipDir
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}

	if convertedFile == "" {
		t.Skip("No converted test file found")
	}

	options := &OptimizeOptions{
		ChapterConverter: converterInstance,
		Path:             convertedFile,
		Quality:          85,
		Override:         false,
		Split:            false,
		Timeout:          30 * time.Second,
	}

	err = Optimize(options)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}

	// Should not create a new file since it's already converted
	expectedOutput := strings.TrimSuffix(convertedFile, ".cbz") + "_converted.cbz"
	if _, err := os.Stat(expectedOutput); !os.IsNotExist(err) {
		t.Error("Should not have created a new converted file for already converted chapter")
	}
}
// TestOptimizeIntegration_InvalidFile verifies that Optimize returns an
// error when pointed at a path that does not exist.
func TestOptimizeIntegration_InvalidFile(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	// Acquire and prepare the real WebP converter; skip when unavailable.
	conv, err := converter.Get(constant.WebP)
	if err != nil {
		t.Skip("WebP converter not available, skipping integration tests")
	}
	if err = conv.PrepareConverter(); err != nil {
		t.Skip("Failed to prepare WebP converter, skipping integration tests")
	}

	opts := &OptimizeOptions{
		ChapterConverter: conv,
		Path:             "/nonexistent/file.cbz",
		Quality:          85,
		Override:         false,
		Split:            false,
		Timeout:          30 * time.Second,
	}

	if err = Optimize(opts); err == nil {
		t.Error("Expected error for nonexistent file")
	}
}
// TestOptimizeIntegration_Timeout forces a 10ms conversion timeout on a
// real fixture and asserts that Optimize fails with a timeout-flavored
// error (context deadline exceeded).
func TestOptimizeIntegration_Timeout(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	// Create temporary directory
	tempDir, err := os.MkdirTemp("", "test_optimize_integration_timeout")
	if err != nil {
		t.Fatal(err)
	}
	// NOTE(review): cleanup errors joined into err are never inspected.
	defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")

	// Copy test files
	testdataDir := "../../testdata"
	if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
		t.Skip("testdata directory not found, skipping integration tests")
	}

	// Get the real webp converter
	converterInstance, err := converter.Get(constant.WebP)
	if err != nil {
		t.Skip("WebP converter not available, skipping integration tests")
	}

	// Prepare the converter
	err = converterInstance.PrepareConverter()
	if err != nil {
		t.Skip("Failed to prepare WebP converter, skipping integration tests")
	}

	// Copy the first non-converted CBZ fixture into the temp dir;
	// filepath.SkipDir ends the walk after the first hit.
	var cbzFile string
	err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") && !strings.Contains(info.Name(), "converted") {
			destPath := filepath.Join(tempDir, "test.cbz")
			data, err := os.ReadFile(path)
			if err != nil {
				return err
			}
			err = os.WriteFile(destPath, data, info.Mode())
			if err != nil {
				return err
			}
			cbzFile = destPath
			return filepath.SkipDir
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}

	if cbzFile == "" {
		t.Skip("No CBZ test file found")
	}

	// Test with short timeout to force timeout during conversion
	options := &OptimizeOptions{
		ChapterConverter: converterInstance,
		Path:             cbzFile,
		Quality:          85,
		Override:         false,
		Split:            false,
		Timeout:          10 * time.Millisecond, // Very short timeout to force timeout
	}

	err = Optimize(options)
	if err == nil {
		t.Error("Expected timeout error but got none")
	}

	// Check that the error contains timeout information
	if err != nil && !strings.Contains(err.Error(), "context deadline exceeded") && !strings.Contains(err.Error(), "timeout") {
		t.Errorf("Expected timeout error message, got: %v", err)
	}
}

View File

@@ -0,0 +1,424 @@
package utils
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
)
// MockConverter is a test double implementing the chapter-converter
// interface without performing any real image work.
type MockConverter struct {
	// shouldFail, when true, makes ConvertChapter and PrepareConverter
	// return a MockError instead of succeeding.
	shouldFail bool
}
// ConvertChapter simulates a chapter conversion. It fails immediately
// when shouldFail is set, honors context cancellation both before and
// during the per-page loop, invokes progress once per page, and returns
// a copy of the chapter marked as converted. The quality and split
// arguments are ignored by the mock.
//
// NOTE(review): the returned chapter shares the Pages slice with the
// input; pages are not actually re-encoded.
func (m *MockConverter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
	if m.shouldFail {
		return nil, &MockError{message: "mock conversion error"}
	}

	// Check if context is already cancelled
	select {
	case <-ctx.Done():
		return nil, ctx.Err()
	default:
	}

	// Simulate some work that can be interrupted by context cancellation
	for i := 0; i < len(chapter.Pages); i++ {
		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		default:
			// Simulate processing time
			time.Sleep(100 * time.Microsecond)
			if progress != nil {
				progress(fmt.Sprintf("Converting page %d/%d", i+1, len(chapter.Pages)), uint32(i+1), uint32(len(chapter.Pages)))
			}
		}
	}

	// Create a copy of the chapter to simulate conversion
	converted := &manga.Chapter{
		FilePath:      chapter.FilePath,
		Pages:         chapter.Pages,
		ComicInfoXml:  chapter.ComicInfoXml,
		IsConverted:   true,
		ConvertedTime: time.Now(),
	}

	return converted, nil
}
// Format reports the conversion format this mock claims to produce
// (always WebP).
func (m *MockConverter) Format() constant.ConversionFormat {
	return constant.WebP
}
// PrepareConverter succeeds unless the mock is configured to fail, in
// which case it returns a MockError.
func (m *MockConverter) PrepareConverter() error {
	if !m.shouldFail {
		return nil
	}
	return &MockError{message: "mock prepare error"}
}
// MockError is a minimal error implementation used by the mock
// converter to signal injected failures.
type MockError struct {
	message string
}

// Error returns the stored message verbatim.
func (e *MockError) Error() string {
	return e.message
}
// TestOptimize exercises the Optimize pipeline end to end over copies of the
// CBZ/CBR fixtures: output naming with and without override, CBR->CBZ
// replacement (including deletion of the original CBR), and converter failure
// propagation.
func TestOptimize(t *testing.T) {
	// Create temporary directory for tests
	tempDir, err := os.MkdirTemp("", "test_optimize")
	if err != nil {
		t.Fatal(err)
	}
	defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
	// Copy test files
	testdataDir := "../../testdata"
	if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
		t.Skip("testdata directory not found, skipping tests")
	}
	// Copy sample files into the temp dir so the fixtures are never mutated.
	// Fixtures whose name contains "converted" are skipped: tests must start
	// from unconverted archives.
	var cbzFile, cbrFile string
	err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() {
			fileName := strings.ToLower(info.Name())
			if strings.HasSuffix(fileName, ".cbz") && !strings.Contains(fileName, "converted") {
				destPath := filepath.Join(tempDir, "test.cbz")
				data, err := os.ReadFile(path)
				if err != nil {
					return err
				}
				err = os.WriteFile(destPath, data, info.Mode())
				if err != nil {
					return err
				}
				cbzFile = destPath
			} else if strings.HasSuffix(fileName, ".cbr") {
				destPath := filepath.Join(tempDir, "test.cbr")
				data, err := os.ReadFile(path)
				if err != nil {
					return err
				}
				err = os.WriteFile(destPath, data, info.Mode())
				if err != nil {
					return err
				}
				cbrFile = destPath
			}
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}
	if cbzFile == "" {
		t.Skip("No CBZ test file found")
	}
	// Create a CBR file by copying the CBZ file if no CBR exists
	// NOTE(review): the copy is byte-identical to the CBZ, i.e. a ZIP with a
	// .cbr name — presumably the loader sniffs content, not extension; confirm.
	if cbrFile == "" {
		cbrFile = filepath.Join(tempDir, "test.cbr")
		data, err := os.ReadFile(cbzFile)
		if err != nil {
			t.Fatal(err)
		}
		err = os.WriteFile(cbrFile, data, 0644)
		if err != nil {
			t.Fatal(err)
		}
	}
	// Table of scenarios. expectedOutput here is indicative only: each subtest
	// copies its input and recomputes the expected path from the copy below.
	tests := []struct {
		name           string
		inputFile      string
		override       bool
		expectedOutput string
		shouldDelete   bool
		expectError    bool
		mockFail       bool
	}{
		{
			name:           "CBZ file without override",
			inputFile:      cbzFile,
			override:       false,
			expectedOutput: strings.TrimSuffix(cbzFile, ".cbz") + "_converted.cbz",
			shouldDelete:   false,
			expectError:    false,
		},
		{
			name:           "CBZ file with override",
			inputFile:      cbzFile,
			override:       true,
			expectedOutput: cbzFile,
			shouldDelete:   false,
			expectError:    false,
		},
		{
			name:           "CBR file without override",
			inputFile:      cbrFile,
			override:       false,
			expectedOutput: strings.TrimSuffix(cbrFile, ".cbr") + "_converted.cbz",
			shouldDelete:   false,
			expectError:    false,
		},
		{
			name:           "CBR file with override",
			inputFile:      cbrFile,
			override:       true,
			expectedOutput: strings.TrimSuffix(cbrFile, ".cbr") + ".cbz",
			shouldDelete:   true,
			expectError:    false,
		},
		{
			name:      "Converter failure",
			inputFile: cbzFile,
			override:  false,
			expectError: true,
			mockFail:    true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Create a copy of the input file for this test
			testFile := filepath.Join(tempDir, tt.name+"_"+filepath.Base(tt.inputFile))
			data, err := os.ReadFile(tt.inputFile)
			if err != nil {
				t.Fatal(err)
			}
			err = os.WriteFile(testFile, data, 0644)
			if err != nil {
				t.Fatal(err)
			}
			// Setup options
			options := &OptimizeOptions{
				ChapterConverter: &MockConverter{shouldFail: tt.mockFail},
				Path:             testFile,
				Quality:          85,
				Override:         tt.override,
				Split:            false,
				Timeout:          0,
			}
			// Run optimization
			err = Optimize(options)
			if tt.expectError {
				if err == nil {
					t.Error("Expected error but got none")
				}
				return
			}
			if err != nil {
				t.Fatalf("Unexpected error: %v", err)
			}
			// Determine expected output path for this test, derived from the
			// per-test copy rather than the table's original path.
			expectedOutput := tt.expectedOutput
			if tt.override && strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
				expectedOutput = strings.TrimSuffix(testFile, filepath.Ext(testFile)) + ".cbz"
			} else if !tt.override {
				if strings.HasSuffix(strings.ToLower(testFile), ".cbz") {
					expectedOutput = strings.TrimSuffix(testFile, ".cbz") + "_converted.cbz"
				} else if strings.HasSuffix(strings.ToLower(testFile), ".cbr") {
					expectedOutput = strings.TrimSuffix(testFile, ".cbr") + "_converted.cbz"
				}
			} else {
				expectedOutput = testFile
			}
			// Verify output file exists
			if _, err := os.Stat(expectedOutput); os.IsNotExist(err) {
				t.Errorf("Expected output file not found: %s", expectedOutput)
			}
			// Verify output is a valid CBZ
			chapter, err := cbz.LoadChapter(expectedOutput)
			if err != nil {
				t.Errorf("Failed to load converted chapter: %v", err)
			}
			if !chapter.IsConverted {
				t.Error("Chapter is not marked as converted")
			}
			// Verify original file deletion for CBR override
			if tt.shouldDelete {
				if _, err := os.Stat(testFile); !os.IsNotExist(err) {
					t.Error("Original CBR file should have been deleted but still exists")
				}
			} else {
				// Verify original file still exists (unless it's the same as output)
				if testFile != expectedOutput {
					if _, err := os.Stat(testFile); os.IsNotExist(err) {
						t.Error("Original file should not have been deleted")
					}
				}
			}
			// Clean up output file
			os.Remove(expectedOutput)
		})
	}
}
// TestOptimize_AlreadyConverted checks that Optimize is a no-op for a chapter
// already flagged as converted: no "_converted" sibling file may be created.
func TestOptimize_AlreadyConverted(t *testing.T) {
	// Create temporary directory
	tempDir, err := os.MkdirTemp("", "test_optimize_converted")
	if err != nil {
		t.Fatal(err)
	}
	defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
	// Use a converted test file
	testdataDir := "../../testdata"
	if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
		t.Skip("testdata directory not found, skipping tests")
	}
	var convertedFile string
	err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() && strings.Contains(strings.ToLower(info.Name()), "converted") {
			destPath := filepath.Join(tempDir, info.Name())
			data, err := os.ReadFile(path)
			if err != nil {
				return err
			}
			err = os.WriteFile(destPath, data, info.Mode())
			if err != nil {
				return err
			}
			convertedFile = destPath
			// Stop walking once a converted fixture has been copied.
			return filepath.SkipDir
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}
	if convertedFile == "" {
		t.Skip("No converted test file found")
	}
	options := &OptimizeOptions{
		ChapterConverter: &MockConverter{},
		Path:             convertedFile,
		Quality:          85,
		Override:         false,
		Split:            false,
		Timeout:          0,
	}
	err = Optimize(options)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	// Should not create a new file since it's already converted
	expectedOutput := strings.TrimSuffix(convertedFile, ".cbz") + "_converted.cbz"
	if _, err := os.Stat(expectedOutput); !os.IsNotExist(err) {
		t.Error("Should not have created a new converted file for already converted chapter")
	}
}
// TestOptimize_InvalidFile verifies that Optimize surfaces an error when the
// target archive does not exist on disk.
func TestOptimize_InvalidFile(t *testing.T) {
	opts := &OptimizeOptions{
		ChapterConverter: &MockConverter{},
		Path:             "/nonexistent/file.cbz",
		Quality:          85,
		Override:         false,
		Split:            false,
		Timeout:          0,
	}
	if err := Optimize(opts); err == nil {
		t.Error("Expected error for nonexistent file")
	}
}
// TestOptimize_Timeout verifies that a very small Timeout aborts the
// conversion and that the resulting error reports a context deadline exceeded.
//
// Fix: the nil-error check now uses t.Fatal instead of t.Error — the original
// continued past t.Error and dereferenced err.Error() on a nil error, turning
// an unexpected success into a panic instead of a clean test failure.
func TestOptimize_Timeout(t *testing.T) {
	// Create temporary directory
	tempDir, err := os.MkdirTemp("", "test_optimize_timeout")
	if err != nil {
		t.Fatal(err)
	}
	defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
	// Copy test files
	testdataDir := "../../testdata"
	if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
		t.Skip("testdata directory not found, skipping tests")
	}
	var cbzFile string
	err = filepath.Walk(testdataDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") && !strings.Contains(info.Name(), "converted") {
			destPath := filepath.Join(tempDir, "test.cbz")
			data, err := os.ReadFile(path)
			if err != nil {
				return err
			}
			err = os.WriteFile(destPath, data, info.Mode())
			if err != nil {
				return err
			}
			cbzFile = destPath
			// One fixture is enough; stop walking.
			return filepath.SkipDir
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}
	if cbzFile == "" {
		t.Skip("No CBZ test file found")
	}
	// Test with short timeout (500 microseconds) to force timeout during conversion
	options := &OptimizeOptions{
		ChapterConverter: &MockConverter{},
		Path:             cbzFile,
		Quality:          85,
		Override:         false,
		Split:            false,
		Timeout:          500 * time.Microsecond, // 500 microseconds - should timeout during page processing
	}
	err = Optimize(options)
	// Fatal (not Error): proceeding with a nil err would panic on err.Error() below.
	if err == nil {
		t.Fatal("Expected timeout error but got none")
	}
	// Check that the error contains timeout information
	if !strings.Contains(err.Error(), "context deadline exceeded") {
		t.Errorf("Expected timeout error message, got: %v", err)
	}
}

16
main.go
View File

@@ -1,16 +0,0 @@
package main
import (
"github.com/belphemur/CBZOptimizer/cmd"
)
// Build-time metadata; presumably injected via -ldflags by the release
// pipeline (defaults apply to plain `go build` dev builds) — confirm in
// .goreleaser.yml.
var (
	version = "dev"
	commit  = "none"
	date    = "unknown"
)

// main wires the build metadata into the CLI layer and dispatches execution.
func main() {
	cmd.SetVersionInfo(version, commit, date)
	cmd.Execute()
}

View File

@@ -1,17 +0,0 @@
package manga
import "image"
// PageContainer is a struct that holds a manga page, its image, and the image format.
// PageContainer is a struct that holds a manga page, its image, and the image format.
type PageContainer struct {
	// Page is a pointer to a manga page object.
	Page *Page
	// Image is the decoded image of the manga page.
	Image image.Image
	// Format is a string representing the format of the image (e.g., "png", "jpeg", "webp").
	Format string
}

// NewContainer builds a PageContainer around page, its decoded image img, and
// the image format string (e.g. "png", "jpeg", "webp").
//
// Fix: the parameter was named `Page`, shadowing the exported Page type inside
// the function body; renamed to the idiomatic lowercase `page` (no caller impact).
func NewContainer(page *Page, img image.Image, format string) *PageContainer {
	return &PageContainer{Page: page, Image: img, Format: format}
}

View File

@@ -1,5 +0,0 @@
package meta
// Version is the application version; the default marks an untagged dev build.
var Version = "v0.0.0"

// Commit is the git commit hash of the build; empty for dev builds.
var Commit = ""

// Date is the build timestamp; empty for dev builds.
var Date = ""

View File

@@ -1,18 +1,23 @@
package converter
import (
"context"
"fmt"
"github.com/belphemur/CBZOptimizer/converter/constant"
"github.com/belphemur/CBZOptimizer/converter/webp"
"github.com/belphemur/CBZOptimizer/manga"
"github.com/samber/lo"
"strings"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/webp"
"github.com/samber/lo"
)
type Converter interface {
// Format of the converter
Format() (format constant.ConversionFormat)
ConvertChapter(chapter *manga.Chapter, quality uint8, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error)
// ConvertChapter converts a manga chapter to the specified format.
//
// Returns partial success where some pages are converted and some are not.
ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error)
PrepareConverter() error
}

View File

@@ -0,0 +1,281 @@
package converter
import (
"bytes"
"context"
"image"
"image/jpeg"
"os"
"testing"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
)
// TestConvertChapter runs every registered converter over generated chapters
// covering page splitting, oversize pages, and corrupted pages, then checks
// the resulting page extensions against per-case expectations.
//
// Fixes: (1) guard against a nil converted chapter before dereferencing it —
// a fully-failed conversion (e.g. cancellation) returns nil alongside the
// error, which previously panicked instead of failing the test; (2) cases with
// expectError now actually assert that an error occurred.
func TestConvertChapter(t *testing.T) {
	testCases := []struct {
		name           string
		genTestChapter func(path string, isSplit bool) (*manga.Chapter, []string, error)
		split          bool
		expectError    bool
	}{
		{
			name:           "All split pages",
			genTestChapter: genHugePage,
			split:          true,
		},
		{
			name:           "Big Pages, no split",
			genTestChapter: genHugePage,
			split:          false,
			expectError:    true,
		},
		{
			name:           "No split pages",
			genTestChapter: genSmallPages,
			split:          false,
		},
		{
			name:           "Mix of split and no split pages",
			genTestChapter: genMixSmallBig,
			split:          true,
		},
		{
			name:           "Mix of Huge and small page",
			genTestChapter: genMixSmallHuge,
			split:          false,
			expectError:    true,
		},
		{
			name:           "Two corrupted pages",
			genTestChapter: genTwoCorrupted,
			split:          false,
			expectError:    true,
		},
	}
	// Shared scratch file handed to the chapter generators.
	temp, err := os.CreateTemp("", "test_chapter_*.cbz")
	if err != nil {
		t.Fatalf("failed to create temporary file: %v", err)
	}
	defer errs.CaptureGeneric(&err, os.Remove, temp.Name(), "failed to remove temporary file")
	for _, converter := range Available() {
		converter, err := Get(converter)
		if err != nil {
			t.Fatalf("failed to get converter: %v", err)
		}
		t.Run(converter.Format().String(), func(t *testing.T) {
			for _, tc := range testCases {
				t.Run(tc.name, func(t *testing.T) {
					chapter, expectedExtensions, err := tc.genTestChapter(temp.Name(), tc.split)
					if err != nil {
						t.Fatalf("failed to load test genTestChapter: %v", err)
					}
					quality := uint8(80)
					progress := func(msg string, current uint32, total uint32) {
						t.Log(msg)
					}
					convertedChapter, err := converter.ConvertChapter(context.Background(), chapter, quality, tc.split, progress)
					if err != nil && !tc.expectError {
						t.Fatalf("failed to convert genTestChapter: %v", err)
					}
					if tc.expectError && err == nil {
						t.Fatalf("expected a conversion error but got none")
					}
					// Fail cleanly rather than panic if the converter returned nil.
					if convertedChapter == nil {
						t.Fatalf("converter returned a nil chapter")
					}
					if len(convertedChapter.Pages) == 0 {
						t.Fatalf("no pages were converted")
					}
					if len(convertedChapter.Pages) != len(expectedExtensions) {
						t.Fatalf("converted chapter has %d pages but expected %d", len(convertedChapter.Pages), len(expectedExtensions))
					}
					// Check each page's extension against the expected array
					for i, page := range convertedChapter.Pages {
						expectedExt := expectedExtensions[i]
						if page.Extension != expectedExt {
							t.Errorf("page %d has extension %s but expected %s", page.Index, page.Extension, expectedExt)
						}
					}
				})
			}
		})
	}
}
// genHugePage builds a single-page chapter whose image is 1x17000 px — taller
// than the WebP hard limit — so conversion must either split it (nine slices
// at the 2000 px crop height: 8 full + one 1000 px remainder) or reject it.
// NOTE(review): the file at path is opened but never read; presumably only to
// verify the path exists — confirm before removing.
func genHugePage(path string, isSplit bool) (*manga.Chapter, []string, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, nil, err
	}
	defer errs.Capture(&err, file.Close, "failed to close file")
	var pages []*manga.Page
	expectedExtensions := []string{".jpg"} // One image that's generated as JPEG
	if isSplit {
		expectedExtensions = []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp"}
	}
	// Create one tall page
	img := image.NewRGBA(image.Rect(0, 0, 1, 17000))
	buf := new(bytes.Buffer)
	err = jpeg.Encode(buf, img, nil)
	if err != nil {
		return nil, nil, err
	}
	page := &manga.Page{
		Index:     0,
		Contents:  buf,
		Extension: ".jpg",
	}
	pages = append(pages, page)
	return &manga.Chapter{
		FilePath: path,
		Pages:    pages,
	}, expectedExtensions, nil
}
// genSmallPages builds a five-page chapter of small (300x1000) JPEG images
// that fit within the converter's limits; every page is expected to become WebP.
func genSmallPages(path string, isSplit bool) (*manga.Chapter, []string, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, nil, err
	}
	defer errs.Capture(&err, file.Close, "failed to close file")
	const pageCount = 5
	pages := make([]*manga.Page, 0, pageCount)
	for idx := 0; idx < pageCount; idx++ {
		buf := new(bytes.Buffer)
		if err = jpeg.Encode(buf, image.NewRGBA(image.Rect(0, 0, 300, 1000)), nil); err != nil {
			return nil, nil, err
		}
		pages = append(pages, &manga.Page{
			Index:     uint16(idx),
			Contents:  buf,
			Extension: ".jpg",
		})
	}
	expected := []string{".webp", ".webp", ".webp", ".webp", ".webp"}
	return &manga.Chapter{
		FilePath: path,
		Pages:    pages,
	}, expected, nil
}
// genMixSmallBig builds a five-page chapter whose page heights grow from
// 1000 px to 5000 px; with splitting enabled the taller pages fan out into
// extra parts (eight pages expected in total), otherwise each page converts
// to a single WebP image.
func genMixSmallBig(path string, isSplit bool) (*manga.Chapter, []string, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, nil, err
	}
	defer errs.Capture(&err, file.Close, "failed to close file")
	var pages []*manga.Page
	for i := 0; i < 5; i++ { // 5 pages, heights 1000*(i+1)
		img := image.NewRGBA(image.Rect(0, 0, 300, 1000*(i+1)))
		buf := new(bytes.Buffer)
		err := jpeg.Encode(buf, img, nil)
		if err != nil {
			return nil, nil, err
		}
		page := &manga.Page{
			Index:     uint16(i),
			Contents:  buf,
			Extension: ".jpg",
		}
		pages = append(pages, page)
	}
	expectedExtensions := []string{".webp", ".webp", ".webp", ".webp", ".webp"}
	if isSplit {
		expectedExtensions = []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp"}
	}
	return &manga.Chapter{
		FilePath: path,
		Pages:    pages,
	}, expectedExtensions, nil
}
// genMixSmallHuge builds a ten-page chapter with heights 2000*(i+1) px (up to
// 20000 px). Without splitting, the last two pages exceed the WebP height
// limit and are expected to keep their .jpg extension.
func genMixSmallHuge(path string, isSplit bool) (*manga.Chapter, []string, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, nil, err
	}
	defer errs.Capture(&err, file.Close, "failed to close file")
	var pages []*manga.Page
	for i := 0; i < 10; i++ { // 10 pages of increasing height (the old "5 pages" comment was wrong)
		img := image.NewRGBA(image.Rect(0, 0, 1, 2000*(i+1)))
		buf := new(bytes.Buffer)
		err := jpeg.Encode(buf, img, nil)
		if err != nil {
			return nil, nil, err
		}
		page := &manga.Page{
			Index:     uint16(i),
			Contents:  buf,
			Extension: ".jpg",
		}
		pages = append(pages, page)
	}
	return &manga.Chapter{
		FilePath: path,
		Pages:    pages,
	}, []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".jpg", ".jpg"}, nil
}
// genTwoCorrupted builds a five-page chapter where pages 2 and 4 contain
// undecodable bytes; those pages are expected to keep their .jpg extension
// while the valid pages convert to WebP.
func genTwoCorrupted(path string, isSplit bool) (*manga.Chapter, []string, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, nil, err
	}
	defer errs.Capture(&err, file.Close, "failed to close file")
	// Pages at these indices carry data that cannot be decoded as an image.
	corrupted := map[int]bool{2: true, 4: true}
	const pageCount = 5
	var pages []*manga.Page
	for idx := 0; idx < pageCount; idx++ {
		var buf *bytes.Buffer
		if corrupted[idx] {
			buf = bytes.NewBufferString("corrupted data") // invalid image payload
		} else {
			buf = new(bytes.Buffer)
			if err = jpeg.Encode(buf, image.NewRGBA(image.Rect(0, 0, 300, 1000)), nil); err != nil {
				return nil, nil, err
			}
		}
		pages = append(pages, &manga.Page{
			Index:     uint16(idx),
			Contents:  buf,
			Extension: ".jpg",
		})
	}
	// Valid pages convert to .webp; corrupted pages stay as-is even with split,
	// since they cannot be decoded in the first place.
	expected := []string{".webp", ".webp", ".jpg", ".webp", ".jpg"}
	return &manga.Chapter{
		FilePath: path,
		Pages:    pages,
	}, expected, nil
}

View File

@@ -0,0 +1,13 @@
package errors
// PageIgnoredError signals that a page was skipped during conversion rather
// than converted (e.g. its image could not be decoded or it exceeds format limits).
type PageIgnoredError struct {
	s string
}

// Error returns the reason the page was ignored.
func (e *PageIgnoredError) Error() string {
	return e.s
}

// NewPageIgnored wraps text in a *PageIgnoredError.
func NewPageIgnored(text string) error {
	return &PageIgnoredError{s: text}
}

View File

@@ -0,0 +1,464 @@
package webp
import (
	"bytes"
	"context"
	"errors"
	"fmt"
	"image"
	_ "image/gif"
	_ "image/jpeg"
	"image/png"
	"runtime"
	"strings"
	"sync"
	"sync/atomic"

	"github.com/belphemur/CBZOptimizer/v2/internal/manga"
	"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
	converterrors "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
	"github.com/oliamb/cutter"
	"github.com/rs/zerolog/log"
	"golang.org/x/exp/slices"
	_ "golang.org/x/image/webp"
)
// webpMaxHeight is the hard limit the WebP format places on image height, in pixels.
const webpMaxHeight = 16383

// Converter encodes manga pages to WebP, optionally splitting very tall pages
// into slices so they fit within the format's height limit.
type Converter struct {
	// maxHeight is the page height (px) above which a page is split when splitting is requested.
	maxHeight int
	// cropHeight is the height (px) of each slice produced when splitting a tall page.
	cropHeight int
	// isPrepared records whether the WebP encoder has been initialized.
	isPrepared bool
}

// Format reports the conversion format implemented by this converter (WebP).
func (converter *Converter) Format() (format constant.ConversionFormat) {
	return constant.WebP
}

// New returns a WebP converter with the default split thresholds.
func New() *Converter {
	return &Converter{
		//maxHeight: 16383 / 2,
		maxHeight:  4000,
		cropHeight: 2000,
		isPrepared: false,
	}
}

// PrepareConverter initializes the underlying WebP encoder once; subsequent
// calls are no-ops.
func (converter *Converter) PrepareConverter() error {
	if converter.isPrepared {
		return nil
	}
	err := PrepareEncoder()
	if err != nil {
		return err
	}
	converter.isPrepared = true
	return nil
}
// ConvertChapter converts the chapter's pages to WebP using a worker pool
// bounded at runtime.NumCPU(). Pages taller than maxHeight are cropped into
// slices when split is true. Partial success is possible: pages that fail are
// reported via the aggregated error while successful pages are kept. A context
// cancellation aborts the run and returns ctx.Err().
//
// Deadlock safety: every wgConvertedPages.Add(1) is paired with a Done —
// either by the conversion goroutine when it finishes, or inline when a send
// to pagesChan is abandoned because ctx was cancelled — so the Wait below can
// never hang on a page that was counted but never queued.
func (converter *Converter) ConvertChapter(ctx context.Context, chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
	log.Debug().
		Str("chapter", chapter.FilePath).
		Int("pages", len(chapter.Pages)).
		Uint8("quality", quality).
		Bool("split", split).
		Int("max_goroutines", runtime.NumCPU()).
		Msg("Starting chapter conversion")
	err := converter.PrepareConverter()
	if err != nil {
		log.Error().Str("chapter", chapter.FilePath).Err(err).Msg("Failed to prepare converter")
		return nil, err
	}
	// wgConvertedPages counts queued page conversions; wgPages counts the
	// per-source-page producer goroutines started further down.
	var wgConvertedPages sync.WaitGroup
	maxGoroutines := runtime.NumCPU()
	pagesChan := make(chan *manga.PageContainer, maxGoroutines)
	// errChan is buffered and all sends select on ctx.Done(), so error
	// producers never block forever.
	errChan := make(chan error, maxGoroutines)
	// NOTE(review): doneChan is closed by the dispatcher goroutine but never
	// received from anywhere visible here — looks vestigial; confirm before removing.
	doneChan := make(chan struct{})
	var wgPages sync.WaitGroup
	wgPages.Add(len(chapter.Pages))
	// guard bounds the number of concurrently running conversion goroutines.
	guard := make(chan struct{}, maxGoroutines)
	pagesMutex := sync.Mutex{}
	var pages []*manga.Page
	// totalPages grows when a page is split into parts; accessed atomically.
	var totalPages = uint32(len(chapter.Pages))
	log.Debug().
		Str("chapter", chapter.FilePath).
		Int("total_pages", len(chapter.Pages)).
		Int("worker_count", maxGoroutines).
		Msg("Initialized conversion worker pool")
	// Check if context is already cancelled
	select {
	case <-ctx.Done():
		log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
		return nil, ctx.Err()
	default:
	}
	// Start the worker pool: the dispatcher drains pagesChan and spawns one
	// bounded goroutine per container.
	go func() {
		defer close(doneChan)
		for page := range pagesChan {
			select {
			case <-ctx.Done():
				return
			case guard <- struct{}{}: // would block if guard channel is already filled
			}
			go func(pageToConvert *manga.PageContainer) {
				defer func() {
					wgConvertedPages.Done()
					<-guard
				}()
				// Check context cancellation before processing
				select {
				case <-ctx.Done():
					return
				default:
				}
				convertedPage, err := converter.convertPage(pageToConvert, quality)
				if err != nil {
					// nil page: unrecoverable for this page — report and drop it.
					if convertedPage == nil {
						select {
						case errChan <- err:
						case <-ctx.Done():
							return
						}
						return
					}
					// Non-nil page with error: keep the page by re-encoding it as PNG.
					buffer := new(bytes.Buffer)
					err := png.Encode(buffer, convertedPage.Image)
					if err != nil {
						select {
						case errChan <- err:
						case <-ctx.Done():
							return
						}
						return
					}
					convertedPage.Page.Contents = buffer
					convertedPage.Page.Extension = ".png"
					convertedPage.Page.Size = uint64(buffer.Len())
				}
				pagesMutex.Lock()
				defer pagesMutex.Unlock()
				pages = append(pages, convertedPage.Page)
				currentTotalPages := atomic.LoadUint32(&totalPages)
				progress(fmt.Sprintf("Converted %d/%d pages to %s format", len(pages), currentTotalPages, converter.Format()), uint32(len(pages)), currentTotalPages)
			}(page)
		}
	}()
	// Process pages: one producer goroutine per source page decides whether the
	// page is queued as-is, split into parts, or reported as an error.
	for _, page := range chapter.Pages {
		select {
		case <-ctx.Done():
			log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
			return nil, ctx.Err()
		default:
		}
		go func(page *manga.Page) {
			defer wgPages.Done()
			splitNeeded, img, format, err := converter.checkPageNeedsSplit(page, split)
			if err != nil {
				var pageIgnoredError *converterrors.PageIgnoredError
				if errors.As(err, &pageIgnoredError) {
					log.Info().Err(err).Msg("Page ignored due to image decode error")
				}
				select {
				case errChan <- err:
				case <-ctx.Done():
					return
				}
				// Still queue the page (not-to-be-converted) so it is preserved in output.
				wgConvertedPages.Add(1)
				select {
				case pagesChan <- manga.NewContainer(page, img, format, false):
				case <-ctx.Done():
					// Send abandoned: balance the Add above to avoid deadlocking Wait.
					wgConvertedPages.Done()
					return
				}
				return
			}
			if !splitNeeded {
				wgConvertedPages.Add(1)
				select {
				case pagesChan <- manga.NewContainer(page, img, format, true):
				case <-ctx.Done():
					// Send abandoned: balance the Add above.
					wgConvertedPages.Done()
					return
				}
				return
			}
			images, err := converter.cropImage(img)
			if err != nil {
				select {
				case errChan <- err:
				case <-ctx.Done():
					return
				}
				return
			}
			// A split page contributes len(images) parts instead of 1.
			atomic.AddUint32(&totalPages, uint32(len(images)-1))
			for i, img := range images {
				select {
				case <-ctx.Done():
					return
				default:
				}
				newPage := &manga.Page{
					Index:          page.Index,
					IsSplitted:     true,
					SplitPartIndex: uint16(i),
				}
				wgConvertedPages.Add(1)
				select {
				case pagesChan <- manga.NewContainer(newPage, img, "N/A", true):
				case <-ctx.Done():
					// Send abandoned: balance the Add above.
					wgConvertedPages.Done()
					return
				}
			}
		}(page)
	}
	wgPages.Wait()
	close(pagesChan)
	// Wait for all conversions to complete or context cancellation
	done := make(chan struct{})
	go func() {
		defer close(done)
		wgConvertedPages.Wait()
	}()
	select {
	case <-done:
		// Conversion completed successfully
	case <-ctx.Done():
		log.Warn().Str("chapter", chapter.FilePath).Msg("Chapter conversion cancelled due to timeout")
		return nil, ctx.Err()
	}
	close(errChan)
	close(guard)
	var errList []error
	for err := range errChan {
		errList = append(errList, err)
	}
	var aggregatedError error = nil
	if len(errList) > 0 {
		aggregatedError = errors.Join(errList...)
		log.Debug().
			Str("chapter", chapter.FilePath).
			Int("error_count", len(errList)).
			Err(errors.Join(errList...)).
			Msg("Conversion completed with errors")
	} else {
		log.Debug().
			Str("chapter", chapter.FilePath).
			Int("pages_converted", len(pages)).
			Msg("Conversion completed successfully")
	}
	// Restore reading order: by page index, then by split-part index.
	slices.SortFunc(pages, func(a, b *manga.Page) int {
		if a.Index == b.Index {
			return int(a.SplitPartIndex) - int(b.SplitPartIndex)
		}
		return int(a.Index) - int(b.Index)
	})
	chapter.Pages = pages
	log.Debug().
		Str("chapter", chapter.FilePath).
		Int("final_page_count", len(pages)).
		Msg("Pages sorted and chapter updated")
	runtime.GC()
	log.Debug().Str("chapter", chapter.FilePath).Msg("Garbage collection completed")
	return chapter, aggregatedError
}
// cropImage slices img horizontally into full-width parts of cropHeight pixels
// each; the final part carries the remainder. Returns the parts in top-to-bottom
// order.
func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
	bounds := img.Bounds()
	height := bounds.Dy()
	width := bounds.Dx()
	// Ceiling division: a partial last slice still counts as a part.
	numParts := height / converter.cropHeight
	if height%converter.cropHeight != 0 {
		numParts++
	}
	log.Debug().
		Int("original_width", width).
		Int("original_height", height).
		Int("crop_height", converter.cropHeight).
		Int("num_parts", numParts).
		Msg("Starting image cropping for page splitting")
	parts := make([]image.Image, numParts)
	for i := 0; i < numParts; i++ {
		partHeight := converter.cropHeight
		if i == numParts-1 {
			// Last slice: whatever height remains.
			partHeight = height - i*converter.cropHeight
		}
		log.Debug().
			Int("part_index", i).
			Int("part_height", partHeight).
			Int("y_offset", i*converter.cropHeight).
			Msg("Cropping image part")
		part, err := cutter.Crop(img, cutter.Config{
			Width:  bounds.Dx(),
			Height: partHeight,
			Anchor: image.Point{Y: i * converter.cropHeight},
			Mode:   cutter.TopLeft,
		})
		if err != nil {
			log.Error().
				Int("part_index", i).
				Err(err).
				Msg("Failed to crop image part")
			return nil, fmt.Errorf("error cropping part %d: %v", i+1, err)
		}
		parts[i] = part
		log.Debug().
			Int("part_index", i).
			Int("cropped_width", part.Bounds().Dx()).
			Int("cropped_height", part.Bounds().Dy()).
			Msg("Image part cropped successfully")
	}
	log.Debug().
		Int("total_parts", len(parts)).
		Msg("Image cropping completed")
	return parts, nil
}
// checkPageNeedsSplit decodes the page image and decides whether it must be
// split before WebP encoding. It returns (needsSplit, decoded image, detected
// format, error). A PageIgnoredError is returned when the image cannot be
// decoded, or when it exceeds the WebP height limit and splitting was not
// requested (in the latter case the decoded image is still returned).
func (converter *Converter) checkPageNeedsSplit(page *manga.Page, splitRequested bool) (bool, image.Image, string, error) {
	log.Debug().
		Uint16("page_index", page.Index).
		Bool("split_requested", splitRequested).
		Int("page_size", len(page.Contents.Bytes())).
		Msg("Analyzing page for splitting")
	reader := bytes.NewBuffer(page.Contents.Bytes())
	img, format, err := image.Decode(reader)
	if err != nil {
		log.Debug().Uint16("page_index", page.Index).Err(err).Msg("Failed to decode page image")
		return false, nil, format, converterrors.NewPageIgnored(fmt.Sprintf("page %d: failed to decode image (%s)", page.Index, err.Error()))
	}
	bounds := img.Bounds()
	height := bounds.Dy()
	width := bounds.Dx()
	log.Debug().
		Uint16("page_index", page.Index).
		Int("width", width).
		Int("height", height).
		Str("format", format).
		Int("max_height", converter.maxHeight).
		Int("webp_max_height", webpMaxHeight).
		Msg("Page dimensions analyzed")
	// Taller than the WebP format allows and no splitting requested: ignore.
	if height >= webpMaxHeight && !splitRequested {
		log.Debug().
			Uint16("page_index", page.Index).
			Int("height", height).
			Int("webp_max", webpMaxHeight).
			Msg("Page too tall for WebP format, would be ignored")
		return false, img, format, converterrors.NewPageIgnored(fmt.Sprintf("page %d is too tall [max: %dpx] to be converted to webp format", page.Index, webpMaxHeight))
	}
	needsSplit := height >= converter.maxHeight && splitRequested
	log.Debug().
		Uint16("page_index", page.Index).
		Bool("needs_split", needsSplit).
		Msg("Page splitting decision made")
	return needsSplit, img, format, nil
}
// convertPage converts a single page container to WebP when it is flagged for
// conversion. Pages already in WebP only get their extension normalized; pages
// marked not-to-be-converted pass through untouched.
//
// Fixes: (1) the WebP-format check claimed to be case-insensitive but only
// matched "webp"/"WEBP" (missing e.g. "WebP") — now uses strings.EqualFold;
// (2) "original_size" was logged from Page.Contents after SetConverted had
// replaced them — the size is now captured before conversion.
func (converter *Converter) convertPage(container *manga.PageContainer, quality uint8) (*manga.PageContainer, error) {
	log.Debug().
		Uint16("page_index", container.Page.Index).
		Str("format", container.Format).
		Bool("to_be_converted", container.IsToBeConverted).
		Uint8("quality", quality).
		Msg("Converting page")
	// WebP format detection, truly case insensitive (image.Decode reports
	// lowercase, but be defensive about other producers).
	if strings.EqualFold(container.Format, "webp") {
		log.Debug().
			Uint16("page_index", container.Page.Index).
			Msg("Page already in WebP format, skipping conversion")
		container.Page.Extension = ".webp"
		return container, nil
	}
	if !container.IsToBeConverted {
		log.Debug().
			Uint16("page_index", container.Page.Index).
			Msg("Page marked as not to be converted, skipping")
		return container, nil
	}
	log.Debug().
		Uint16("page_index", container.Page.Index).
		Uint8("quality", quality).
		Msg("Encoding page to WebP format")
	// Capture the pre-conversion size now: SetConverted replaces the contents.
	originalSize := len(container.Page.Contents.Bytes())
	converted, err := converter.convert(container.Image, uint(quality))
	if err != nil {
		log.Error().
			Uint16("page_index", container.Page.Index).
			Err(err).
			Msg("Failed to convert page to WebP")
		return nil, err
	}
	container.SetConverted(converted, ".webp")
	log.Debug().
		Uint16("page_index", container.Page.Index).
		Int("original_size", originalSize).
		Int("converted_size", len(converted.Bytes())).
		Msg("Page conversion completed")
	return container, nil
}
// convert encodes img as a WebP image at the given quality and returns the
// encoded bytes in a fresh buffer.
func (converter *Converter) convert(img image.Image, quality uint) (*bytes.Buffer, error) {
	buf := new(bytes.Buffer)
	if err := Encode(buf, img, quality); err != nil {
		return nil, err
	}
	return buf, nil
}

View File

@@ -0,0 +1,595 @@
package webp
import (
"bytes"
"context"
"fmt"
"image"
"image/color"
"image/gif"
"image/jpeg"
"image/png"
"sync"
"testing"
"time"
_ "golang.org/x/image/webp"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// createTestImage builds a width x height RGBA image filled with a gradient so
// encoders have real data to compress. The format argument is unused and the
// returned error is always nil (kept for call-site symmetry).
func createTestImage(width, height int, format string) (image.Image, error) {
	img := image.NewRGBA(image.Rect(0, 0, width, height))
	// Column-major fill: the red component is constant per column.
	for x := 0; x < width; x++ {
		red := uint8((x * 255) / width)
		for y := 0; y < height; y++ {
			img.SetRGBA(x, y, color.RGBA{
				R: red,
				G: uint8((y * 255) / height),
				B: 100,
				A: 255,
			})
		}
	}
	return img, nil
}
// encodeImage encodes img in the requested format ("jpeg"/"jpg", "gif",
// "webp", or "png" for anything else) and returns the encoded bytes together
// with the matching file extension.
//
// Fix: the webp branch previously discarded PrepareEncoder's error, which
// could let Encode run against an uninitialized encoder; it is now checked.
func encodeImage(img image.Image, format string) (*bytes.Buffer, string, error) {
	buf := new(bytes.Buffer)
	switch format {
	case "jpeg", "jpg":
		if err := jpeg.Encode(buf, img, &jpeg.Options{Quality: 85}); err != nil {
			return nil, "", err
		}
		return buf, ".jpg", nil
	case "gif":
		if err := gif.Encode(buf, img, nil); err != nil {
			return nil, "", err
		}
		return buf, ".gif", nil
	case "webp":
		if err := PrepareEncoder(); err != nil {
			return nil, "", err
		}
		if err := Encode(buf, img, 80); err != nil {
			return nil, "", err
		}
		return buf, ".webp", nil
	case "png":
		fallthrough
	default:
		if err := png.Encode(buf, img); err != nil {
			return nil, "", err
		}
		return buf, ".png", nil
	}
}
// createTestPage renders a gradient image of the given dimensions, encodes it
// in the requested format, and wraps the result in a manga.Page.
func createTestPage(t *testing.T, index int, width, height int, format string) *manga.Page {
	img, err := createTestImage(width, height, format)
	require.NoError(t, err)
	contents, ext, err := encodeImage(img, format)
	require.NoError(t, err)
	return &manga.Page{
		Index:     uint16(index),
		Contents:  contents,
		Extension: ext,
		Size:      uint64(contents.Len()),
	}
}
// validateConvertedImage asserts that a converted page holds non-empty,
// decodable image data with positive dimensions; pages carrying a .webp
// extension must actually decode as WebP.
func validateConvertedImage(t *testing.T, page *manga.Page) {
	require.NotNil(t, page.Contents)
	require.Greater(t, page.Contents.Len(), 0)
	// Decode the payload to prove it is a real image.
	decoded, format, err := image.Decode(bytes.NewReader(page.Contents.Bytes()))
	require.NoError(t, err, "Failed to decode converted image")
	if page.Extension == ".webp" {
		assert.Equal(t, "webp", format, "Expected WebP format")
	}
	require.NotNil(t, decoded)
	size := decoded.Bounds()
	assert.Greater(t, size.Dx(), 0, "Image width should be positive")
	assert.Greater(t, size.Dy(), 0, "Image height should be positive")
}
// TestConverter_ConvertChapter tests the ConvertChapter method of the WebP converter.
// It verifies various scenarios including:
// - Converting single normal images
// - Converting multiple normal images
// - Converting tall images with split enabled
// - Handling tall images that exceed maximum height
//
// For each test case it validates:
// - Proper error handling
// - Expected number of output pages
// - Correct page ordering
// - Split page handling and indexing
// - Progress callback behavior
//
// The test uses different image dimensions and split settings to ensure
// the converter handles all cases correctly while maintaining proper
// progress reporting and page ordering.
func TestConverter_ConvertChapter(t *testing.T) {
	tests := []struct {
		name        string
		pages       []*manga.Page
		split       bool
		expectSplit bool
		expectError bool
		numExpected int
	}{
		{
			name:        "Single normal image",
			pages:       []*manga.Page{createTestPage(t, 1, 800, 1200, "jpeg")},
			split:       false,
			expectSplit: false,
			numExpected: 1,
		},
		{
			name: "Multiple normal images",
			pages: []*manga.Page{
				createTestPage(t, 1, 800, 1200, "png"),
				createTestPage(t, 2, 800, 1200, "jpeg"),
				createTestPage(t, 3, 800, 1200, "gif"),
			},
			split:       false,
			expectSplit: false,
			numExpected: 3,
		},
		{
			name: "Multiple normal images with webp",
			pages: []*manga.Page{
				createTestPage(t, 1, 800, 1200, "png"),
				createTestPage(t, 2, 800, 1200, "jpeg"),
				createTestPage(t, 3, 800, 1200, "gif"),
				createTestPage(t, 4, 800, 1200, "webp"),
			},
			split:       false,
			expectSplit: false,
			numExpected: 4,
		},
		{
			name:        "Tall image with split enabled",
			pages:       []*manga.Page{createTestPage(t, 1, 800, 5000, "jpeg")},
			split:       true,
			expectSplit: true,
			numExpected: 3, // Based on cropHeight of 2000
		},
		{
			name:        "Tall image without split",
			pages:       []*manga.Page{createTestPage(t, 1, 800, webpMaxHeight+100, "png")},
			split:       false,
			expectError: true,
			numExpected: 1,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			converter := New()
			err := converter.PrepareConverter()
			require.NoError(t, err)
			chapter := &manga.Chapter{
				Pages: tt.pages,
			}
			// Progress must be monotonically non-decreasing and bounded by total;
			// the mutex guards lastProgress against concurrent callbacks.
			var progressMutex sync.Mutex
			var lastProgress uint32
			progress := func(message string, current uint32, total uint32) {
				progressMutex.Lock()
				defer progressMutex.Unlock()
				assert.GreaterOrEqual(t, current, lastProgress, "Progress should never decrease")
				lastProgress = current
				assert.LessOrEqual(t, current, total, "Current progress should not exceed total")
			}
			convertedChapter, err := converter.ConvertChapter(context.Background(), chapter, 80, tt.split, progress)
			if tt.expectError {
				assert.Error(t, err)
				// Partial success is allowed: the converter may still return pages.
				if convertedChapter != nil {
					assert.LessOrEqual(t, len(convertedChapter.Pages), tt.numExpected)
				}
				return
			}
			require.NoError(t, err)
			require.NotNil(t, convertedChapter)
			assert.Len(t, convertedChapter.Pages, tt.numExpected)
			// Validate all converted images
			for _, page := range convertedChapter.Pages {
				validateConvertedImage(t, page)
			}
			// Verify page order: ascending by index, split parts ascending within an index.
			for i := 1; i < len(convertedChapter.Pages); i++ {
				prevPage := convertedChapter.Pages[i-1]
				currPage := convertedChapter.Pages[i]
				if prevPage.Index == currPage.Index {
					assert.Less(t, prevPage.SplitPartIndex, currPage.SplitPartIndex,
						"Split parts should be in ascending order for page %d", prevPage.Index)
				} else {
					assert.Less(t, prevPage.Index, currPage.Index,
						"Pages should be in ascending order")
				}
			}
			if tt.expectSplit {
				splitFound := false
				for _, page := range convertedChapter.Pages {
					if page.IsSplitted {
						splitFound = true
						break
					}
				}
				assert.True(t, splitFound, "Expected to find at least one split page")
			}
		})
	}
}
// TestConverter_convertPage exercises single-page conversion across input
// formats, covering both the convert path and the skip-conversion path.
func TestConverter_convertPage(t *testing.T) {
	conv := New()
	require.NoError(t, conv.PrepareConverter())
	cases := []struct {
		name            string
		format          string
		isToBeConverted bool
		expectWebP      bool
		expectError     bool
	}{
		{name: "Convert PNG to WebP", format: "png", isToBeConverted: true, expectWebP: true},
		{name: "Convert GIF to WebP", format: "gif", isToBeConverted: true, expectWebP: true},
		{name: "Already WebP", format: "webp", isToBeConverted: true, expectWebP: true},
		{name: "Skip conversion", format: "png", isToBeConverted: false, expectWebP: false},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			page := createTestPage(t, 1, 100, 100, tc.format)
			img, err := createTestImage(100, 100, tc.format)
			require.NoError(t, err)
			input := manga.NewContainer(page, img, tc.format, tc.isToBeConverted)
			result, err := conv.convertPage(input, 80)
			if tc.expectError {
				assert.Error(t, err)
				assert.Nil(t, result)
				return
			}
			require.NoError(t, err)
			assert.NotNil(t, result)
			if !tc.expectWebP {
				// Page was deliberately left untouched, so it must keep its
				// original (non-WebP) extension.
				assert.NotEqual(t, ".webp", result.Page.Extension)
				return
			}
			assert.Equal(t, ".webp", result.Page.Extension)
			validateConvertedImage(t, result.Page)
		})
	}
}
// TestConverter_convertPage_EncodingError verifies that convertPage reports an
// error and returns no container when asked to encode a nil image.
func TestConverter_convertPage_EncodingError(t *testing.T) {
	conv := New()
	require.NoError(t, conv.PrepareConverter())
	// A page with an empty contents buffer combined with a nil decoded image
	// simulates a failure in the encoding step while isToBeConverted is set.
	badPage := &manga.Page{
		Index:     1,
		Contents:  &bytes.Buffer{}, // Empty buffer
		Extension: ".png",
		Size:      0,
	}
	result, err := conv.convertPage(manga.NewContainer(badPage, nil, "png", true), 80)
	// Encoding a nil image must fail and yield a nil container.
	assert.Error(t, err)
	assert.Nil(t, result)
}
// TestConverter_checkPageNeedsSplit covers the split decision for a normal
// page, a tall page with splitting enabled, and an over-limit page with
// splitting disabled (which must error).
func TestConverter_checkPageNeedsSplit(t *testing.T) {
	conv := New()
	cases := []struct {
		name        string
		imageHeight int
		split       bool
		expectSplit bool
		expectError bool
	}{
		{name: "Normal height", imageHeight: 1000, split: true},
		{name: "Height exceeds max with split enabled", imageHeight: 5000, split: true, expectSplit: true},
		{name: "Height exceeds webp max without split", imageHeight: webpMaxHeight + 100, expectError: true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			page := createTestPage(t, 1, 800, tc.imageHeight, "jpeg")
			needsSplit, img, format, err := conv.checkPageNeedsSplit(page, tc.split)
			if tc.expectError {
				assert.Error(t, err)
				return
			}
			require.NoError(t, err)
			// On success the decoded image and detected format come back too.
			assert.NotNil(t, img)
			assert.NotEmpty(t, format)
			assert.Equal(t, tc.expectSplit, needsSplit)
		})
	}
}
func TestConverter_Format(t *testing.T) {
converter := New()
assert.Equal(t, constant.WebP, converter.Format())
}
// TestConverter_ConvertChapter_Timeout verifies that ConvertChapter aborts
// with the context's deadline error when the deadline expires before the
// conversion can finish, and that no partial chapter is returned.
func TestConverter_ConvertChapter_Timeout(t *testing.T) {
	converter := New()
	err := converter.PrepareConverter()
	require.NoError(t, err)
	// Create a test chapter with a few pages
	pages := []*manga.Page{
		createTestPage(t, 1, 800, 1200, "jpeg"),
		createTestPage(t, 2, 800, 1200, "png"),
		createTestPage(t, 3, 800, 1200, "gif"),
	}
	chapter := &manga.Chapter{
		FilePath: "/test/chapter.cbz",
		Pages:    pages,
	}
	var progressMutex sync.Mutex
	var lastProgress uint32
	progress := func(message string, current uint32, total uint32) {
		progressMutex.Lock()
		defer progressMutex.Unlock()
		assert.GreaterOrEqual(t, current, lastProgress, "Progress should never decrease")
		lastProgress = current
		assert.LessOrEqual(t, current, total, "Current progress should not exceed total")
	}
	// Use an explicit 1ns timeout (the bare literal 1 also meant one
	// nanosecond, but time.Nanosecond matches the sibling tests and is
	// unambiguous) so the context is expired before conversion starts.
	ctx, cancel := context.WithTimeout(context.Background(), time.Nanosecond)
	defer cancel()
	convertedChapter, err := converter.ConvertChapter(ctx, chapter, 80, false, progress)
	// Should return context error due to timeout
	assert.Error(t, err)
	assert.Nil(t, convertedChapter)
	// ErrorIs also matches a wrapped deadline error, unlike strict equality.
	assert.ErrorIs(t, err, context.DeadlineExceeded)
}
// TestConverter_ConvertChapter_ManyPages_NoDeadlock tests that converting chapters with many pages
// does not cause a deadlock. This test reproduces the scenario where processing
// many files with context cancellation could cause "all goroutines are asleep - deadlock!" error.
// The fix ensures that wgConvertedPages.Done() is called when context is cancelled after Add(1).
func TestConverter_ConvertChapter_ManyPages_NoDeadlock(t *testing.T) {
	conv := New()
	require.NoError(t, conv.PrepareConverter())
	// A large page count widens the window for the original race condition.
	const pageCount = 50
	pages := make([]*manga.Page, pageCount)
	for idx := range pages {
		pages[idx] = createTestPage(t, idx+1, 100, 100, "jpeg")
	}
	chapter := &manga.Chapter{
		FilePath: "/test/chapter_many_pages.cbz",
		Pages:    pages,
	}
	noopProgress := func(message string, current uint32, total uint32) {}
	// Repeat the run to raise the odds of hitting the race window.
	for run := 0; run < 10; run++ {
		t.Run(fmt.Sprintf("iteration_%d", run), func(t *testing.T) {
			// An immediately-expiring deadline forces cancellation while
			// pages are still being processed.
			ctx, cancel := context.WithTimeout(context.Background(), time.Nanosecond)
			defer cancel()
			finished := make(chan struct{})
			var convertErr error
			go func() {
				defer close(finished)
				_, convertErr = conv.ConvertChapter(ctx, chapter, 80, false, noopProgress)
			}()
			// If the call hangs past 5 seconds, the deadlock has regressed.
			select {
			case <-finished:
				assert.Error(t, convertErr, "Expected context error")
			case <-time.After(5 * time.Second):
				t.Fatal("Deadlock detected: ConvertChapter did not return within 5 seconds")
			}
		})
	}
}
// TestConverter_ConvertChapter_ManyPages_WithSplit_NoDeadlock tests that converting chapters
// with many pages and split enabled does not cause a deadlock.
func TestConverter_ConvertChapter_ManyPages_WithSplit_NoDeadlock(t *testing.T) {
	conv := New()
	require.NoError(t, conv.PrepareConverter())
	// Mix normal pages with tall ones (every fifth) that trigger splitting.
	const pageCount = 30
	pages := make([]*manga.Page, pageCount)
	for idx := range pages {
		height := 1000 // Normal height
		if idx%5 == 0 {
			height = 5000 // Tall image that will be split
		}
		pages[idx] = createTestPage(t, idx+1, 100, height, "png")
	}
	chapter := &manga.Chapter{
		FilePath: "/test/chapter_split_test.cbz",
		Pages:    pages,
	}
	noopProgress := func(message string, current uint32, total uint32) {}
	// Repeat with very short timeouts to widen the cancellation race window.
	for run := 0; run < 10; run++ {
		t.Run(fmt.Sprintf("iteration_%d", run), func(t *testing.T) {
			ctx, cancel := context.WithTimeout(context.Background(), time.Nanosecond)
			defer cancel()
			finished := make(chan struct{})
			var convertErr error
			go func() {
				defer close(finished)
				_, convertErr = conv.ConvertChapter(ctx, chapter, 80, true, noopProgress) // split=true
			}()
			select {
			case <-finished:
				assert.Error(t, convertErr, "Expected context error")
			case <-time.After(5 * time.Second):
				t.Fatal("Deadlock detected: ConvertChapter with split did not return within 5 seconds")
			}
		})
	}
}
// TestConverter_ConvertChapter_ConcurrentChapters_NoDeadlock simulates the scenario from the
// original bug report where multiple chapters are processed in parallel with parallelism > 1.
// This test ensures no deadlock occurs when multiple goroutines are converting chapters concurrently.
func TestConverter_ConvertChapter_ConcurrentChapters_NoDeadlock(t *testing.T) {
	conv := New()
	require.NoError(t, conv.PrepareConverter())
	// Build several chapters, each with a sizeable page count.
	const (
		chapterCount    = 20
		pagesPerChapter = 30
	)
	chapters := make([]*manga.Chapter, chapterCount)
	for c := range chapters {
		pages := make([]*manga.Page, pagesPerChapter)
		for p := range pages {
			pages[p] = createTestPage(t, p+1, 100, 100, "jpeg")
		}
		chapters[c] = &manga.Chapter{
			FilePath: fmt.Sprintf("/test/chapter_%d.cbz", c+1),
			Pages:    pages,
		}
	}
	noopProgress := func(message string, current uint32, total uint32) {}
	// Bounded concurrency mimics the CLI's parallelism flag.
	const parallelism = 4
	var wg sync.WaitGroup
	tokens := make(chan struct{}, parallelism)
	// Overall deadline for the whole test.
	testCtx, testCancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer testCancel()
	for _, chapter := range chapters {
		wg.Add(1)
		tokens <- struct{}{} // Acquire
		go func(ch *manga.Chapter) {
			defer wg.Done()
			defer func() { <-tokens }() // Release
			// An immediately-expiring deadline triggers cancellation mid-run.
			ctx, cancel := context.WithTimeout(context.Background(), time.Nanosecond)
			defer cancel()
			// The call must return promptly rather than deadlock.
			_, _ = conv.ConvertChapter(ctx, ch, 80, false, noopProgress)
		}(chapter)
	}
	finished := make(chan struct{})
	go func() {
		wg.Wait()
		close(finished)
	}()
	select {
	case <-finished:
		// All conversions returned; no deadlock.
	case <-testCtx.Done():
		t.Fatal("Deadlock detected: Concurrent chapter conversions did not complete within 30 seconds")
	}
}

View File

@@ -0,0 +1,46 @@
package webp
import (
"fmt"
"image"
"io"
"strings"
"sync"
"github.com/belphemur/go-webpbin/v2"
)
// libwebpVersion is the libwebp release this package expects; PrepareEncoder
// rejects any cwebp binary whose reported version does not match this prefix.
const libwebpVersion = "1.6.0"
// config is the shared webpbin configuration used by every encoder call.
var config = webpbin.NewConfig()
// prepareMutex serializes PrepareEncoder against concurrent callers.
var prepareMutex sync.Mutex
// init pins the libwebp version on the shared config before first use.
func init() {
	config.SetLibVersion(libwebpVersion)
}
// PrepareEncoder checks that the cwebp binary is usable and that its reported
// version matches the pinned libwebpVersion. Calls are serialized by
// prepareMutex, so it is safe to invoke from multiple goroutines.
func PrepareEncoder() error {
	prepareMutex.Lock()
	defer prepareMutex.Unlock()
	version, err := webpbin.NewCWebP(config).Version()
	if err != nil {
		return err
	}
	if strings.HasPrefix(version, libwebpVersion) {
		return nil
	}
	return fmt.Errorf("unexpected webp version: got %s, want %s", version, libwebpVersion)
}
// Encode writes m to w as a WebP image at the given cwebp quality setting.
func Encode(w io.Writer, m image.Image, quality uint) error {
	cwebp := webpbin.NewCWebP(config).Quality(quality)
	return cwebp.InputImage(m).Output(w).Run()
}

View File

@@ -2,5 +2,16 @@
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended"
],
"packageRules": [
{
"matchUpdateTypes": [
"minor",
"patch",
"digest"
],
"matchCurrentVersion": "!/^0/",
"automerge": true
}
]
}

Binary file not shown.

BIN
testdata/Chapter 128.cbz vendored Normal file

Binary file not shown.

Binary file not shown.

View File

@@ -1,50 +0,0 @@
package utils
import (
"fmt"
"github.com/belphemur/CBZOptimizer/cbz"
"github.com/belphemur/CBZOptimizer/converter"
"log"
"strings"
)
// Optimize optimizes a CBZ file using the specified converter.
//
// It loads the chapter at path, skips it when it is already marked converted,
// converts every page with chapterConverter at the given quality, and writes
// the result either over the original file (override=true) or next to it with
// a "_converted" suffix. Errors from each stage are wrapped with %w so callers
// can inspect the cause via errors.Is / errors.As.
func Optimize(chapterConverter converter.Converter, path string, quality uint8, override bool) error {
	log.Printf("Processing file: %s\n", path)

	// Load the chapter
	chapter, err := cbz.LoadChapter(path)
	if err != nil {
		return fmt.Errorf("failed to load chapter: %w", err)
	}

	if chapter.IsConverted {
		log.Printf("Chapter already converted: %s", path)
		return nil
	}

	// Convert the chapter, logging progress every 10 pages and at completion.
	convertedChapter, err := chapterConverter.ConvertChapter(chapter, quality, func(msg string, current uint32, total uint32) {
		if current%10 == 0 || current == total {
			log.Printf("[%s] Converting: %d/%d", chapter.FilePath, current, total)
		}
	})
	if err != nil {
		return fmt.Errorf("failed to convert chapter: %w", err)
	}
	convertedChapter.SetConverted()

	// Write the converted chapter back to a CBZ file
	outputPath := path
	if !override {
		outputPath = strings.TrimSuffix(path, ".cbz") + "_converted.cbz"
	}
	err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
	if err != nil {
		return fmt.Errorf("failed to write converted chapter: %w", err)
	}

	log.Printf("Converted file written to: %s\n", outputPath)
	return nil
}