105 Commits

Author SHA1 Message Date
Antoine Aflalo
c87fde31c4 chore: Removes unused import
Removes the `image/jpeg` import, as it is already imported via blank identifier.
2025-02-14 10:55:39 -05:00
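For readers unfamiliar with the blank-import idiom this commit relies on, here is a minimal sketch (not taken from the repository): the `image/jpeg` package only needs to be imported for its side effect of registering the JPEG decoder, after which `image.Decode` can handle JPEG data.

```go
package main

import (
	"fmt"
	"image"
	_ "image/jpeg" // blank import: its init() registers the JPEG format, no direct use needed
	"os"
)

func main() {
	f, err := os.Open("page.jpg")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	img, format, err := image.Decode(f) // decodes JPEG solely because of the blank import above
	if err != nil {
		panic(err)
	}
	fmt.Println(format, img.Bounds())
}
```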
Antoine Aflalo
23eb43c691 fix(chapter): fix chapter conversion.
Still need to figure out the memory issues

Consolidates image conversion logic into a dedicated method.

This change streamlines the conversion process by centralizing the
setting of converted image data, extension, and size. It also
introduces a flag to track whether an image has been converted.

The old resource cleanup has been removed since it is not needed anymore.
2025-02-14 10:03:35 -05:00
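A rough illustration of the consolidation described in this commit. The field and method names below are assumptions for the sketch (only `Contents`, `Extension`, and `Size` appear elsewhere in this diff); it is not the repository's actual API, just the shape of a single helper that sets the converted data, extension, and size and flips a conversion flag in one place.

```go
package manga

import "bytes"

// Page mirrors fields visible elsewhere in this diff; the IsConverted flag and
// the SetConverted helper are illustrative assumptions.
type Page struct {
	Contents    *bytes.Buffer
	Extension   string
	Size        uint64
	IsConverted bool
}

// SetConverted centralizes the bookkeeping the commit describes: converted
// bytes, extension, and size are set together, and the flag records it.
func (p *Page) SetConverted(data []byte, extension string) {
	p.Contents = bytes.NewBuffer(data)
	p.Extension = extension
	p.Size = uint64(len(data))
	p.IsConverted = true
}
```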
Antoine Aflalo
4d3391273c ci: Sets up QEMU for cross-platform builds
Configures QEMU to enable emulation of different architectures,
allowing for cross-platform builds and testing in the release workflow.
2025-02-13 20:18:14 -05:00
Antoine Aflalo
2da3bae04a Updates build configuration for multi-platform support
Configures the build process to support multiple platforms (Linux, Darwin, Windows) and architectures (amd64, arm64).

Disables CGO to simplify cross-compilation.

Updates Docker image creation to produce separate images for amd64 and arm64, and creates manifest lists for `latest` and versioned tags.
2025-02-13 20:12:49 -05:00
Antoine Aflalo
a3dfec642c test: add webp converter test 2025-02-13 20:05:08 -05:00
Antoine Aflalo
0303c80feb test: fix path 2025-02-13 20:04:33 -05:00
Antoine Aflalo
efe1696bfa fix(memory): fix possible memory leak and add better tests 2025-02-13 20:02:45 -05:00
Antoine Aflalo
25cd4585b7 feat: revert to use webp executable 2025-02-13 19:47:13 -05:00
Antoine Aflalo
dd7b6a332c refactor: update import paths to use internal package 2025-02-13 19:43:18 -05:00
renovate[bot]
5428134d15 chore(deps): update dependency go to v1.24.0 (#49)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-11 20:04:36 +00:00
renovate[bot]
8d59530234 fix(deps): update golang.org/x/exp digest to 939b2ce (#48)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-10 20:23:31 +00:00
renovate[bot]
af30f34aa6 fix(deps): update golang.org/x/exp digest to f9890c6 (#47)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-07 05:00:13 +00:00
renovate[bot]
b3c412c09d chore(deps): update sigstore/cosign-installer action to v3.8.0 (#46)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-05 00:35:33 +00:00
renovate[bot]
16ba484f28 fix(deps): update module golang.org/x/image to v0.24.0 (#45)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-04 20:39:34 +00:00
renovate[bot]
2de8a81137 chore(deps): update dependency go to v1.23.6 (#44)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-02-04 17:36:25 +00:00
renovate[bot]
c223c9dca6 fix(deps): update golang.org/x/exp digest to e0ece0d (#43)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 21:04:20 +00:00
renovate[bot]
dcf57c7646 fix(deps): update golang.org/x/exp digest to 3edf0e9 (#42)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 17:46:34 +00:00
renovate[bot]
77e7724de2 fix(deps): update module github.com/samber/lo to v1.49.1 (#41)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-28 13:22:17 +00:00
renovate[bot]
ea8fd55cc2 fix(deps): update module github.com/samber/lo to v1.49.0 (#40)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-27 04:30:54 +00:00
renovate[bot]
709c53d647 fix(deps): update module github.com/pablodz/inotifywaitgo to v0.0.9 (#39)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-26 20:57:06 +00:00
renovate[bot]
919a53fec7 fix(deps): update module github.com/samber/lo to v1.48.0 (#38)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-26 10:22:50 +00:00
renovate[bot]
d3b3a73b8f chore(deps): update anchore/sbom-action action to v0.18.0 (#37)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-23 20:47:16 +00:00
renovate[bot]
188211e26d fix(deps): update golang.org/x/exp digest to 7588d65 (#36)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-06 21:12:16 +00:00
renovate[bot]
f57a88eaf4 fix(deps): update module github.com/thediveo/enumflag/v2 to v2.0.7 (#35)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-05 19:29:38 +00:00
renovate[bot]
6e6b66b5eb fix(deps): update golang.org/x/exp digest to 7d7fa50 (#34)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-03 21:05:57 +00:00
renovate[bot]
1ff1bed3cc fix(deps): update golang.org/x/exp digest to dd03c70 (#33)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-03 18:34:17 +00:00
renovate[bot]
196938718c fix(deps): update golang.org/x/exp digest to b2144cd (#32)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-17 20:15:57 +00:00
renovate[bot]
9972709d32 fix(deps): update golang.org/x/exp digest to 4a55095 (#31)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-15 18:36:01 +00:00
renovate[bot]
152fa85577 chore(deps): update anchore/sbom-action action to v0.17.9 (#30)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-13 23:01:28 +00:00
renovate[bot]
554fce5d1e fix(deps): update golang.org/x/exp digest to 1829a12 (#29)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-10 23:09:05 +00:00
renovate[bot]
25357e9ec6 fix(deps): update golang.org/x/exp digest to 1443442 (#28)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-10 17:58:23 +00:00
Antoine Aflalo
ecc561263f fix: move image to debian 2024-12-06 18:00:24 -05:00
Antoine Aflalo
fb1056e5e7 ci: remove bash completion 2024-12-06 17:40:42 -05:00
Antoine Aflalo
07bc88bb04 fix: v2 versioning 2024-12-06 17:28:09 -05:00
Antoine Aflalo
8c3665fa53 ci: fix dockerfile 2024-12-06 17:26:25 -05:00
Antoine Aflalo
8dce346997 ci: debug release 2 2024-12-06 17:22:27 -05:00
Antoine Aflalo
4646789e4e ci: debug release 2024-12-06 17:21:12 -05:00
Antoine Aflalo
22ca56c98b ci: fix building 2024-12-06 17:18:28 -05:00
Antoine Aflalo
f45a1d4ed0 ci: remove arm64 2024-12-06 17:10:52 -05:00
Antoine Aflalo
ee53fddf02 ci: fix version 2024-12-06 17:03:42 -05:00
Antoine Aflalo
f416f1ff32 feat: replace webp lib by C libwebp
Avoid having to download anything
2024-12-06 17:01:00 -05:00
renovate[bot]
969993161f fix(deps): update golang.org/x/exp digest to 43b7b7c (#27)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-05 01:57:59 +00:00
renovate[bot]
f6b41f6391 fix(deps): update module golang.org/x/image to v0.23.0 (#26)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-12-04 18:33:59 +00:00
renovate[bot]
0bb9e4320c chore(deps): update anchore/sbom-action action to v0.17.8 (#25)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-21 18:47:50 +00:00
Antoine Aflalo
35cfe41aa6 Merge pull request #24 from Belphemur/renovate/codecov-codecov-action-5.x
chore(deps): update codecov/codecov-action action to v5
2024-11-18 14:37:12 -05:00
renovate[bot]
021c647a6e chore(deps): update codecov/codecov-action action to v5 2024-11-14 19:14:51 +00:00
renovate[bot]
6217254305 fix(deps): update golang.org/x/exp digest to 2d47ceb (#23)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 23:01:56 +00:00
renovate[bot]
0ad711a24d fix(deps): update golang.org/x/exp digest to 04b2079 (#22)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 19:25:44 +00:00
renovate[bot]
f24e4cc26e fix(deps): update module golang.org/x/image to v0.22.0 (#21)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-11-08 00:06:54 +00:00
Antoine Aflalo
1d3a8396f2 Merge pull request #20 from Belphemur/renovate/anchore-sbom-action-0.x 2024-11-05 09:49:07 -05:00
renovate[bot]
497f206c50 chore(deps): update anchore/sbom-action action to v0.17.7 2024-11-05 14:37:06 +00:00
renovate[bot]
9ade876952 chore(deps): update anchore/sbom-action action to v0.17.6 (#19)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-29 16:46:36 +00:00
Antoine Aflalo
103d38c74b Merge pull request #18 from Belphemur/renovate/anchore-sbom-action-0.x 2024-10-22 10:23:05 -04:00
renovate[bot]
80a1afe7c3 chore(deps): update anchore/sbom-action action to v0.17.5 2024-10-21 20:34:41 +00:00
Antoine Aflalo
2de7bc7a04 Merge pull request #17 from Belphemur/renovate/anchore-sbom-action-0.x 2024-10-15 14:29:56 -04:00
renovate[bot]
bccf7a7029 chore(deps): update anchore/sbom-action action to v0.17.4 2024-10-15 17:14:00 +00:00
renovate[bot]
4e80ddfb3a chore(deps): update anchore/sbom-action action to v0.17.3 (#16)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-12 00:29:35 +00:00
renovate[bot]
090bbac593 fix(deps): update golang.org/x/exp digest to f66d83c (#15)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-09 22:19:26 +00:00
renovate[bot]
c8b0f11784 fix(deps): update golang.org/x/exp digest to 225e2ab (#14)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-10-05 00:22:41 +00:00
Antoine Aflalo
449b57b14e Merge pull request #13 from Belphemur/renovate/golang.org-x-image-0.x
fix(deps): update module golang.org/x/image to v0.21.0
2024-10-04 14:25:05 -04:00
renovate[bot]
0dcb55f06d fix(deps): update module golang.org/x/image to v0.21.0 2024-10-04 16:40:30 +00:00
Antoine Aflalo
33ae460caf Merge pull request #12 from Belphemur/renovate/sigstore-cosign-installer-3.x
chore(deps): update sigstore/cosign-installer action to v3.7.0
2024-10-04 09:57:48 -04:00
renovate[bot]
1af484aea8 chore(deps): update sigstore/cosign-installer action to v3.7.0 2024-10-04 13:31:05 +00:00
Antoine Aflalo
e798a59a43 perf: fix any unhandled errors 2024-09-10 15:10:40 -04:00
Antoine Aflalo
72086d658e refactor: clean up 2024-09-10 13:58:52 -04:00
Antoine Aflalo
a7bca7ee05 ci(qodana): add qodana 2024-09-10 13:54:08 -04:00
Antoine Aflalo
ba82003b53 Merge pull request #11 from Belphemur/renovate
perf(error): better deal with deferred errors
2024-09-09 14:47:12 -04:00
Antoine Aflalo
5f7e7de644 ci(tests): fix possible error with tests 2024-09-09 14:45:30 -04:00
Antoine Aflalo
5b183cca29 perf(error): better deal with deferred errors 2024-09-09 14:45:30 -04:00
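A sketch of the deferred-error pattern this commit refers to. The call shape mirrors the `errs.Capture(&err, zipFile.Close, ...)` usage visible in the diff further down, but this implementation is an assumption, not the repository's code.

```go
package errs

import "fmt"

// Capture runs closer and, if it fails while the surrounding function is not
// already returning an error, stores the wrapped failure in *err so that
// `defer errs.Capture(&err, f.Close, "closing file")` is not silently lost.
func Capture(err *error, closer func() error, msg string) {
	if cerr := closer(); cerr != nil && *err == nil {
		*err = fmt.Errorf("%s: %w", msg, cerr)
	}
}
```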
Antoine Aflalo
d901be14fa Merge pull request #9 from Belphemur/renovatebot
ci(renovate): auto merge digest
2024-09-09 14:11:56 -04:00
Antoine Aflalo
a80997835a chore(deps): update deps 2024-09-09 14:09:39 -04:00
Antoine Aflalo
37bb12fd61 ci(renovate): auto merge digest 2024-09-09 14:09:23 -04:00
Antoine Aflalo
c19afb9f40 Merge pull request #7 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to 701f63a
2024-09-09 14:06:57 -04:00
renovate[bot]
911e1041ff fix(deps): update golang.org/x/exp digest to 701f63a 2024-09-09 18:06:01 +00:00
Antoine Aflalo
a10d589b67 Merge pull request #8 from Belphemur/fix-ci
ci: always generate and upload test results
2024-09-09 14:05:05 -04:00
Antoine Aflalo
da508fcb3f ci: always generate and upload test results 2024-09-09 14:03:28 -04:00
Antoine Aflalo
57f5282032 ci(renovate): add automerge 2024-09-09 09:27:33 -04:00
Antoine Aflalo
d4f8d8b5ff ci(test): fix the report xml file 2024-09-09 09:25:46 -04:00
Antoine Aflalo
1b026b9dbd fix(watch): add missing split option in log 2024-09-09 09:11:45 -04:00
Antoine Aflalo
12cc8d4e25 Merge pull request #6 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to e7e105d
2024-09-06 17:10:42 -04:00
renovate[bot]
3442b2a845 fix(deps): update golang.org/x/exp digest to e7e105d 2024-09-06 21:08:46 +00:00
Antoine Aflalo
b9a1fb213a Merge pull request #5 from Belphemur/renovate/golang.org-x-image-0.x
fix(deps): update module golang.org/x/image to v0.20.0
2024-09-06 17:07:40 -04:00
renovate[bot]
278ee130e3 fix(deps): update module golang.org/x/image to v0.20.0 2024-09-04 19:30:08 +00:00
Antoine Aflalo
5357ece2b7 perf: use comment of the zip to know if it's converted instead of txt file 2024-08-29 09:38:39 -04:00
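A hedged sketch of the idea in this commit, using only the standard library's `archive/zip`: the conversion marker lives in the ZIP archive comment instead of a `converted.txt` entry. The `converted=` prefix is an assumption for illustration.

```go
package main

import (
	"archive/zip"
	"fmt"
	"os"
	"strings"
	"time"
)

// markConverted writes an archive whose comment records the conversion time.
func markConverted(path string) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer f.Close()

	zw := zip.NewWriter(f)
	// The comment travels with the archive; no extra page-like entry is needed.
	if err := zw.SetComment("converted=" + time.Now().Format(time.RFC3339)); err != nil {
		return err
	}
	return zw.Close()
}

// isConverted only has to read the archive comment, not scan the entries.
func isConverted(path string) (bool, error) {
	r, err := zip.OpenReader(path)
	if err != nil {
		return false, err
	}
	defer r.Close()
	return strings.HasPrefix(r.Comment, "converted="), nil
}

func main() {
	if err := markConverted("chapter.cbz"); err != nil {
		panic(err)
	}
	ok, _ := isConverted("chapter.cbz")
	fmt.Println("converted:", ok)
}
```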
Antoine Aflalo
dbef43d376 fix(watch): fix watch command not using proper path 2024-08-28 15:06:47 -04:00
Antoine Aflalo
7c63ea49c0 fix(docker): fix docker image config folder 2024-08-28 14:36:14 -04:00
Antoine Aflalo
8a067939af Merge pull request #4 from Belphemur/renovate/major-github-artifact-actions
chore(deps): update actions/upload-artifact action to v4
2024-08-28 14:24:57 -04:00
Antoine Aflalo
f89974ac79 ci: Another attempt at reducing 2024-08-28 14:22:25 -04:00
Antoine Aflalo
ce365a6bdf ci: reduce size of page to pass tests
Fix failing test
2024-08-28 14:16:51 -04:00
renovate[bot]
9e61ff4634 chore(deps): update actions/upload-artifact action to v4 2024-08-28 17:56:03 +00:00
Antoine Aflalo
63a1b592c3 ci: add test result to pipeline 2024-08-28 13:55:33 -04:00
Antoine Aflalo
673484692b perf(webp): improve the error message for page too tall 2024-08-28 13:52:27 -04:00
Antoine Aflalo
ad35e2655f feat(webp): add partial success to conversion
So we only keep images that couldn't be optimized and return the chapter
2024-08-28 13:49:14 -04:00
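A small sketch of the "partial success" behaviour described above, with illustrative names rather than the project's real types: pages that fail to convert keep their original bytes, the chapter is still returned, and the failures are reported together.

```go
package main

import (
	"errors"
	"fmt"
)

type page struct {
	data      []byte
	extension string
}

// convertPage stands in for the real per-page WebP conversion.
func convertPage(p *page, quality uint8) ([]byte, error) {
	if len(p.data) == 0 {
		return nil, errors.New("empty page")
	}
	return p.data, nil // pretend conversion succeeded
}

// convertAll converts what it can and reports failures without dropping the chapter.
func convertAll(pages []*page, quality uint8) error {
	var failed []error
	for i, p := range pages {
		out, err := convertPage(p, quality)
		if err != nil {
			failed = append(failed, fmt.Errorf("page %d kept as-is: %w", i, err))
			continue // keep the original bytes for this page
		}
		p.data = out
		p.extension = ".webp"
	}
	return errors.Join(failed...) // nil when every page converted
}

func main() {
	pages := []*page{{data: []byte{1}}, {}}
	fmt.Println(convertAll(pages, 80))
}
```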
Antoine Aflalo
d7f55fa886 fix(webp): improve error message in page not convertible 2024-08-28 12:09:40 -04:00
Antoine Aflalo
62638517e4 test: improve testing suite for expected failure 2024-08-28 12:03:33 -04:00
Antoine Aflalo
dbf7f6c262 fix(webp): be sure we split big page when requested 2024-08-28 11:55:53 -04:00
Antoine Aflalo
9ecd5ff3a5 fix(webp): fix the actual maximum limit 2024-08-28 11:53:26 -04:00
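Context for the WebP-limit commits above, as a sketch rather than the project's code: the WebP format caps each image dimension at 16383 pixels, so a taller page has to be split (or rejected) before encoding.

```go
package main

import "fmt"

const webpMaxDimension = 16383 // hard WebP format limit for width and height

// canEncodeAsWebP reports whether a page fits WebP's dimension limit.
func canEncodeAsWebP(width, height int) error {
	if width > webpMaxDimension || height > webpMaxDimension {
		return fmt.Errorf("page %dx%d exceeds the WebP limit of %d pixels per side; split it first",
			width, height, webpMaxDimension)
	}
	return nil
}

func main() {
	fmt.Println(canEncodeAsWebP(800, 24000)) // typical long webtoon page: needs splitting
}
```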
Antoine Aflalo
a63d2395f0 fix(webp): better handling of error for page too big for webp 2024-08-28 11:51:06 -04:00
Antoine Aflalo
839ad9ed9d fix(cbz): make pages be the first in the cbz by only be number 2024-08-28 09:16:19 -04:00
Antoine Aflalo
c8879349e1 feat(split): Make the split configurable for the watch command 2024-08-28 09:10:08 -04:00
Antoine Aflalo
5ac59a93c5 feat(split): Make the split configurable for the optimize command 2024-08-28 09:06:49 -04:00
Antoine Aflalo
72c6776793 fix: make the progress more readable 2024-08-27 20:42:26 -04:00
Antoine Aflalo
e0b6d7fcef ci: add bash completion to image 2024-08-27 20:29:38 -04:00
Antoine Aflalo
9305c8fa76 perf: add completion to bash docker image 2024-08-27 20:27:45 -04:00
Antoine Aflalo
9cc45e75cf fix(ci): fix built date 2024-08-27 20:26:29 -04:00
41 changed files with 1311 additions and 512 deletions

.github/workflows/qodana.yml (new file)

@@ -0,0 +1,23 @@
name: Qodana
on:
workflow_dispatch:
pull_request:
push:
branches:
jobs:
qodana:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
checks: write
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }} # to check out the actual pull request commit, not the merge commit
fetch-depth: 0 # a full history is required for pull request analysis
- name: 'Qodana Scan'
uses: JetBrains/qodana-action@v2024.1
env:
QODANA_TOKEN: ${{ secrets.QODANA_TOKEN }}


@@ -24,10 +24,12 @@ jobs:
           fetch-depth: 0 # this is important, otherwise it won't checkout the full tree (i.e. no previous tags)
       - uses: actions/setup-go@v5
         with:
-          go-version: 1.23
+          go-version: 1.24
           cache: true
-      - uses: sigstore/cosign-installer@v3.6.0 # installs cosign
-      - uses: anchore/sbom-action/download-syft@v0.17.2 # installs syft
+      - uses: sigstore/cosign-installer@v3.8.0 # installs cosign
+      - uses: anchore/sbom-action/download-syft@v0.18.0 # installs syft
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
       - uses: docker/login-action@v3 # login to ghcr
         with:
           registry: ghcr.io


@@ -15,7 +15,7 @@ jobs:
       - name: Set up Go
         uses: actions/setup-go@v5
         with:
-          go-version: '1.23'
+          go-version: '1.24'
       - name: Install dependencies
         run: go mod tidy
@@ -28,9 +28,23 @@ jobs:
           mv go-junit-report /usr/local/bin/
       - name: Run tests
-        run: go test -v 2>&1 ./... -coverprofile=coverage.txt | go-junit-report -set-exit-code > junit.xml
+        run: |
+          set -o pipefail
+          go test -v 2>&1 ./... -coverprofile=coverage.txt | tee test-results.txt
+      - name: Analyse test results
+        if: ${{ !cancelled() }}
+        run: go-junit-report < test-results.txt > junit.xml
+      - name: Upload test result artifact
+        if: ${{ !cancelled() }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: test-results
+          path: |
+            test-results.txt
+            junit.xml
+          retention-days: 7
       - name: Upload results to Codecov
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
       - name: Upload test results to Codecov


@@ -24,19 +24,24 @@ changelog:
     order: 2
 builds:
   - id: cbzoptimizer
-    main: main.go
+    main: cmd/cbzoptimizer/main.go
     goos:
       - linux
+      - darwin
+      - windows
     goarch:
       - amd64
       - arm64
+    ignore:
+      - goos: windows
+        goarch: arm64
     # ensures mod timestamp to be the commit timestamp
     mod_timestamp: "{{ .CommitTimestamp }}"
     flags:
       # trims path
       - -trimpath
     ldflags:
-      - -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X meta.date={{ .CommitDate }}
+      - -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{ .CommitDate }}
     env:
       - CGO_ENABLED=0
     # config the checksum filename
@@ -61,11 +66,25 @@ sboms:
 # https://goreleaser.com/customization/docker
 dockers:
   - image_templates:
-      - "ghcr.io/belphemur/cbzoptimizer:latest"
-      - "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}"
-    dockerfile: Dockerfile
+      - "ghcr.io/belphemur/cbzoptimizer:latest-amd64"
+      - "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-amd64"
+    use: buildx
     build_flag_templates:
       - "--pull"
+      - "--platform=linux/amd64"
+      - "--label=org.opencontainers.image.created={{.Date}}"
+      - "--label=org.opencontainers.image.name={{.ProjectName}}"
+      - "--label=org.opencontainers.image.revision={{.FullCommit}}"
+      - "--label=org.opencontainers.image.version={{.Version}}"
+      - "--label=org.opencontainers.image.source={{.GitURL}}"
+  - image_templates:
+      - "ghcr.io/belphemur/cbzoptimizer:latest-arm64"
+      - "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-arm64"
+    use: buildx
+    goarch: arm64
+    build_flag_templates:
+      - "--pull"
+      - "--platform=linux/arm64"
       - "--label=org.opencontainers.image.created={{.Date}}"
       - "--label=org.opencontainers.image.name={{.ProjectName}}"
       - "--label=org.opencontainers.image.revision={{.FullCommit}}"
@@ -98,4 +117,13 @@ docker_signs:
     args:
       - "sign"
       - "${artifact}"
       - "--yes" # needed on cosign 2.0.0+
+docker_manifests:
+  - name_template: "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}"
+    image_templates:
+      - "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-amd64"
+      - "ghcr.io/belphemur/cbzoptimizer:{{ .Version }}-arm64"
+  - name_template: "ghcr.io/belphemur/cbzoptimizer:latest"
+    image_templates:
+      - "ghcr.io/belphemur/cbzoptimizer:latest-amd64"
+      - "ghcr.io/belphemur/cbzoptimizer:latest-arm64"


@@ -0,0 +1,10 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="GoDfaErrorMayBeNotNil" enabled="true" level="WARNING" enabled_by_default="true">
<methods>
<method importPath="github.com/belphemur/CBZOptimizer/converter" receiver="Converter" name="ConvertChapter" />
</methods>
</inspection_tool>
</profile>
</component>

.vscode/launch.json (new file)

@@ -0,0 +1,16 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch Package",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${fileDirname}"
}
]
}


@@ -1,21 +1,29 @@
 FROM alpine:latest
 LABEL authors="Belphemur"
+ARG APP_PATH=/usr/local/bin/CBZOptimizer
 ENV USER=abc
 ENV CONFIG_FOLDER=/config
 ENV PUID=99
-RUN mkdir -p "${CONFIG_FOLDER}" && adduser \
-    --disabled-password \
-    --gecos "" \
-    --home "$(pwd)" \
-    --ingroup "users" \
-    --no-create-home \
-    --uid "${PUID}" \
-    "${USER}" && \
-    chown ${PUID}:${GUID} "${CONFIG_FOLDER}"
-COPY CBZOptimizer /usr/local/bin/CBZOptimizer
-RUN apk add --no-cache inotify-tools && chmod +x /usr/local/bin/CBZOptimizer
+RUN mkdir -p "${CONFIG_FOLDER}" && \
+    adduser \
+    -S \
+    -H \
+    -h "${CONFIG_FOLDER}" \
+    -G "users" \
+    -u "${PUID}" \
+    "${USER}" && \
+    chown ${PUID}:users "${CONFIG_FOLDER}"
+COPY CBZOptimizer ${APP_PATH}
+RUN apk add --no-cache \
+    inotify-tools \
+    bash \
+    bash-completion && \
+    chmod +x ${APP_PATH} && \
+    ${APP_PATH} completion bash > /etc/bash_completion.d/CBZOptimizer
+VOLUME ${CONFIG_FOLDER}
 USER ${USER}
 ENTRYPOINT ["/usr/local/bin/CBZOptimizer"]


@@ -1,3 +1,4 @@
 # CBZOptimizer
 CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) files by converting images to a specified format and quality. This tool is useful for reducing the size of comic book archives while maintaining acceptable image quality.
@@ -8,14 +9,15 @@ CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) files
 - Adjust the quality of the converted images.
 - Process multiple chapters in parallel.
 - Option to override the original CBZ files.
+- Watch a folder for new CBZ files and optimize them automatically.
 ## Installation
 1. Clone the repository:
    ```sh
    git clone https://github.com/belphemur/CBZOptimizer.git
    cd CBZOptimizer
    ```
 2. Install dependencies:
    ```sh
@@ -26,10 +28,22 @@ CBZOptimizer is a Go-based tool designed to optimize CBZ (Comic Book Zip) files
 ### Command Line Interface
-The tool provides a CLI command to optimize CBZ files. Below is an example of how to use it:
+The tool provides CLI commands to optimize and watch CBZ files. Below are examples of how to use them:
+#### Optimize Command
+Optimize all CBZ files in a folder recursively:
 ```sh
-go run main.go optimize --quality 85 --parallelism 2 --override /path/to/cbz/files
+go run main.go optimize [folder] --quality 85 --parallelism 2 --override --format webp --split
+```
+#### Watch Command
+Watch a folder for new CBZ files and optimize them automatically:
+```sh
+go run main.go watch [folder] --quality 85 --override --format webp --split
 ```
 ### Flags
@@ -37,6 +51,8 @@ go run main.go optimize --quality 85 --parallelism 2 --override /path/to/cbz/fil
 - `--quality`, `-q`: Quality for conversion (0-100). Default is 85.
 - `--parallelism`, `-n`: Number of chapters to convert in parallel. Default is 2.
 - `--override`, `-o`: Override the original CBZ files. Default is false.
+- `--split`, `-s`: Split long pages into smaller chunks. Default is false.
+- `--format`, `-f`: Format to convert the images to (e.g., webp). Default is webp.
 ## Testing
@@ -46,6 +62,12 @@ To run the tests, use the following command:
 go test ./... -v
 ```
+## Requirement
+Needs to have libwep installed on the machine if you're not using the docker image
+## Docker
+`ghcr.io/belphemur/cbzoptimizer:latest`
 ## GitHub Actions
 The project includes a GitHub Actions workflow to run tests on every push and pull request to the `main` branch. The workflow is defined in `.github/workflows/go.yml`.
@@ -60,4 +82,4 @@ The project includes a GitHub Actions workflow to run tests on every push and pu
 ## License
 This project is licensed under the MIT License. See the `LICENSE` file for details.


@@ -1,90 +0,0 @@
package cbz
import (
"archive/zip"
"bufio"
"bytes"
"fmt"
"github.com/araddon/dateparse"
"github.com/belphemur/CBZOptimizer/manga"
"io"
"path/filepath"
"strings"
)
func LoadChapter(filePath string) (*manga.Chapter, error) {
// Open the .cbz file
r, err := zip.OpenReader(filePath)
if err != nil {
return nil, fmt.Errorf("failed to open .cbz file: %w", err)
}
defer r.Close()
chapter := &manga.Chapter{
FilePath: filePath,
}
for _, f := range r.File {
if f.FileInfo().IsDir() {
continue
}
// Open the file inside the zip
rc, err := f.Open()
if err != nil {
return nil, fmt.Errorf("failed to open file inside .cbz: %w", err)
}
// Determine the file extension
ext := strings.ToLower(filepath.Ext(f.Name))
if ext == ".xml" && strings.ToLower(filepath.Base(f.Name)) == "comicinfo.xml" {
// Read the ComicInfo.xml file content
xmlContent, err := io.ReadAll(rc)
if err != nil {
rc.Close()
return nil, fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
}
chapter.ComicInfoXml = string(xmlContent)
} else if ext == ".txt" && strings.ToLower(filepath.Base(f.Name)) == "converted.txt" {
textContent, err := io.ReadAll(rc)
if err != nil {
rc.Close()
return nil, fmt.Errorf("failed to read Converted.xml content: %w", err)
}
scanner := bufio.NewScanner(bytes.NewReader(textContent))
if scanner.Scan() {
convertedTime := scanner.Text()
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err != nil {
rc.Close()
return nil, fmt.Errorf("failed to parse converted time: %w", err)
}
chapter.IsConverted = true
}
} else {
// Read the file contents for page
buf := new(bytes.Buffer)
_, err = io.Copy(buf, rc)
if err != nil {
rc.Close()
return nil, fmt.Errorf("failed to read file contents: %w", err)
}
// Create a new Page object
page := &manga.Page{
Index: uint16(len(chapter.Pages)), // Simple index based on order
Extension: ext,
Size: uint64(buf.Len()),
Contents: buf,
IsSplitted: false,
}
// Add the page to the chapter
chapter.Pages = append(chapter.Pages, page)
}
rc.Close()
}
return chapter, nil
}


@@ -1,10 +1,10 @@
-package cmd
+package commands
 import (
     "fmt"
-    "github.com/belphemur/CBZOptimizer/converter"
-    "github.com/belphemur/CBZOptimizer/converter/constant"
-    "github.com/belphemur/CBZOptimizer/utils"
+    utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
+    "github.com/belphemur/CBZOptimizer/v2/pkg/converter"
+    "github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
     "github.com/spf13/cobra"
     "github.com/thediveo/enumflag/v2"
     "os"
@@ -29,6 +29,7 @@ func init() {
     command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
     command.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
     command.Flags().BoolP("override", "o", false, "Override the original CBZ files")
+    command.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
     command.PersistentFlags().VarP(
         formatFlag,
         "format", "f",
@@ -44,7 +45,7 @@ func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
         return fmt.Errorf("path is required")
     }
-    if !utils.IsValidFolder(path) {
+    if !utils2.IsValidFolder(path) {
         return fmt.Errorf("the path needs to be a folder")
     }
@@ -58,6 +59,11 @@ func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
         return fmt.Errorf("invalid quality value")
     }
+    split, err := cmd.Flags().GetBool("split")
+    if err != nil {
+        return fmt.Errorf("invalid split value")
+    }
     parallelism, err := cmd.Flags().GetInt("parallelism")
     if err != nil || parallelism < 1 {
         return fmt.Errorf("invalid parallelism value")
@@ -86,7 +92,13 @@ func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
     go func() {
         defer wg.Done()
         for path := range fileChan {
-            err := utils.Optimize(chapterConverter, path, quality, override)
+            err := utils2.Optimize(&utils2.OptimizeOptions{
+                ChapterConverter: chapterConverter,
+                Path:             path,
+                Quality:          quality,
+                Override:         override,
+                Split:            split,
+            })
             if err != nil {
                 errorChan <- fmt.Errorf("error processing file %s: %w", path, err)
             }


@@ -1,10 +1,11 @@
-package cmd
+package commands
 import (
-    "github.com/belphemur/CBZOptimizer/cbz"
-    "github.com/belphemur/CBZOptimizer/converter"
-    "github.com/belphemur/CBZOptimizer/converter/constant"
-    "github.com/belphemur/CBZOptimizer/manga"
+    "github.com/belphemur/CBZOptimizer/v2/internal/cbz"
+    "github.com/belphemur/CBZOptimizer/v2/internal/manga"
+    "github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
+    "github.com/belphemur/CBZOptimizer/v2/pkg/converter"
+    "github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
     "github.com/spf13/cobra"
     "log"
    "os"
@@ -17,17 +18,16 @@ import (
 // MockConverter is a mock implementation of the Converter interface
 type MockConverter struct{}
-func (m *MockConverter) Format() constant.ConversionFormat {
-    return constant.WebP
-}
-func (m *MockConverter) ConvertChapter(chapter *manga.Chapter, quality uint8, progress func(string)) (*manga.Chapter, error) {
-    // Simulate conversion by setting the IsConverted flag
+func (m *MockConverter) ConvertChapter(chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
     chapter.IsConverted = true
     chapter.ConvertedTime = time.Now()
     return chapter, nil
 }
+func (m *MockConverter) Format() constant.ConversionFormat {
+    return constant.WebP
+}
 func (m *MockConverter) PrepareConverter() error {
     return nil
 }
@@ -38,10 +38,10 @@ func TestConvertCbzCommand(t *testing.T) {
     if err != nil {
         log.Fatal(err)
     }
-    defer os.RemoveAll(tempDir) // Clean up the temp directory when done
+    defer errs.CaptureGeneric(&err, os.RemoveAll, tempDir, "failed to remove temporary directory")
     // Locate the testdata directory
-    testdataDir := filepath.Join("../testdata")
+    testdataDir := filepath.Join("../../../testdata")
     if _, err := os.Stat(testdataDir); os.IsNotExist(err) {
         t.Fatalf("testdata directory not found")
     }
@@ -79,6 +79,7 @@ func TestConvertCbzCommand(t *testing.T) {
     cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
     cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
     cmd.Flags().BoolP("override", "o", false, "Override the original CBZ files")
+    cmd.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
     // Execute the command
     err = ConvertCbzCommand(cmd, []string{tempDir})

View File

@@ -1,4 +1,4 @@
-package cmd
+package commands
 import (
     "fmt"


@@ -1,10 +1,10 @@
-package cmd
+package commands
 import (
     "fmt"
-    "github.com/belphemur/CBZOptimizer/converter"
-    "github.com/belphemur/CBZOptimizer/converter/constant"
-    "github.com/belphemur/CBZOptimizer/utils"
+    utils2 "github.com/belphemur/CBZOptimizer/v2/internal/utils"
+    "github.com/belphemur/CBZOptimizer/v2/pkg/converter"
+    "github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
     "github.com/pablodz/inotifywaitgo/inotifywaitgo"
     "github.com/spf13/cobra"
     "github.com/spf13/viper"
@@ -35,6 +35,9 @@ func init() {
     command.Flags().BoolP("override", "o", true, "Override the original CBZ files")
     _ = viper.BindPFlag("override", command.Flags().Lookup("override"))
+    command.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
+    _ = viper.BindPFlag("split", command.Flags().Lookup("split"))
     command.PersistentFlags().VarP(
         formatFlag,
         "format", "f",
@@ -50,7 +53,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
         return fmt.Errorf("path is required")
     }
-    if !utils.IsValidFolder(path) {
+    if !utils2.IsValidFolder(path) {
         return fmt.Errorf("the path needs to be a folder")
     }
@@ -61,6 +64,8 @@ func WatchCommand(_ *cobra.Command, args []string) error {
     override := viper.GetBool("override")
+    split := viper.GetBool("split")
     converterType := constant.FindConversionFormat(viper.GetString("format"))
     chapterConverter, err := converter.Get(converterType)
     if err != nil {
@@ -71,7 +76,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
     if err != nil {
         return fmt.Errorf("failed to prepare converter: %v", err)
     }
-    log.Printf("Watching [%s] with [override: %t, quality: %d, format: %s]", path, override, quality, converterType.String())
+    log.Printf("Watching [%s] with [override: %t, quality: %d, format: %s, split: %t]", path, override, quality, converterType.String(), split)
     events := make(chan inotifywaitgo.FileEvent)
     errors := make(chan error)
@@ -109,7 +114,13 @@ func WatchCommand(_ *cobra.Command, args []string) error {
         for _, e := range event.Events {
             switch e {
             case inotifywaitgo.CLOSE_WRITE, inotifywaitgo.MOVE:
-                err := utils.Optimize(chapterConverter, event.Filename, quality, override)
+                err := utils2.Optimize(&utils2.OptimizeOptions{
+                    ChapterConverter: chapterConverter,
+                    Path:             event.Filename,
+                    Quality:          quality,
+                    Override:         override,
+                    Split:            split,
+                })
                 if err != nil {
                     errors <- fmt.Errorf("error processing file %s: %w", event.Filename, err)
                 }

cmd/cbzoptimizer/main.go (new file)

@@ -0,0 +1,16 @@
package main
import (
"github.com/belphemur/CBZOptimizer/v2/cmd/cbzoptimizer/commands"
)
var (
version = "dev"
commit = "none"
date = "unknown"
)
func main() {
commands.SetVersionInfo(version, commit, date)
commands.Execute()
}


@@ -1,162 +0,0 @@
package converter
import (
"bytes"
"github.com/belphemur/CBZOptimizer/manga"
"image"
"image/jpeg"
"os"
"testing"
)
func TestConvertChapter(t *testing.T) {
testCases := []struct {
name string
genTestChapter func(path string) (*manga.Chapter, error)
}{
{
name: "All split pages",
genTestChapter: genBigPages,
},
{
name: "No split pages",
genTestChapter: genSmallPages,
},
{
name: "Mix of split and no split pages",
genTestChapter: genMixSmallBig,
},
}
// Load test genTestChapter from testdata
temp, err := os.CreateTemp("", "test_chapter_*.cbz")
if err != nil {
t.Fatalf("failed to create temporary file: %v", err)
}
defer os.Remove(temp.Name())
for _, converter := range Available() {
converter, err := Get(converter)
if err != nil {
t.Fatalf("failed to get converter: %v", err)
}
t.Run(converter.Format().String(), func(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
chapter, err := tc.genTestChapter(temp.Name())
if err != nil {
t.Fatalf("failed to load test genTestChapter: %v", err)
}
quality := uint8(80)
progress := func(msg string) {
t.Log(msg)
}
convertedChapter, err := converter.ConvertChapter(chapter, quality, progress)
if err != nil {
t.Fatalf("failed to convert genTestChapter: %v", err)
}
if len(convertedChapter.Pages) == 0 {
t.Fatalf("no pages were converted")
}
for _, page := range convertedChapter.Pages {
if page.Extension != ".webp" {
t.Errorf("page %d was not converted to webp format", page.Index)
}
}
})
}
})
}
}
func genBigPages(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer file.Close()
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 10000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
func genSmallPages(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer file.Close()
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
func genMixSmallBig(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer file.Close()
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000*(i+1)))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}

go.mod

@@ -1,24 +1,28 @@
-module github.com/belphemur/CBZOptimizer
+module github.com/belphemur/CBZOptimizer/v2
-go 1.23.0
+go 1.24
+toolchain go1.24.0
 require (
     github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de
     github.com/belphemur/go-webpbin/v2 v2.0.0
     github.com/oliamb/cutter v0.2.2
-    github.com/pablodz/inotifywaitgo v0.0.7
-    github.com/samber/lo v1.47.0
+    github.com/pablodz/inotifywaitgo v0.0.9
+    github.com/samber/lo v1.49.1
     github.com/spf13/cobra v1.8.1
     github.com/spf13/viper v1.19.0
-    github.com/thediveo/enumflag/v2 v2.0.5
-    golang.org/x/exp v0.0.0-20240823005443-9b4947da3948
-    golang.org/x/image v0.19.0
+    github.com/stretchr/testify v1.10.0
+    github.com/thediveo/enumflag/v2 v2.0.7
+    golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac
+    golang.org/x/image v0.24.0
 )
 require (
     github.com/andybalholm/brotli v1.1.0 // indirect
     github.com/belphemur/go-binwrapper v0.0.0-20240827152605-33977349b1f0 // indirect
-    github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect
+    github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
+    github.com/dsnet/compress v0.0.1 // indirect
     github.com/fsnotify/fsnotify v1.7.0 // indirect
     github.com/golang/snappy v0.0.4 // indirect
     github.com/hashicorp/hcl v1.0.0 // indirect
@@ -31,6 +35,7 @@ require (
     github.com/nwaples/rardecode v1.1.3 // indirect
     github.com/pelletier/go-toml/v2 v2.2.2 // indirect
     github.com/pierrec/lz4/v4 v4.1.21 // indirect
+    github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
     github.com/sagikazarmark/locafero v0.4.0 // indirect
     github.com/sagikazarmark/slog-shim v0.1.0 // indirect
     github.com/sourcegraph/conc v0.3.0 // indirect
@@ -42,8 +47,8 @@ require (
     github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
     go.uber.org/atomic v1.9.0 // indirect
     go.uber.org/multierr v1.9.0 // indirect
-    golang.org/x/sys v0.18.0 // indirect
-    golang.org/x/text v0.17.0 // indirect
+    golang.org/x/sys v0.28.0 // indirect
+    golang.org/x/text v0.22.0 // indirect
     gopkg.in/ini.v1 v1.67.0 // indirect
     gopkg.in/yaml.v3 v3.0.1 // indirect
 )

go.sum

@@ -11,24 +11,24 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY= github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE= github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad h1:a6HEuzUHeKH6hwfN/ZoQgRgVIWFJljSWa/zetS2WTvg=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@@ -50,16 +50,18 @@ github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3v
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc= github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k= github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU= github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
github.com/onsi/ginkgo/v2 v2.13.0 h1:0jY9lJquiL8fcf3M4LAXN5aMlS/b2BV86HFFPCPMgE4= github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
github.com/onsi/ginkgo/v2 v2.13.0/go.mod h1:TE309ZR8s5FsKKpuB1YAQYBzCaAfUgatB/xlT/ETL/o= github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk=
github.com/onsi/gomega v1.28.1 h1:MijcGUbfYuznzK/5R4CPNoUP/9Xvuo20sXfEm6XxoTA= github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
github.com/onsi/gomega v1.28.1/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
github.com/pablodz/inotifywaitgo v0.0.7 h1:1ii49dGBnRn0t1Sz7RGZS6/NberPEDQprwKHN49Bv6U= github.com/pablodz/inotifywaitgo v0.0.9 h1:njquRbBU7fuwIe5rEvtaniVBjwWzcpdUVptSgzFqZsw=
github.com/pablodz/inotifywaitgo v0.0.7/go.mod h1:OtzRCsYTJlIr+vAzlOtauTkfQ1c25ebFuXq8tbbf8cw= github.com/pablodz/inotifywaitgo v0.0.9/go.mod h1:hAfx2oN+WKg8miwUKPs52trySpPignlRBRxWcXVHku0=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
@@ -75,8 +77,8 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/samber/lo v1.47.0 h1:z7RynLwP5nbyRscyvcD043DWYoOcYRv3mV8lBeqOCLc= github.com/samber/lo v1.49.1 h1:4BIFyVfuQSEpluc7Fua+j1NolZHiEHEpaSEKdsH0tew=
github.com/samber/lo v1.47.0/go.mod h1:RmDH9Ct32Qy3gduHQuKJ3gW1fMHAnE/fAzQuf6He5cU= github.com/samber/lo v1.49.1/go.mod h1:dO6KHFzUKXgP8LDhU0oI8d2hekjXnGOu0DB8Jecxd6o=
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg= github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
@@ -101,15 +103,16 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/thediveo/enumflag/v2 v2.0.5 h1:VJjvlAqUb6m6mxOrB/0tfBJI0Kvi9wJ8ulh38xK87i8= github.com/thediveo/enumflag/v2 v2.0.7 h1:uxXDU+rTel7Hg4X0xdqICpG9rzuI/mzLAEYXWLflOfs=
github.com/thediveo/enumflag/v2 v2.0.5/go.mod h1:0NcG67nYgwwFsAvoQCmezG0J0KaIxZ0f7skg9eLq1DA= github.com/thediveo/enumflag/v2 v2.0.7/go.mod h1:bWlnNvTJuUK+huyzf3WECFLy557Ttlc+yk3o+BPs0EA=
github.com/thediveo/success v1.0.1 h1:NVwUOwKUwaN8szjkJ+vsiM2L3sNBFscldoDJ2g2tAPg= github.com/thediveo/success v1.0.2 h1:w+r3RbSjLmd7oiNnlCblfGqItcsaShcuAorRVh/+0xk=
github.com/thediveo/success v1.0.1/go.mod h1:AZ8oUArgbIsCuDEWrzWNQHdKnPbDOLQsWOFj9ynwLt0= github.com/thediveo/success v1.0.2/go.mod h1:hdPJB77k70w764lh8uLUZgNhgeTl3DYeZ4d4bwMO2CU=
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc= github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo= github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
@@ -118,22 +121,21 @@ go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
-golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA=
+golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac h1:l5+whBCLH3iH2ZNHYLbAe58bo7yrN4mVcnkHDYz5vvs=
-golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ=
+golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac/go.mod h1:hH+7mtFmImwwcMvScyxUhjuVHR3HGaDPMn9rMSUUbxo=
-golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ=
+golang.org/x/image v0.24.0 h1:AN7zRgVsbvmTfNyqIbbOraYL8mSwcKncEj8ofjgzcMQ=
-golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys=
+golang.org/x/image v0.24.0/go.mod h1:4b/ITuLfqYq1hqZcjofwctIhi7sZh2WaCjvsBNjjya8=
-golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
+golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
-golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
+golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
-golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
+golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
-golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
+golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
-golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
-golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
+golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
-golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
+golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
-golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=


@@ -3,7 +3,8 @@ package cbz
import (
"archive/zip"
"fmt"
-"github.com/belphemur/CBZOptimizer/manga"
+"github.com/belphemur/CBZOptimizer/v2/internal/manga"
+"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"os"
"time"
)
@@ -14,15 +15,14 @@ func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
if err != nil {
return fmt.Errorf("failed to create .cbz file: %w", err)
}
-defer zipFile.Close()
+defer errs.Capture(&err, zipFile.Close, "failed to close .cbz file")
// Create a new ZIP writer
zipWriter := zip.NewWriter(zipFile)
-err = zipWriter.SetComment("Created by CBZOptimizer")
if err != nil {
return err
}
-defer zipWriter.Close()
+defer errs.Capture(&err, zipWriter.Close, "failed to close .cbz writer")
// Write each page to the ZIP archive
for _, page := range chapter.Pages {
@@ -30,10 +30,10 @@ func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
var fileName string
if page.IsSplitted {
// Use the format page%03d-%02d for split pages
-fileName = fmt.Sprintf("page_%04d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
+fileName = fmt.Sprintf("%04d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
} else {
// Use the format page%03d for non-split pages
-fileName = fmt.Sprintf("page_%04d%s", page.Index, page.Extension)
+fileName = fmt.Sprintf("%04d%s", page.Index, page.Extension)
}
// Create a new file in the ZIP archive
@@ -71,18 +71,11 @@ func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
}
if chapter.IsConverted {
-convertedWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
-Name: "Converted.txt",
-Method: zip.Deflate,
-Modified: time.Now(),
-})
-if err != nil {
-return fmt.Errorf("failed to create Converted.txt in .cbz: %w", err)
-}
-_, err = convertedWriter.Write([]byte(fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime)))
+convertedString := fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime)
+err = zipWriter.SetComment(convertedString)
if err != nil {
-return fmt.Errorf("failed to write Converted.txt contents: %w", err)
+return fmt.Errorf("failed to write comment: %w", err)
}
}


@@ -3,18 +3,23 @@ package cbz
import (
"archive/zip"
"bytes"
-"github.com/belphemur/CBZOptimizer/manga"
+"fmt"
+"github.com/belphemur/CBZOptimizer/v2/internal/manga"
+"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"os"
"testing"
"time"
)
func TestWriteChapterToCBZ(t *testing.T) {
+currentTime := time.Now()
// Define test cases
testCases := []struct {
name string
chapter *manga.Chapter
expectedFiles []string
+expectedComment string
}{
//test case where there is only one page and ComicInfo and the chapter is converted
{
@@ -29,9 +34,10 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
ComicInfoXml: "<Series>Boundless Necromancer</Series>",
IsConverted: true,
-ConvertedTime: time.Now(),
+ConvertedTime: currentTime,
},
-expectedFiles: []string{"page_0000.jpg", "ComicInfo.xml", "Converted.txt"},
+expectedFiles: []string{"0000.jpg", "ComicInfo.xml"},
+expectedComment: fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", currentTime),
},
//test case where there is only one page and no
{
@@ -45,7 +51,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
},
},
-expectedFiles: []string{"page_0000.jpg"},
+expectedFiles: []string{"0000.jpg"},
},
{
name: "Multiple pages with ComicInfo",
@@ -64,7 +70,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
ComicInfoXml: "<Series>Boundless Necromancer</Series>",
},
-expectedFiles: []string{"page_0000.jpg", "page_0001.jpg", "ComicInfo.xml"},
+expectedFiles: []string{"0000.jpg", "0001.jpg", "ComicInfo.xml"},
},
{
name: "Split page",
@@ -79,7 +85,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
},
},
},
-expectedFiles: []string{"page_0000-01.jpg"},
+expectedFiles: []string{"0000-01.jpg"},
},
}
@@ -90,7 +96,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
if err != nil {
t.Fatalf("Failed to create temporary file: %v", err)
}
-defer os.Remove(tempFile.Name())
+defer errs.CaptureGeneric(&err, os.Remove, tempFile.Name(), "failed to remove temporary file")
// Write the chapter to the .cbz file
err = WriteChapterToCBZ(tc.chapter, tempFile.Name())
@@ -103,7 +109,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
if err != nil {
t.Fatalf("Failed to open CBZ file: %v", err)
}
-defer r.Close()
+defer errs.Capture(&err, r.Close, "failed to close CBZ file")
// Collect the names of the files in the archive
var filesInArchive []string
@@ -125,6 +131,10 @@ func TestWriteChapterToCBZ(t *testing.T) {
}
}
+if tc.expectedComment != "" && r.Comment != tc.expectedComment {
+t.Errorf("Expected comment %s, but found %s", tc.expectedComment, r.Comment)
+}
// Check if there are no unexpected files
if len(filesInArchive) != len(tc.expectedFiles) {
t.Errorf("Expected %d files, but found %d", len(tc.expectedFiles), len(filesInArchive))

internal/cbz/cbz_loader.go (new file)

@@ -0,0 +1,104 @@
package cbz
import (
"archive/zip"
"bufio"
"bytes"
"fmt"
"github.com/araddon/dateparse"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"io"
"path/filepath"
"strings"
)
func LoadChapter(filePath string) (*manga.Chapter, error) {
// Open the .cbz file
r, err := zip.OpenReader(filePath)
if err != nil {
return nil, fmt.Errorf("failed to open .cbz file: %w", err)
}
defer errs.Capture(&err, r.Close, "failed to close opened .cbz file")
chapter := &manga.Chapter{
FilePath: filePath,
}
// Check for comment
if r.Comment != "" {
scanner := bufio.NewScanner(strings.NewReader(r.Comment))
if scanner.Scan() {
convertedTime := scanner.Text()
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err == nil {
chapter.IsConverted = true
}
}
}
for _, f := range r.File {
if f.FileInfo().IsDir() {
continue
}
err := func() error {
// Open the file inside the zip
rc, err := f.Open()
if err != nil {
return fmt.Errorf("failed to open file inside .cbz: %w", err)
}
defer errs.Capture(&err, rc.Close, "failed to close file inside .cbz")
// Determine the file extension
ext := strings.ToLower(filepath.Ext(f.Name))
if ext == ".xml" && strings.ToLower(filepath.Base(f.Name)) == "comicinfo.xml" {
// Read the ComicInfo.xml file content
xmlContent, err := io.ReadAll(rc)
if err != nil {
return fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
}
chapter.ComicInfoXml = string(xmlContent)
} else if !chapter.IsConverted && ext == ".txt" && strings.ToLower(filepath.Base(f.Name)) == "converted.txt" {
textContent, err := io.ReadAll(rc)
if err != nil {
return fmt.Errorf("failed to read Converted.xml content: %w", err)
}
scanner := bufio.NewScanner(bytes.NewReader(textContent))
if scanner.Scan() {
convertedTime := scanner.Text()
chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
if err != nil {
return fmt.Errorf("failed to parse converted time: %w", err)
}
chapter.IsConverted = true
}
} else {
// Read the file contents for page
buf := new(bytes.Buffer)
_, err = io.Copy(buf, rc)
if err != nil {
return fmt.Errorf("failed to read file contents: %w", err)
}
// Create a new Page object
page := &manga.Page{
Index: uint16(len(chapter.Pages)), // Simple index based on order
Extension: ext,
Size: uint64(buf.Len()),
Contents: buf,
IsSplitted: false,
}
// Add the page to the chapter
chapter.Pages = append(chapter.Pages, page)
}
return nil
}()
if err != nil {
return nil, err
}
}
return chapter, nil
}
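
For orientation, here is a minimal usage sketch (not part of the commits above; it assumes the code is compiled inside this module, since internal packages cannot be imported from outside it) that loads a chapter with LoadChapter and writes it back out with WriteChapterToCBZ:

package main

import (
	"log"

	"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
)

func main() {
	// Load an existing archive; LoadChapter also detects a previous conversion
	// via the ZIP comment or a legacy Converted.txt entry.
	chapter, err := cbz.LoadChapter("testdata/Chapter 1.cbz")
	if err != nil {
		log.Fatalf("failed to load chapter: %v", err)
	}
	log.Printf("loaded %d pages, already converted: %v", len(chapter.Pages), chapter.IsConverted)

	// Write the chapter back out under a hypothetical output path.
	if err := cbz.WriteChapterToCBZ(chapter, "testdata/Chapter 1_copy.cbz"); err != nil {
		log.Fatalf("failed to write chapter: %v", err)
	}
}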


@@ -17,14 +17,14 @@ func TestLoadChapter(t *testing.T) {
testCases := []testCase{
{
name: "Original Chapter",
-filePath: "../testdata/Chapter 1.cbz",
+filePath: "../../testdata/Chapter 1.cbz",
expectedPages: 16,
expectedSeries: "<Series>Boundless Necromancer</Series>",
expectedConversion: false,
},
{
name: "Converted Chapter",
-filePath: "../testdata/Chapter 1_converted.cbz",
+filePath: "../../testdata/Chapter 10_converted.cbz",
expectedPages: 107,
expectedSeries: "<Series>Boundless Necromancer</Series>",
expectedConversion: true,


@@ -0,0 +1,32 @@
package manga
import (
"bytes"
"image"
)
// PageContainer is a struct that holds a manga page, its image, and the image format.
type PageContainer struct {
// Page is a pointer to a manga page object.
Page *Page
// Image is the decoded image of the manga page.
Image image.Image
// Format is a string representing the format of the image (e.g., "png", "jpeg", "webp").
Format string
// IsToBeConverted is a boolean flag indicating whether the image needs to be converted to another format.
IsToBeConverted bool
// HasBeenConverted is a boolean flag indicating whether the image has been converted to another format.
HasBeenConverted bool
}
func NewContainer(Page *Page, img image.Image, format string, isToBeConverted bool) *PageContainer {
return &PageContainer{Page: Page, Image: img, Format: format, IsToBeConverted: isToBeConverted, HasBeenConverted: false}
}
// SetConverted sets the converted image, its extension, and its size in the PageContainer.
func (pc *PageContainer) SetConverted(converted *bytes.Buffer, extension string) {
pc.Page.Contents = converted
pc.Page.Extension = extension
pc.Page.Size = uint64(converted.Len())
pc.HasBeenConverted = true
}
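
A small sketch (hypothetical, not in the repository) of the intended flow for this container: decode a page somewhere upstream, then record the converted bytes through SetConverted so Contents, Extension and Size stay in sync:

package example

import (
	"bytes"
	"image/png"

	"github.com/belphemur/CBZOptimizer/v2/internal/manga"
)

// markConverted re-encodes the container's image as PNG and records the result
// on the underlying page via SetConverted.
func markConverted(pc *manga.PageContainer) error {
	buf := new(bytes.Buffer)
	if err := png.Encode(buf, pc.Image); err != nil {
		return err
	}
	pc.SetConverted(buf, ".png")
	return nil
}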


@@ -0,0 +1,25 @@
package errs
import (
"errors"
"fmt"
)
// Capture runs errFunc and assigns the error, if any, to *errPtr. Preserves the
// original error by wrapping with errors.Join if the errFunc err is non-nil.
func Capture(errPtr *error, errFunc func() error, msg string) {
err := errFunc()
if err == nil {
return
}
*errPtr = errors.Join(*errPtr, fmt.Errorf("%s: %w", msg, err))
}
// CaptureGeneric runs errFunc with a generic type K and assigns the error, if any, to *errPtr.
func CaptureGeneric[K any](errPtr *error, errFunc func(value K) error, value K, msg string) {
err := errFunc(value)
if err == nil {
return
}
*errPtr = errors.Join(*errPtr, fmt.Errorf("%s: %w", msg, err))
}
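
As a usage sketch (hypothetical, not part of the commit), Capture is meant to be paired with a named return value so that a failing deferred Close is joined onto the function's error instead of being silently dropped:

package example

import (
	"fmt"
	"os"

	"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
)

// writeNote shows the deferred-close pattern used throughout the cbz package.
func writeNote(path string) (err error) {
	f, err := os.Create(path)
	if err != nil {
		return fmt.Errorf("failed to create file: %w", err)
	}
	// If Close fails, its error is joined onto err via errors.Join.
	defer errs.Capture(&err, f.Close, "failed to close file")

	_, err = f.WriteString("hello")
	return err
}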


@@ -0,0 +1,122 @@
package errs
import (
"errors"
"fmt"
"testing"
)
func TestCapture(t *testing.T) {
tests := []struct {
name string
initial error
errFunc func() error
msg string
expected string
}{
{
name: "No error from errFunc",
initial: nil,
errFunc: func() error { return nil },
msg: "test message",
expected: "",
},
{
name: "Error from errFunc with no initial error",
initial: nil,
errFunc: func() error { return errors.New("error from func") },
msg: "test message",
expected: "test message: error from func",
},
{
name: "Error from errFunc with initial error",
initial: errors.New("initial error"),
errFunc: func() error { return errors.New("error from func") },
msg: "test message",
expected: "initial error\ntest message: error from func",
},
{
name: "Error from errFunc with initial wrapped error",
initial: fmt.Errorf("wrapped error: %w", errors.New("initial error")),
errFunc: func() error { return errors.New("error from func") },
msg: "test message",
expected: "wrapped error: initial error\ntest message: error from func",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var err error = tt.initial
Capture(&err, tt.errFunc, tt.msg)
if err != nil && err.Error() != tt.expected {
t.Errorf("expected %q, got %q", tt.expected, err.Error())
} else if err == nil && tt.expected != "" {
t.Errorf("expected %q, got nil", tt.expected)
}
})
}
}
func TestCaptureGeneric(t *testing.T) {
tests := []struct {
name string
initial error
errFunc func(int) error
value int
msg string
expected string
}{
{
name: "No error from errFunc",
initial: nil,
errFunc: func(value int) error { return nil },
value: 0,
msg: "test message",
expected: "",
},
{
name: "Error from errFunc with no initial error",
initial: nil,
errFunc: func(value int) error { return errors.New("error from func") },
value: 0,
msg: "test message",
expected: "test message: error from func",
},
{
name: "Error from errFunc with initial error",
initial: errors.New("initial error"),
errFunc: func(value int) error { return errors.New("error from func") },
value: 0,
msg: "test message",
expected: "initial error\ntest message: error from func",
},
{
name: "Error from errFunc with initial wrapped error",
initial: fmt.Errorf("wrapped error: %w", errors.New("initial error")),
errFunc: func(value int) error { return errors.New("error from func") },
value: 0,
msg: "test message",
expected: "wrapped error: initial error\ntest message: error from func",
},
{
name: "Error from errFunc with initial wrapped error and value",
initial: fmt.Errorf("wrapped error: %w", errors.New("initial error")),
errFunc: func(value int) error { return fmt.Errorf("hello error:%d", value) },
value: 1,
msg: "test message",
expected: "wrapped error: initial error\ntest message: hello error:1",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var err error = tt.initial
CaptureGeneric(&err, tt.errFunc, tt.value, tt.msg)
if err != nil && err.Error() != tt.expected {
t.Errorf("expected %q, got %q", tt.expected, err.Error())
} else if err == nil && tt.expected != "" {
t.Errorf("expected %q, got nil", tt.expected)
}
})
}
}


@@ -0,0 +1,67 @@
package utils
import (
"errors"
"fmt"
"github.com/belphemur/CBZOptimizer/v2/internal/cbz"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
errors2 "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
"log"
"strings"
)
type OptimizeOptions struct {
ChapterConverter converter.Converter
Path string
Quality uint8
Override bool
Split bool
}
// Optimize optimizes a CBZ file using the specified converter.
func Optimize(options *OptimizeOptions) error {
log.Printf("Processing file: %s\n", options.Path)
// Load the chapter
chapter, err := cbz.LoadChapter(options.Path)
if err != nil {
return fmt.Errorf("failed to load chapter: %v", err)
}
if chapter.IsConverted {
log.Printf("Chapter already converted: %s", options.Path)
return nil
}
// Convert the chapter
convertedChapter, err := options.ChapterConverter.ConvertChapter(chapter, options.Quality, options.Split, func(msg string, current uint32, total uint32) {
if current%10 == 0 || current == total {
log.Printf("[%s] Converting: %d/%d", chapter.FilePath, current, total)
}
})
if err != nil {
var pageIgnoredError *errors2.PageIgnoredError
if !errors.As(err, &pageIgnoredError) {
return fmt.Errorf("failed to convert chapter: %v", err)
}
}
if convertedChapter == nil {
return fmt.Errorf("failed to convert chapter")
}
convertedChapter.SetConverted()
// Write the converted chapter back to a CBZ file
outputPath := options.Path
if !options.Override {
outputPath = strings.TrimSuffix(options.Path, ".cbz") + "_converted.cbz"
}
err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
if err != nil {
return fmt.Errorf("failed to write converted chapter: %v", err)
}
log.Printf("Converted file written to: %s\n", outputPath)
return nil
}
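
A hypothetical caller could wire this up as follows. This is a sketch only: the utils import path is assumed, and the converter registry is assumed to expose Available and Get the same way the converter tests below use them.

package main

import (
	"log"

	"github.com/belphemur/CBZOptimizer/v2/internal/utils"
	"github.com/belphemur/CBZOptimizer/v2/pkg/converter"
)

func main() {
	// Resolve the first registered converter.
	var chapterConverter converter.Converter
	for _, format := range converter.Available() {
		c, err := converter.Get(format)
		if err != nil {
			log.Fatalf("failed to get converter: %v", err)
		}
		chapterConverter = c
		break
	}
	if chapterConverter == nil {
		log.Fatal("no converter registered")
	}

	err := utils.Optimize(&utils.OptimizeOptions{
		ChapterConverter: chapterConverter,
		Path:             "testdata/Chapter 1.cbz",
		Quality:          80,
		Override:         false,
		Split:            true,
	})
	if err != nil {
		log.Fatal(err)
	}
}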

main.go (deleted)

@@ -1,16 +0,0 @@
package main
import (
"github.com/belphemur/CBZOptimizer/cmd"
)
var (
version = "dev"
commit = "none"
date = "unknown"
)
func main() {
cmd.SetVersionInfo(version, commit, date)
cmd.Execute()
}


@@ -1,17 +0,0 @@
package manga
import "image"
// PageContainer is a struct that holds a manga page, its image, and the image format.
type PageContainer struct {
// Page is a pointer to a manga page object.
Page *Page
// Image is the decoded image of the manga page.
Image image.Image
// Format is a string representing the format of the image (e.g., "png", "jpeg", "webp").
Format string
}
func NewContainer(Page *Page, img image.Image, format string) *PageContainer {
return &PageContainer{Page: Page, Image: img, Format: format}
}


@@ -1,5 +0,0 @@
package meta
var Version = "v0.0.0"
var Commit = ""
var Date = ""


@@ -2,9 +2,9 @@ package converter
import (
"fmt"
-"github.com/belphemur/CBZOptimizer/converter/constant"
-"github.com/belphemur/CBZOptimizer/converter/webp"
-"github.com/belphemur/CBZOptimizer/manga"
+"github.com/belphemur/CBZOptimizer/v2/internal/manga"
+"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
+"github.com/belphemur/CBZOptimizer/v2/pkg/converter/webp"
"github.com/samber/lo"
"strings"
)
@@ -12,7 +12,10 @@ import (
type Converter interface {
// Format of the converter
Format() (format constant.ConversionFormat)
-ConvertChapter(chapter *manga.Chapter, quality uint8, progress func(string)) (*manga.Chapter, error)
+// ConvertChapter converts a manga chapter to the specified format.
+//
+// Returns partial success where some pages are converted and some are not.
+ConvertChapter(chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error)
PrepareConverter() error
}
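
To illustrate the updated signature, here is a minimal no-op implementation sketch (hypothetical, not in the repository) that satisfies the interface and reports progress per page via the new callback:

package converter

import (
	"fmt"

	"github.com/belphemur/CBZOptimizer/v2/internal/manga"
	"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
)

// noopConverter passes pages through untouched; it only exists to show the
// shape of the split flag and the richer progress callback.
type noopConverter struct{}

func (n *noopConverter) Format() constant.ConversionFormat { return constant.WebP }

func (n *noopConverter) PrepareConverter() error { return nil }

func (n *noopConverter) ConvertChapter(chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
	total := uint32(len(chapter.Pages))
	for i := range chapter.Pages {
		// Report one progress tick per page, even though nothing is converted.
		progress(fmt.Sprintf("page %d left as-is", i), uint32(i+1), total)
	}
	return chapter, nil
}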


@@ -0,0 +1,234 @@
package converter
import (
"bytes"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"golang.org/x/exp/slices"
"image"
"image/jpeg"
"os"
"testing"
)
func TestConvertChapter(t *testing.T) {
testCases := []struct {
name string
genTestChapter func(path string) (*manga.Chapter, error)
split bool
expectFailure []constant.ConversionFormat
expectPartialSuccess []constant.ConversionFormat
}{
{
name: "All split pages",
genTestChapter: genHugePage,
split: true,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
},
{
name: "Big Pages, no split",
genTestChapter: genHugePage,
split: false,
expectFailure: []constant.ConversionFormat{constant.WebP},
expectPartialSuccess: []constant.ConversionFormat{},
},
{
name: "No split pages",
genTestChapter: genSmallPages,
split: false,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
},
{
name: "Mix of split and no split pages",
genTestChapter: genMixSmallBig,
split: true,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
},
{
name: "Mix of Huge and small page",
genTestChapter: genMixSmallHuge,
split: false,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{constant.WebP},
},
}
// Load test genTestChapter from testdata
temp, err := os.CreateTemp("", "test_chapter_*.cbz")
if err != nil {
t.Fatalf("failed to create temporary file: %v", err)
}
defer errs.CaptureGeneric(&err, os.Remove, temp.Name(), "failed to remove temporary file")
for _, converter := range Available() {
converter, err := Get(converter)
if err != nil {
t.Fatalf("failed to get converter: %v", err)
}
t.Run(converter.Format().String(), func(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
chapter, err := tc.genTestChapter(temp.Name())
if err != nil {
t.Fatalf("failed to load test genTestChapter: %v", err)
}
quality := uint8(80)
progress := func(msg string, current uint32, total uint32) {
t.Log(msg)
}
convertedChapter, err := converter.ConvertChapter(chapter, quality, tc.split, progress)
if err != nil {
if convertedChapter != nil && slices.Contains(tc.expectPartialSuccess, converter.Format()) {
t.Logf("Partial success to convert genTestChapter: %v", err)
return
}
if slices.Contains(tc.expectFailure, converter.Format()) {
t.Logf("Expected failure to convert genTestChapter: %v", err)
return
}
t.Fatalf("failed to convert genTestChapter: %v", err)
} else if slices.Contains(tc.expectFailure, converter.Format()) {
t.Fatalf("expected failure to convert genTestChapter didn't happen")
}
if len(convertedChapter.Pages) == 0 {
t.Fatalf("no pages were converted")
}
if len(convertedChapter.Pages) != len(chapter.Pages) {
t.Fatalf("converted chapter has different number of pages")
}
for _, page := range convertedChapter.Pages {
if page.Extension != ".webp" {
t.Errorf("page %d was not converted to webp format", page.Index)
}
}
})
}
})
}
}
func genHugePage(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 1; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 1, 17000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
func genSmallPages(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
func genMixSmallBig(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000*(i+1)))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}
func genMixSmallHuge(path string) (*manga.Chapter, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 10; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 1, 2000*(i+1)))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}


@@ -0,0 +1,13 @@
package errors
type PageIgnoredError struct {
s string
}
func (e *PageIgnoredError) Error() string {
return e.s
}
func NewPageIgnored(text string) error {
return &PageIgnoredError{text}
}
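
Callers are expected to treat this error as a partial-success signal rather than a hard failure. A hedged sketch of the intended check, mirroring what the Optimize helper above does:

package example

import (
	"errors"
	"fmt"

	converrors "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
)

// ignoredOnly reports whether err represents a page-ignored condition.
func ignoredOnly(err error) bool {
	var pageIgnored *converrors.PageIgnoredError
	return errors.As(err, &pageIgnored)
}

func main() {
	err := converrors.NewPageIgnored("page 3 is too tall to convert")
	if ignoredOnly(err) {
		fmt.Println("partial success:", err)
	}
}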


@@ -2,21 +2,25 @@ package webp
import (
"bytes"
+"errors"
"fmt"
-"github.com/belphemur/CBZOptimizer/converter/constant"
-packer2 "github.com/belphemur/CBZOptimizer/manga"
-"github.com/oliamb/cutter"
-"golang.org/x/exp/slices"
-_ "golang.org/x/image/webp"
"image"
_ "image/jpeg"
"image/png"
-"io"
"runtime"
"sync"
"sync/atomic"
+"github.com/belphemur/CBZOptimizer/v2/internal/manga"
+"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
+converterrors "github.com/belphemur/CBZOptimizer/v2/pkg/converter/errors"
+"github.com/oliamb/cutter"
+"golang.org/x/exp/slices"
+_ "golang.org/x/image/webp"
)
+const webpMaxHeight = 16383
type Converter struct {
maxHeight int
cropHeight int
@@ -48,7 +52,7 @@ func (converter *Converter) PrepareConverter() error {
return nil
}
-func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uint8, progress func(string)) (*packer2.Chapter, error) {
+func (converter *Converter) ConvertChapter(chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
err := converter.PrepareConverter()
if err != nil {
return nil, err
@@ -57,34 +61,38 @@ func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uin
var wgConvertedPages sync.WaitGroup
maxGoroutines := runtime.NumCPU()
-pagesChan := make(chan *packer2.PageContainer, maxGoroutines)
+pagesChan := make(chan *manga.PageContainer, maxGoroutines)
errChan := make(chan error, maxGoroutines)
+doneChan := make(chan struct{})
var wgPages sync.WaitGroup
wgPages.Add(len(chapter.Pages))
guard := make(chan struct{}, maxGoroutines)
pagesMutex := sync.Mutex{}
-var pages []*packer2.Page
+var pages []*manga.Page
var totalPages = uint32(len(chapter.Pages))
+// Start the worker pool
go func() {
for page := range pagesChan {
guard <- struct{}{} // would block if guard channel is already filled
-go func(pageToConvert *packer2.PageContainer) {
+go func(pageToConvert *manga.PageContainer) {
-defer wgConvertedPages.Done()
+defer func() {
+wgConvertedPages.Done()
+<-guard
+}()
convertedPage, err := converter.convertPage(pageToConvert, quality)
if err != nil {
if convertedPage == nil {
errChan <- err
-<-guard
return
}
buffer := new(bytes.Buffer)
err := png.Encode(buffer, convertedPage.Image)
if err != nil {
errChan <- err
-<-guard
return
}
convertedPage.Page.Contents = buffer
@@ -93,68 +101,83 @@ func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uin
}
pagesMutex.Lock()
pages = append(pages, convertedPage.Page)
-progress(fmt.Sprintf("Converted %d/%d pages to %s format", len(pages), totalPages, converter.Format()))
+progress(fmt.Sprintf("Converted %d/%d pages to %s format", len(pages), totalPages, converter.Format()), uint32(len(pages)), totalPages)
pagesMutex.Unlock()
-<-guard
}(page)
}
+close(doneChan)
}()
+// Process pages
for _, page := range chapter.Pages {
-go func(page *packer2.Page) {
+go func(page *manga.Page) {
defer wgPages.Done()
-splitNeeded, img, format, err := converter.checkPageNeedsSplit(page)
+splitNeeded, img, format, err := converter.checkPageNeedsSplit(page, split)
if err != nil {
-errChan <- fmt.Errorf("error checking if page %d of genTestChapter %s needs split: %v", page.Index, chapter.FilePath, err)
+errChan <- err
+if img != nil {
+wgConvertedPages.Add(1)
+pagesChan <- manga.NewContainer(page, img, format, false)
+}
return
}
if !splitNeeded {
wgConvertedPages.Add(1)
-pagesChan <- packer2.NewContainer(page, img, format)
+pagesChan <- manga.NewContainer(page, img, format, true)
return
}
images, err := converter.cropImage(img)
if err != nil {
-errChan <- fmt.Errorf("error converting page %d of genTestChapter %s to webp: %v", page.Index, chapter.FilePath, err)
+errChan <- err
return
}
atomic.AddUint32(&totalPages, uint32(len(images)-1))
for i, img := range images {
-page := &packer2.Page{Index: page.Index, IsSplitted: true, SplitPartIndex: uint16(i)}
+newPage := &manga.Page{
+Index: page.Index,
+IsSplitted: true,
+SplitPartIndex: uint16(i),
+}
wgConvertedPages.Add(1)
-pagesChan <- packer2.NewContainer(page, img, "N/A")
+pagesChan <- manga.NewContainer(newPage, img, "N/A", true)
}
}(page)
}
wgPages.Wait()
-wgConvertedPages.Wait()
close(pagesChan)
+// Wait for all conversions to complete
+<-doneChan
+wgConvertedPages.Wait()
close(errChan)
+close(guard)
var errList []error
for err := range errChan {
errList = append(errList, err)
}
+var aggregatedError error = nil
if len(errList) > 0 {
-return nil, fmt.Errorf("encountered errors: %v", errList)
+aggregatedError = errors.Join(errList...)
}
-slices.SortFunc(pages, func(a, b *packer2.Page) int {
+slices.SortFunc(pages, func(a, b *manga.Page) int {
if a.Index == b.Index {
-return int(b.SplitPartIndex - a.SplitPartIndex)
+return int(a.SplitPartIndex) - int(b.SplitPartIndex)
}
-return int(b.Index - a.Index)
+return int(a.Index) - int(b.Index)
})
chapter.Pages = pages
runtime.GC()
-return chapter, nil
+return chapter, aggregatedError
}
func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
@@ -190,8 +213,8 @@ func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
return parts, nil
}
-func (converter *Converter) checkPageNeedsSplit(page *packer2.Page) (bool, image.Image, string, error) {
+func (converter *Converter) checkPageNeedsSplit(page *manga.Page, splitRequested bool) (bool, image.Image, string, error) {
-reader := io.Reader(bytes.NewBuffer(page.Contents.Bytes()))
+reader := bytes.NewBuffer(page.Contents.Bytes())
img, format, err := image.Decode(reader)
if err != nil {
return false, nil, format, err
@@ -200,20 +223,26 @@ func (converter *Converter) checkPageNeedsSplit(page *packer2.Page) (bool, image
bounds := img.Bounds()
height := bounds.Dy()
-return height >= converter.maxHeight, img, format, nil
+if height >= webpMaxHeight && !splitRequested {
+return false, img, format, converterrors.NewPageIgnored(fmt.Sprintf("page %d is too tall [max: %dpx] to be converted to webp format", page.Index, webpMaxHeight))
+}
+return height >= converter.maxHeight && splitRequested, img, format, nil
}
-func (converter *Converter) convertPage(container *packer2.PageContainer, quality uint8) (*packer2.PageContainer, error) {
+func (converter *Converter) convertPage(container *manga.PageContainer, quality uint8) (*manga.PageContainer, error) {
-if container.Format == "webp" {
+// Fix WebP format detection (case insensitive)
+if container.Format == "webp" || container.Format == "WEBP" {
+container.Page.Extension = ".webp"
+return container, nil
+}
+if !container.IsToBeConverted {
return container, nil
}
converted, err := converter.convert(container.Image, uint(quality))
if err != nil {
return nil, err
}
-container.Page.Contents = converted
+container.SetConverted(converted, ".webp")
-container.Page.Extension = ".webp"
-container.Page.Size = uint64(converted.Len())
return container, nil
}


@@ -0,0 +1,324 @@
package webp
import (
"bytes"
"image"
"image/color"
"image/png"
"sync"
"testing"
"image/jpeg"
_ "golang.org/x/image/webp"
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func createTestImage(width, height int, format string) (image.Image, error) {
img := image.NewRGBA(image.Rect(0, 0, width, height))
// Create a gradient pattern to ensure we have actual image data
for y := 0; y < height; y++ {
for x := 0; x < width; x++ {
img.Set(x, y, color.RGBA{
R: uint8((x * 255) / width),
G: uint8((y * 255) / height),
B: 100,
A: 255,
})
}
}
return img, nil
}
func encodeImage(img image.Image, format string) (*bytes.Buffer, string, error) {
buf := new(bytes.Buffer)
switch format {
case "jpeg", "jpg":
if err := jpeg.Encode(buf, img, &jpeg.Options{Quality: 85}); err != nil {
return nil, "", err
}
return buf, ".jpg", nil
case "webp":
PrepareEncoder()
if err := Encode(buf, img, 80); err != nil {
return nil, "", err
}
return buf, ".webp", nil
case "png":
fallthrough
default:
if err := png.Encode(buf, img); err != nil {
return nil, "", err
}
return buf, ".png", nil
}
}
func createTestPage(t *testing.T, index int, width, height int, format string) *manga.Page {
img, err := createTestImage(width, height, format)
require.NoError(t, err)
buf, ext, err := encodeImage(img, format)
require.NoError(t, err)
return &manga.Page{
Index: uint16(index),
Contents: buf,
Extension: ext,
Size: uint64(buf.Len()),
}
}
func validateConvertedImage(t *testing.T, page *manga.Page) {
require.NotNil(t, page.Contents)
require.Greater(t, page.Contents.Len(), 0)
// Try to decode the image
img, format, err := image.Decode(bytes.NewReader(page.Contents.Bytes()))
require.NoError(t, err, "Failed to decode converted image")
if page.Extension == ".webp" {
assert.Equal(t, "webp", format, "Expected WebP format")
}
require.NotNil(t, img)
bounds := img.Bounds()
assert.Greater(t, bounds.Dx(), 0, "Image width should be positive")
assert.Greater(t, bounds.Dy(), 0, "Image height should be positive")
}
// TestConverter_ConvertChapter tests the ConvertChapter method of the WebP converter.
// It verifies various scenarios including:
// - Converting single normal images
// - Converting multiple normal images
// - Converting tall images with split enabled
// - Handling tall images that exceed maximum height
//
// For each test case it validates:
// - Proper error handling
// - Expected number of output pages
// - Correct page ordering
// - Split page handling and indexing
// - Progress callback behavior
//
// The test uses different image dimensions and split settings to ensure
// the converter handles all cases correctly while maintaining proper
// progress reporting and page ordering.
func TestConverter_ConvertChapter(t *testing.T) {
tests := []struct {
name string
pages []*manga.Page
split bool
expectSplit bool
expectError bool
numExpected int
}{
{
name: "Single normal image",
pages: []*manga.Page{createTestPage(t, 1, 800, 1200, "jpeg")},
split: false,
expectSplit: false,
numExpected: 1,
},
{
name: "Multiple normal images",
pages: []*manga.Page{
createTestPage(t, 1, 800, 1200, "png"),
createTestPage(t, 2, 800, 1200, "jpeg"),
},
split: false,
expectSplit: false,
numExpected: 2,
},
{
name: "Tall image with split enabled",
pages: []*manga.Page{createTestPage(t, 1, 800, 5000, "jpeg")},
split: true,
expectSplit: true,
numExpected: 3, // Based on cropHeight of 2000
},
{
name: "Tall image without split",
pages: []*manga.Page{createTestPage(t, 1, 800, webpMaxHeight+100, "png")},
split: false,
expectError: true,
numExpected: 1,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
converter := New()
err := converter.PrepareConverter()
require.NoError(t, err)
chapter := &manga.Chapter{
Pages: tt.pages,
}
var progressMutex sync.Mutex
var lastProgress uint32
progress := func(message string, current uint32, total uint32) {
progressMutex.Lock()
defer progressMutex.Unlock()
assert.GreaterOrEqual(t, current, lastProgress, "Progress should never decrease")
lastProgress = current
assert.LessOrEqual(t, current, total, "Current progress should not exceed total")
}
convertedChapter, err := converter.ConvertChapter(chapter, 80, tt.split, progress)
if tt.expectError {
assert.Error(t, err)
if convertedChapter != nil {
assert.LessOrEqual(t, len(convertedChapter.Pages), tt.numExpected)
}
return
}
require.NoError(t, err)
require.NotNil(t, convertedChapter)
assert.Len(t, convertedChapter.Pages, tt.numExpected)
// Validate all converted images
for _, page := range convertedChapter.Pages {
validateConvertedImage(t, page)
}
// Verify page order
for i := 1; i < len(convertedChapter.Pages); i++ {
prevPage := convertedChapter.Pages[i-1]
currPage := convertedChapter.Pages[i]
if prevPage.Index == currPage.Index {
assert.Less(t, prevPage.SplitPartIndex, currPage.SplitPartIndex,
"Split parts should be in ascending order for page %d", prevPage.Index)
} else {
assert.Less(t, prevPage.Index, currPage.Index,
"Pages should be in ascending order")
}
}
if tt.expectSplit {
splitFound := false
for _, page := range convertedChapter.Pages {
if page.IsSplitted {
splitFound = true
break
}
}
assert.True(t, splitFound, "Expected to find at least one split page")
}
})
}
}
func TestConverter_convertPage(t *testing.T) {
converter := New()
err := converter.PrepareConverter()
require.NoError(t, err)
tests := []struct {
name string
format string
isToBeConverted bool
expectWebP bool
}{
{
name: "Convert PNG to WebP",
format: "png",
isToBeConverted: true,
expectWebP: true,
},
{
name: "Already WebP",
format: "webp",
isToBeConverted: true,
expectWebP: true,
},
{
name: "Skip conversion",
format: "png",
isToBeConverted: false,
expectWebP: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
page := createTestPage(t, 1, 100, 100, tt.format)
img, err := createTestImage(100, 100, tt.format)
require.NoError(t, err)
container := manga.NewContainer(page, img, tt.format, tt.isToBeConverted)
converted, err := converter.convertPage(container, 80)
require.NoError(t, err)
assert.NotNil(t, converted)
if tt.expectWebP {
assert.Equal(t, ".webp", converted.Page.Extension)
validateConvertedImage(t, converted.Page)
} else {
assert.NotEqual(t, ".webp", converted.Page.Extension)
}
})
}
}
func TestConverter_checkPageNeedsSplit(t *testing.T) {
converter := New()
tests := []struct {
name string
imageHeight int
split bool
expectSplit bool
expectError bool
}{
{
name: "Normal height",
imageHeight: 1000,
split: true,
expectSplit: false,
},
{
name: "Height exceeds max with split enabled",
imageHeight: 5000,
split: true,
expectSplit: true,
},
{
name: "Height exceeds webp max without split",
imageHeight: webpMaxHeight + 100,
split: false,
expectError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
page := createTestPage(t, 1, 800, tt.imageHeight, "jpeg")
needsSplit, img, format, err := converter.checkPageNeedsSplit(page, tt.split)
if tt.expectError {
assert.Error(t, err)
return
}
require.NoError(t, err)
assert.NotNil(t, img)
assert.NotEmpty(t, format)
assert.Equal(t, tt.expectSplit, needsSplit)
})
}
}
func TestConverter_Format(t *testing.T) {
converter := New()
assert.Equal(t, constant.WebP, converter.Format())
}


@@ -6,7 +6,7 @@ import (
"io" "io"
) )
const libwebpVersion = "1.4.0" const libwebpVersion = "1.5.0"
func PrepareEncoder() error { func PrepareEncoder() error {
webpbin.SetLibVersion(libwebpVersion) webpbin.SetLibVersion(libwebpVersion)


@@ -2,5 +2,16 @@
"$schema": "https://docs.renovatebot.com/renovate-schema.json", "$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [ "extends": [
"config:recommended" "config:recommended"
],
"packageRules": [
{
"matchUpdateTypes": [
"minor",
"patch",
"digest"
],
"matchCurrentVersion": "!/^0/",
"automerge": true
}
] ]
} }


@@ -1,48 +0,0 @@
package utils
import (
"fmt"
"github.com/belphemur/CBZOptimizer/cbz"
"github.com/belphemur/CBZOptimizer/converter"
"log"
"strings"
)
// Optimize optimizes a CBZ file using the specified converter.
func Optimize(chapterConverter converter.Converter, path string, quality uint8, override bool) error {
log.Printf("Processing file: %s\n", path)
// Load the chapter
chapter, err := cbz.LoadChapter(path)
if err != nil {
return fmt.Errorf("failed to load chapter: %v", err)
}
if chapter.IsConverted {
log.Printf("Chapter already converted: %s", path)
return nil
}
// Convert the chapter
convertedChapter, err := chapterConverter.ConvertChapter(chapter, quality, func(msg string) {
log.Printf("[%s]%s", path, msg)
})
if err != nil {
return fmt.Errorf("failed to convert chapter: %v", err)
}
convertedChapter.SetConverted()
// Write the converted chapter back to a CBZ file
outputPath := path
if !override {
outputPath = strings.TrimSuffix(path, ".cbz") + "_converted.cbz"
}
err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
if err != nil {
return fmt.Errorf("failed to write converted chapter: %v", err)
}
log.Printf("Converted file written to: %s\n", outputPath)
return nil
}