21 Commits

Author SHA1 Message Date
Antoine Aflalo
d245b80c65 fix: naming issue 2025-09-03 22:21:37 -04:00
Antoine Aflalo
011f7a7a7f Merge pull request #109 from Belphemur/feat/gif
Feat/gif
2025-09-03 22:19:07 -04:00
Antoine Aflalo
f159d3d0d0 fix: keep the error 2025-09-03 22:17:41 -04:00
Antoine Aflalo
ede8d62572 fix: Keep page as they are if we can't decode them and disable conversion 2025-09-03 22:15:10 -04:00
Antoine Aflalo
a151a1d4f8 tests(corruption): add test for corrupt pages 2025-09-03 21:38:21 -04:00
Antoine Aflalo
30ea3d4583 test: add test for page type 2025-09-03 21:34:39 -04:00
Antoine Aflalo
6205e3ea28 feat(gif): support gif file
See .gif file extension support and more exception handling
Fixes #105
2025-09-03 21:04:51 -04:00
Antoine Aflalo
f6bdc3cd86 Merge pull request #106 from Belphemur/dependabot/go_modules/go_modules-004c5295e3 2025-09-03 11:01:04 -04:00
Antoine Aflalo
70257a0439 Merge pull request #107 from Belphemur/renovate/actions-attest-build-provenance-3.x
chore(deps): update actions/attest-build-provenance action to v3
2025-09-03 08:40:01 -04:00
dependabot[bot]
41108514d9 chore(deps): bump github.com/ulikunitz/xz
Bumps the go_modules group with 1 update in the / directory: [github.com/ulikunitz/xz](https://github.com/ulikunitz/xz).


Updates `github.com/ulikunitz/xz` from 0.5.12 to 0.5.14
- [Commits](https://github.com/ulikunitz/xz/compare/v0.5.12...v0.5.14)

---
updated-dependencies:
- dependency-name: github.com/ulikunitz/xz
  dependency-version: 0.5.14
  dependency-type: indirect
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-01 23:13:21 +00:00
renovate[bot]
7e2bb7cf90 fix(deps): update module github.com/spf13/cobra to v1.10.1 (#108)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-01 23:10:00 +00:00
renovate[bot]
8ab75421b1 chore(deps): update actions/attest-build-provenance action to v3 2025-08-31 08:55:43 +00:00
renovate[bot]
4894b14b90 fix(deps): update module github.com/stretchr/testify to v1.11.1 (#104)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-27 13:34:53 +00:00
Antoine Aflalo
9a29b6b45c fix: rollback dockerfile changes 2025-08-27 09:27:11 -04:00
Antoine Aflalo
fcc4ac57ca fix(ci): rollback to docker config instead of docker_v2 2025-08-26 23:08:08 -04:00
Antoine Aflalo
4cc33db553 fix(goreleaser): fix ci 2025-08-26 23:03:47 -04:00
Antoine Aflalo
d36c5cf0f1 fix: ci issue with goreleaser 2025-08-26 23:00:53 -04:00
Antoine Aflalo
ed70eb81cd ci: update to new setup for docker images 2025-08-26 22:59:13 -04:00
Antoine Aflalo
419edbce7b fix: ci config for goreleaser 2025-08-26 22:50:14 -04:00
Antoine Aflalo
4524e94b17 ci: fix goreleaser 2025-08-26 22:47:36 -04:00
Antoine Aflalo
c6823168af fix: add attestations 2025-08-26 22:45:35 -04:00
9 changed files with 238 additions and 131 deletions

View File

@@ -8,12 +8,13 @@ name: release
on:
push:
tags:
- 'v*'
- "v*"
permissions:
contents: write # needed to write releases
id-token: write # needed for keyless signing
packages: write # needed for ghcr access
attestations: write # needed for attestations
jobs:
release:
@@ -26,18 +27,26 @@ jobs:
with:
go-version: 1.25
cache: true
- uses: sigstore/cosign-installer@v3.9.2 # installs cosign
- uses: anchore/sbom-action/download-syft@v0.20.5 # installs syft
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- uses: docker/login-action@v3 # login to ghcr
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: goreleaser/goreleaser-action@v6 # run goreleaser
with:
version: latest
version: nightly
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# After GoReleaser runs, attest all the files in ./dist/checksums.txt:
- uses: actions/attest-build-provenance@v3
with:
subject-checksums: ./dist/checksums.txt
# After GoReleaser runs, attest all the images in ./dist/digests.txt:
- uses: actions/attest-build-provenance@v3
with:
subject-checksums: ./dist/digests.txt

View File

@@ -48,6 +48,9 @@ builds:
# https://goreleaser.com/customization/checksum
checksum:
name_template: "checksums.txt"
# Change the digests filename:
docker_digest:
name_template: "digests.txt"
# create a source tarball
# https://goreleaser.com/customization/source/
source:

View File

@@ -7,13 +7,13 @@ ENV PUID=99
RUN mkdir -p "${CONFIG_FOLDER}" && \
adduser \
-S \
-H \
-h "${CONFIG_FOLDER}" \
-G "users" \
-u "${PUID}" \
"${USER}" && \
chown ${PUID}:users "${CONFIG_FOLDER}"
COPY CBZOptimizer ${APP_PATH}
@@ -26,4 +26,4 @@ RUN apk add --no-cache \
VOLUME ${CONFIG_FOLDER}
USER ${USER}
ENTRYPOINT ["/usr/local/bin/CBZOptimizer"]

View File

@@ -113,8 +113,9 @@ func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
// Channel to manage the files to process
fileChan := make(chan string)
// Channel to collect errors
errorChan := make(chan error, parallelism)
// Slice to collect errors with mutex for thread safety
var errs []error
var errMutex sync.Mutex
// WaitGroup to wait for all goroutines to finish
var wg sync.WaitGroup
@@ -138,7 +139,9 @@ func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
})
if err != nil {
log.Error().Int("worker_id", workerID).Str("file_path", path).Err(err).Msg("Worker encountered error")
errorChan <- fmt.Errorf("error processing file %s: %w", path, err)
errMutex.Lock()
errs = append(errs, fmt.Errorf("error processing file %s: %w", path, err))
errMutex.Unlock()
} else {
log.Debug().Int("worker_id", workerID).Str("file_path", path).Msg("Worker completed file successfully")
}
@@ -177,13 +180,6 @@ func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
log.Debug().Msg("File channel closed, waiting for workers to complete")
wg.Wait() // Wait for all workers to finish
log.Debug().Msg("All workers completed")
close(errorChan) // Close the error channel
var errs []error
for err := range errorChan {
errs = append(errs, err)
log.Error().Err(err).Msg("Collected processing error")
}
if len(errs) > 0 {
log.Error().Int("error_count", len(errs)).Msg("Command completed with errors")
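The hunk above swaps the buffered errorChan (which had to be sized to parallelism and drained in a second loop after the workers finished) for a plain slice guarded by a mutex; each worker appends its wrapped error directly and the result is inspected once wg.Wait() returns. A minimal, self-contained sketch of that pattern, using illustrative names rather than the repository's actual types:

package main

import (
	"errors"
	"fmt"
	"sync"
)

// processFile stands in for the real per-file conversion; it fails for one
// input so the error-collection path is exercised.
func processFile(path string) error {
	if path == "bad.cbz" {
		return fmt.Errorf("decode failed")
	}
	return nil
}

func main() {
	files := []string{"a.cbz", "bad.cbz", "c.cbz"}
	fileChan := make(chan string)

	// Shared error slice; the mutex makes concurrent appends safe.
	var (
		errs     []error
		errMutex sync.Mutex
		wg       sync.WaitGroup
	)

	for w := 0; w < 2; w++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for path := range fileChan {
				if err := processFile(path); err != nil {
					errMutex.Lock()
					errs = append(errs, fmt.Errorf("error processing file %s: %w", path, err))
					errMutex.Unlock()
				}
			}
		}()
	}

	for _, f := range files {
		fileChan <- f
	}
	close(fileChan)
	wg.Wait() // all workers done; errs can now be read without the mutex

	if len(errs) > 0 {
		fmt.Println("completed with errors:", errors.Join(errs...))
	}
}

Because the slice is only read after wg.Wait(), no channel capacity has to be guessed and there is no drain step that can be forgotten.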

go.mod (8 changes)
View File

@@ -10,9 +10,9 @@ require (
github.com/pablodz/inotifywaitgo v0.0.9
github.com/rs/zerolog v1.34.0
github.com/samber/lo v1.51.0
github.com/spf13/cobra v1.9.1
github.com/spf13/cobra v1.10.1
github.com/spf13/viper v1.20.1
github.com/stretchr/testify v1.11.0
github.com/stretchr/testify v1.11.1
github.com/thediveo/enumflag/v2 v2.0.7
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b
golang.org/x/image v0.30.0
@@ -51,9 +51,9 @@ require (
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.7.1 // indirect
github.com/spf13/pflag v1.0.6 // indirect
github.com/spf13/pflag v1.0.9 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/ulikunitz/xz v0.5.12 // indirect
github.com/ulikunitz/xz v0.5.14 // indirect
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.9.0 // indirect

go.sum (16 changes)
View File

@@ -182,10 +182,10 @@ github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -197,8 +197,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8=
github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/thediveo/enumflag/v2 v2.0.7 h1:uxXDU+rTel7Hg4X0xdqICpG9rzuI/mzLAEYXWLflOfs=
@@ -206,8 +206,8 @@ github.com/thediveo/enumflag/v2 v2.0.7/go.mod h1:bWlnNvTJuUK+huyzf3WECFLy557Ttlc
github.com/thediveo/success v1.0.2 h1:w+r3RbSjLmd7oiNnlCblfGqItcsaShcuAorRVh/+0xk=
github.com/thediveo/success v1.0.2/go.mod h1:hdPJB77k70w764lh8uLUZgNhgeTl3DYeZ4d4bwMO2CU=
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ulikunitz/xz v0.5.14 h1:uv/0Bq533iFdnMHZdRBTOlaNMdb1+ZxXIlHDZHIHcvg=
github.com/ulikunitz/xz v0.5.14/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=

View File

@@ -10,53 +10,48 @@ import (
"github.com/belphemur/CBZOptimizer/v2/internal/manga"
"github.com/belphemur/CBZOptimizer/v2/internal/utils/errs"
"github.com/belphemur/CBZOptimizer/v2/pkg/converter/constant"
"golang.org/x/exp/slices"
)
func TestConvertChapter(t *testing.T) {
testCases := []struct {
name string
genTestChapter func(path string) (*manga.Chapter, error)
split bool
expectFailure []constant.ConversionFormat
expectPartialSuccess []constant.ConversionFormat
name string
genTestChapter func(path string, isSplit bool) (*manga.Chapter, []string, error)
split bool
expectError bool
}{
{
name: "All split pages",
genTestChapter: genHugePage,
split: true,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
name: "All split pages",
genTestChapter: genHugePage,
split: true,
},
{
name: "Big Pages, no split",
genTestChapter: genHugePage,
split: false,
expectFailure: []constant.ConversionFormat{constant.WebP},
expectPartialSuccess: []constant.ConversionFormat{},
name: "Big Pages, no split",
genTestChapter: genHugePage,
split: false,
expectError: true,
},
{
name: "No split pages",
genTestChapter: genSmallPages,
split: false,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
name: "No split pages",
genTestChapter: genSmallPages,
split: false,
},
{
name: "Mix of split and no split pages",
genTestChapter: genMixSmallBig,
split: true,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{},
name: "Mix of split and no split pages",
genTestChapter: genMixSmallBig,
split: true,
},
{
name: "Mix of Huge and small page",
genTestChapter: genMixSmallHuge,
split: false,
expectFailure: []constant.ConversionFormat{},
expectPartialSuccess: []constant.ConversionFormat{constant.WebP},
name: "Mix of Huge and small page",
genTestChapter: genMixSmallHuge,
split: false,
expectError: true,
},
{
name: "Two corrupted pages",
genTestChapter: genTwoCorrupted,
split: false,
expectError: true,
},
}
// Load test genTestChapter from testdata
@@ -74,7 +69,7 @@ func TestConvertChapter(t *testing.T) {
t.Run(converter.Format().String(), func(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
chapter, err := tc.genTestChapter(temp.Name())
chapter, expectedExtensions, err := tc.genTestChapter(temp.Name(), tc.split)
if err != nil {
t.Fatalf("failed to load test genTestChapter: %v", err)
}
@@ -86,31 +81,23 @@ func TestConvertChapter(t *testing.T) {
}
convertedChapter, err := converter.ConvertChapter(context.Background(), chapter, quality, tc.split, progress)
if err != nil {
if convertedChapter != nil && slices.Contains(tc.expectPartialSuccess, converter.Format()) {
t.Logf("Partial success to convert genTestChapter: %v", err)
return
}
if slices.Contains(tc.expectFailure, converter.Format()) {
t.Logf("Expected failure to convert genTestChapter: %v", err)
return
}
if err != nil && !tc.expectError {
t.Fatalf("failed to convert genTestChapter: %v", err)
} else if slices.Contains(tc.expectFailure, converter.Format()) {
t.Fatalf("expected failure to convert genTestChapter didn't happen")
}
if len(convertedChapter.Pages) == 0 {
t.Fatalf("no pages were converted")
}
if len(convertedChapter.Pages) != len(chapter.Pages) {
t.Fatalf("converted chapter has different number of pages")
if len(convertedChapter.Pages) != len(expectedExtensions) {
t.Fatalf("converted chapter has %d pages but expected %d", len(convertedChapter.Pages), len(expectedExtensions))
}
for _, page := range convertedChapter.Pages {
if page.Extension != ".webp" {
t.Errorf("page %d was not converted to webp format", page.Index)
// Check each page's extension against the expected array
for i, page := range convertedChapter.Pages {
expectedExt := expectedExtensions[i]
if page.Extension != expectedExt {
t.Errorf("page %d has extension %s but expected %s", page.Index, page.Extension, expectedExt)
}
}
})
@@ -119,39 +106,43 @@ func TestConvertChapter(t *testing.T) {
}
}
func genHugePage(path string) (*manga.Chapter, error) {
func genHugePage(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
for i := 0; i < 1; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 1, 17000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
expectedExtensions := []string{".jpg"} // One image that's generated as JPEG
if isSplit {
expectedExtensions = []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp"}
}
// Create one tall page
img := image.NewRGBA(image.Rect(0, 0, 1, 17000))
buf := new(bytes.Buffer)
err = jpeg.Encode(buf, img, nil)
if err != nil {
return nil, nil, err
}
page := &manga.Page{
Index: 0,
Contents: buf,
Extension: ".jpg",
}
pages = append(pages, page)
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}, expectedExtensions, nil
}
func genSmallPages(path string) (*manga.Chapter, error) {
func genSmallPages(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
@@ -159,9 +150,9 @@ func genSmallPages(path string) (*manga.Chapter, error) {
for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
img := image.NewRGBA(image.Rect(0, 0, 300, 1000))
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
err = jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
return nil, nil, err
}
page := &manga.Page{
Index: uint16(i),
@@ -174,13 +165,13 @@ func genSmallPages(path string) (*manga.Chapter, error) {
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}, []string{".webp", ".webp", ".webp", ".webp", ".webp"}, nil
}
func genMixSmallBig(path string) (*manga.Chapter, error) {
func genMixSmallBig(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
@@ -190,7 +181,7 @@ func genMixSmallBig(path string) (*manga.Chapter, error) {
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
return nil, nil, err
}
page := &manga.Page{
Index: uint16(i),
@@ -199,17 +190,21 @@ func genMixSmallBig(path string) (*manga.Chapter, error) {
}
pages = append(pages, page)
}
expectedExtensions := []string{".webp", ".webp", ".webp", ".webp", ".webp"}
if isSplit {
expectedExtensions = []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp"}
}
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}, expectedExtensions, nil
}
func genMixSmallHuge(path string) (*manga.Chapter, error) {
func genMixSmallHuge(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
@@ -219,7 +214,7 @@ func genMixSmallHuge(path string) (*manga.Chapter, error) {
buf := new(bytes.Buffer)
err := jpeg.Encode(buf, img, nil)
if err != nil {
return nil, err
return nil, nil, err
}
page := &manga.Page{
Index: uint16(i),
@@ -232,5 +227,55 @@ func genMixSmallHuge(path string) (*manga.Chapter, error) {
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, nil
}, []string{".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".webp", ".jpg", ".jpg"}, nil
}
func genTwoCorrupted(path string, isSplit bool) (*manga.Chapter, []string, error) {
file, err := os.Open(path)
if err != nil {
return nil, nil, err
}
defer errs.Capture(&err, file.Close, "failed to close file")
var pages []*manga.Page
numPages := 5
corruptedIndices := []int{2, 4} // Pages 2 and 4 hold invalid image data that cannot be decoded
for i := 0; i < numPages; i++ {
var buf *bytes.Buffer
var ext string
isCorrupted := false
for _, ci := range corruptedIndices {
if i == ci {
isCorrupted = true
break
}
}
if isCorrupted {
buf = bytes.NewBufferString("corrupted data") // Invalid data, can't decode as image
ext = ".jpg"
} else {
img := image.NewRGBA(image.Rect(0, 0, 300, 1000))
buf = new(bytes.Buffer)
err = jpeg.Encode(buf, img, nil)
if err != nil {
return nil, nil, err
}
ext = ".jpg"
}
page := &manga.Page{
Index: uint16(i),
Contents: buf,
Extension: ext,
}
pages = append(pages, page)
}
// Expected: small pages to .webp, corrupted pages to .jpg (kept as is)
expectedExtensions := []string{".webp", ".webp", ".jpg", ".webp", ".jpg"}
// Even with split, corrupted pages can't be decoded so stay as is
return &manga.Chapter{
FilePath: path,
Pages: pages,
}, expectedExtensions, nil
}

View File

@@ -6,6 +6,7 @@ import (
"errors"
"fmt"
"image"
_ "image/gif"
_ "image/jpeg"
"image/png"
"runtime"
@@ -167,19 +168,24 @@ func (converter *Converter) ConvertChapter(ctx context.Context, chapter *manga.C
splitNeeded, img, format, err := converter.checkPageNeedsSplit(page, split)
if err != nil {
var pageIgnoredError *converterrors.PageIgnoredError
if errors.As(err, &pageIgnoredError) {
log.Info().Err(err).Msg("Page ignored due to image decode error")
}
select {
case errChan <- err:
case <-ctx.Done():
return
}
if img != nil {
wgConvertedPages.Add(1)
select {
case pagesChan <- manga.NewContainer(page, img, format, false):
case <-ctx.Done():
return
}
return
}
@@ -258,6 +264,7 @@ func (converter *Converter) ConvertChapter(ctx context.Context, chapter *manga.C
log.Debug().
Str("chapter", chapter.FilePath).
Int("error_count", len(errList)).
Err(errors.Join(errList...)).
Msg("Conversion completed with errors")
} else {
log.Debug().
@@ -357,7 +364,7 @@ func (converter *Converter) checkPageNeedsSplit(page *manga.Page, splitRequested
img, format, err := image.Decode(reader)
if err != nil {
log.Debug().Uint16("page_index", page.Index).Err(err).Msg("Failed to decode page image")
return false, nil, format, err
return false, nil, format, converterrors.NewPageIgnored(fmt.Sprintf("page %d: failed to decode image (%s)", page.Index, err.Error()))
}
bounds := img.Bounds()
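Two details in the hunks above work together: the blank _ "image/gif" import registers the GIF decoder with the standard image package (so image.Decode now recognizes .gif pages), and a decode failure is wrapped in a PageIgnoredError so the caller can keep the page untouched instead of failing the whole chapter. A minimal sketch of that idea, with an illustrative error type standing in for the repository's converterrors package:

package main

import (
	"bytes"
	"errors"
	"fmt"
	"image"
	_ "image/gif"  // registering decoders is what lets image.Decode
	_ "image/jpeg" // handle .gif, .jpg and .png pages
	_ "image/png"
)

// pageIgnoredError marks a page whose image data could not be decoded;
// illustrative stand-in for the converter's real error type.
type pageIgnoredError struct{ msg string }

func (e *pageIgnoredError) Error() string { return e.msg }

// decodePage tries to decode a page; on failure it returns a pageIgnoredError
// so the caller can keep the original bytes and skip conversion for that page.
func decodePage(index int, data []byte) (image.Image, error) {
	img, _, err := image.Decode(bytes.NewReader(data))
	if err != nil {
		return nil, &pageIgnoredError{msg: fmt.Sprintf("page %d: failed to decode image (%v)", index, err)}
	}
	return img, nil
}

func main() {
	_, err := decodePage(2, []byte("corrupted data"))

	var ignored *pageIgnoredError
	if errors.As(err, &ignored) {
		// Keep the page as-is and disable conversion for it.
		fmt.Println("page ignored:", ignored)
	}
}

This per-page decision is what the corrupted-page test above expects: undecodable pages stay .jpg while the rest of the chapter is still converted to .webp.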

View File

@@ -5,6 +5,7 @@ import (
"context"
"image"
"image/color"
"image/gif"
"image/jpeg"
"image/png"
"sync"
@@ -44,6 +45,11 @@ func encodeImage(img image.Image, format string) (*bytes.Buffer, string, error)
return nil, "", err
}
return buf, ".jpg", nil
case "gif":
if err := gif.Encode(buf, img, nil); err != nil {
return nil, "", err
}
return buf, ".gif", nil
case "webp":
PrepareEncoder()
if err := Encode(buf, img, 80); err != nil {
@@ -131,10 +137,11 @@ func TestConverter_ConvertChapter(t *testing.T) {
pages: []*manga.Page{
createTestPage(t, 1, 800, 1200, "png"),
createTestPage(t, 2, 800, 1200, "jpeg"),
createTestPage(t, 3, 800, 1200, "gif"),
},
split: false,
expectSplit: false,
numExpected: 2,
numExpected: 3,
},
{
name: "Tall image with split enabled",
@@ -229,24 +236,35 @@ func TestConverter_convertPage(t *testing.T) {
format string
isToBeConverted bool
expectWebP bool
expectError bool
}{
{
name: "Convert PNG to WebP",
format: "png",
isToBeConverted: true,
expectWebP: true,
expectError: false,
},
{
name: "Convert GIF to WebP",
format: "gif",
isToBeConverted: true,
expectWebP: true,
expectError: false,
},
{
name: "Already WebP",
format: "webp",
isToBeConverted: true,
expectWebP: true,
expectError: false,
},
{
name: "Skip conversion",
format: "png",
isToBeConverted: false,
expectWebP: false,
expectError: false,
},
}
@@ -258,19 +276,48 @@ func TestConverter_convertPage(t *testing.T) {
container := manga.NewContainer(page, img, tt.format, tt.isToBeConverted)
converted, err := converter.convertPage(container, 80)
require.NoError(t, err)
assert.NotNil(t, converted)
if tt.expectWebP {
assert.Equal(t, ".webp", converted.Page.Extension)
validateConvertedImage(t, converted.Page)
if tt.expectError {
assert.Error(t, err)
assert.Nil(t, converted)
} else {
assert.NotEqual(t, ".webp", converted.Page.Extension)
require.NoError(t, err)
assert.NotNil(t, converted)
if tt.expectWebP {
assert.Equal(t, ".webp", converted.Page.Extension)
validateConvertedImage(t, converted.Page)
} else {
assert.NotEqual(t, ".webp", converted.Page.Extension)
}
}
})
}
}
func TestConverter_convertPage_EncodingError(t *testing.T) {
converter := New()
err := converter.PrepareConverter()
require.NoError(t, err)
// Create a test case with nil image to test encoding error path
// when isToBeConverted is true but the image is nil, simulating a failure in the encoding step
corruptedPage := &manga.Page{
Index: 1,
Contents: &bytes.Buffer{}, // Empty buffer
Extension: ".png",
Size: 0,
}
container := manga.NewContainer(corruptedPage, nil, "png", true)
converted, err := converter.convertPage(container, 80)
// This should return nil container and error because encoding will fail with nil image
assert.Error(t, err)
assert.Nil(t, converted)
}
func TestConverter_checkPageNeedsSplit(t *testing.T) {
converter := New()
@@ -333,8 +380,8 @@ func TestConverter_ConvertChapter_Timeout(t *testing.T) {
// Create a test chapter with a few pages
pages := []*manga.Page{
createTestPage(t, 1, 800, 1200, "jpeg"),
createTestPage(t, 2, 800, 1200, "jpeg"),
createTestPage(t, 3, 800, 1200, "jpeg"),
createTestPage(t, 2, 800, 1200, "png"),
createTestPage(t, 3, 800, 1200, "gif"),
}
chapter := &manga.Chapter{