35 Commits

Author SHA1 Message Date
Antoine Aflalo
ba82003b53 Merge pull request #11 from Belphemur/renovate
perf(error): better deal with deferred errors
2024-09-09 14:47:12 -04:00
Antoine Aflalo
5f7e7de644 ci(tests): fix possible error with tests 2024-09-09 14:45:30 -04:00
Antoine Aflalo
5b183cca29 perf(error): better deal with deferred errors 2024-09-09 14:45:30 -04:00
Antoine Aflalo
d901be14fa Merge pull request #9 from Belphemur/renovatebot
ci(renovate): auto merge digest
2024-09-09 14:11:56 -04:00
Antoine Aflalo
a80997835a chore(deps): update deps 2024-09-09 14:09:39 -04:00
Antoine Aflalo
37bb12fd61 ci(renovate): auto merge digest 2024-09-09 14:09:23 -04:00
Antoine Aflalo
c19afb9f40 Merge pull request #7 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to 701f63a
2024-09-09 14:06:57 -04:00
renovate[bot]
911e1041ff fix(deps): update golang.org/x/exp digest to 701f63a 2024-09-09 18:06:01 +00:00
Antoine Aflalo
a10d589b67 Merge pull request #8 from Belphemur/fix-ci
ci: always generate and upload test results
2024-09-09 14:05:05 -04:00
Antoine Aflalo
da508fcb3f ci: always generate and upload test results 2024-09-09 14:03:28 -04:00
Antoine Aflalo
57f5282032 ci(renovate): add automerge 2024-09-09 09:27:33 -04:00
Antoine Aflalo
d4f8d8b5ff ci(test): fix the report xml file 2024-09-09 09:25:46 -04:00
Antoine Aflalo
1b026b9dbd fix(watch): add missing split option in log 2024-09-09 09:11:45 -04:00
Antoine Aflalo
12cc8d4e25 Merge pull request #6 from Belphemur/renovate/golang.org-x-exp-digest
fix(deps): update golang.org/x/exp digest to e7e105d
2024-09-06 17:10:42 -04:00
renovate[bot]
3442b2a845 fix(deps): update golang.org/x/exp digest to e7e105d 2024-09-06 21:08:46 +00:00
Antoine Aflalo
b9a1fb213a Merge pull request #5 from Belphemur/renovate/golang.org-x-image-0.x
fix(deps): update module golang.org/x/image to v0.20.0
2024-09-06 17:07:40 -04:00
renovate[bot]
278ee130e3 fix(deps): update module golang.org/x/image to v0.20.0 2024-09-04 19:30:08 +00:00
Antoine Aflalo
5357ece2b7 perf: use comment of the zip to know if it's converted instead of txt file 2024-08-29 09:38:39 -04:00
Antoine Aflalo
dbef43d376 fix(watch): fix watch command not using proper path 2024-08-28 15:06:47 -04:00
Antoine Aflalo
7c63ea49c0 fix(docker): fix docker image config folder 2024-08-28 14:36:14 -04:00
Antoine Aflalo
8a067939af Merge pull request #4 from Belphemur/renovate/major-github-artifact-actions
chore(deps): update actions/upload-artifact action to v4
2024-08-28 14:24:57 -04:00
Antoine Aflalo
f89974ac79 ci: Another attempt at reducing 2024-08-28 14:22:25 -04:00
Antoine Aflalo
ce365a6bdf ci: reduce size of page to pass tests
Fix failing test
2024-08-28 14:16:51 -04:00
renovate[bot]
9e61ff4634 chore(deps): update actions/upload-artifact action to v4 2024-08-28 17:56:03 +00:00
Antoine Aflalo
63a1b592c3 ci: add test result to pipeline 2024-08-28 13:55:33 -04:00
Antoine Aflalo
673484692b perf(webp): improve the error message for page too tall 2024-08-28 13:52:27 -04:00
Antoine Aflalo
ad35e2655f feat(webp): add partial success to conversion
So we only keep images that couldn't be optimized and return the chapter
2024-08-28 13:49:14 -04:00
Antoine Aflalo
d7f55fa886 fix(webp): improve error message in page not convertible 2024-08-28 12:09:40 -04:00
Antoine Aflalo
62638517e4 test: improve testing suite for expected failure 2024-08-28 12:03:33 -04:00
Antoine Aflalo
dbf7f6c262 fix(webp): be sure we split big page when requested 2024-08-28 11:55:53 -04:00
Antoine Aflalo
9ecd5ff3a5 fix(webp): fix the actual maximum limit 2024-08-28 11:53:26 -04:00
Antoine Aflalo
a63d2395f0 fix(webp): better handling of error for page too big for webp 2024-08-28 11:51:06 -04:00
Antoine Aflalo
839ad9ed9d fix(cbz): make pages be the first in the cbz by only be number 2024-08-28 09:16:19 -04:00
Antoine Aflalo
c8879349e1 feat(split): Make the split configurable for the watch command 2024-08-28 09:10:08 -04:00
Antoine Aflalo
5ac59a93c5 feat(split): Make the split configurable for the optimize command 2024-08-28 09:06:49 -04:00
21 changed files with 401 additions and 140 deletions

View File

@@ -28,7 +28,21 @@ jobs:
           mv go-junit-report /usr/local/bin/
       - name: Run tests
-        run: go test -v 2>&1 ./... -coverprofile=coverage.txt | go-junit-report -set-exit-code > junit.xml
+        run: |
+          set -o pipefail
+          go test -v 2>&1 ./... -coverprofile=coverage.txt | tee test-results.txt
+      - name: Analyse test results
+        if: ${{ !cancelled() }}
+        run: go-junit-report < test-results.txt > junit.xml
+      - name: Upload test result artifact
+        if: ${{ !cancelled() }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: test-results
+          path: |
+            test-results.txt
+            junit.xml
+          retention-days: 7
       - name: Upload results to Codecov
         uses: codecov/codecov-action@v4
         with:

View File

@@ -3,13 +3,13 @@ LABEL authors="Belphemur"
 ENV USER=abc
 ENV CONFIG_FOLDER=/config
 ENV PUID=99
-RUN mkdir -p "${CONFIG_FOLDER}" && adduser \
+RUN adduser \
     --disabled-password \
     --gecos "" \
     --home "$(pwd)" \
     --ingroup "users" \
-    --no-create-home \
     --uid "${PUID}" \
+    --home "${CONFIG_FOLDER}" \
    "${USER}" && \
    chown ${PUID}:${GUID} "${CONFIG_FOLDER}"
@@ -17,5 +17,6 @@ COPY CBZOptimizer /usr/local/bin/CBZOptimizer
 RUN apk add --no-cache inotify-tools bash-completion && chmod +x /usr/local/bin/CBZOptimizer && /usr/local/bin/CBZOptimizer completion bash > /etc/bash_completion.d/CBZOptimizer
+VOLUME ${CONFIG_FOLDER}
 USER ${USER}
 ENTRYPOINT ["/usr/local/bin/CBZOptimizer"]

View File

@@ -4,6 +4,7 @@ import (
 	"archive/zip"
 	"fmt"
 	"github.com/belphemur/CBZOptimizer/manga"
+	"github.com/belphemur/CBZOptimizer/utils/errs"
 	"os"
 	"time"
 )
@@ -14,15 +15,14 @@ func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
 	if err != nil {
 		return fmt.Errorf("failed to create .cbz file: %w", err)
 	}
-	defer zipFile.Close()
+	defer errs.Capture(&err, zipFile.Close, "failed to close .cbz file")

 	// Create a new ZIP writer
 	zipWriter := zip.NewWriter(zipFile)
-	err = zipWriter.SetComment("Created by CBZOptimizer")
 	if err != nil {
 		return err
 	}
-	defer zipWriter.Close()
+	defer errs.Capture(&err, zipWriter.Close, "failed to close .cbz writer")

 	// Write each page to the ZIP archive
 	for _, page := range chapter.Pages {
@@ -30,10 +30,10 @@ func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
 		var fileName string
 		if page.IsSplitted {
 			// Use the format page%03d-%02d for split pages
-			fileName = fmt.Sprintf("page_%04d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
+			fileName = fmt.Sprintf("%04d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
 		} else {
 			// Use the format page%03d for non-split pages
-			fileName = fmt.Sprintf("page_%04d%s", page.Index, page.Extension)
+			fileName = fmt.Sprintf("%04d%s", page.Index, page.Extension)
 		}
 		// Create a new file in the ZIP archive
@@ -71,18 +71,11 @@ func WriteChapterToCBZ(chapter *manga.Chapter, outputFilePath string) error {
 	}

 	if chapter.IsConverted {
-		convertedWriter, err := zipWriter.CreateHeader(&zip.FileHeader{
-			Name:     "Converted.txt",
-			Method:   zip.Deflate,
-			Modified: time.Now(),
-		})
-		if err != nil {
-			return fmt.Errorf("failed to create Converted.txt in .cbz: %w", err)
-		}
-		_, err = convertedWriter.Write([]byte(fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime)))
+		convertedString := fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", chapter.ConvertedTime)
+		err = zipWriter.SetComment(convertedString)
 		if err != nil {
-			return fmt.Errorf("failed to write Converted.txt contents: %w", err)
+			return fmt.Errorf("failed to write comment: %w", err)
 		}
 	}

View File

@@ -3,6 +3,7 @@ package cbz
 import (
 	"archive/zip"
 	"bytes"
+	"fmt"
 	"github.com/belphemur/CBZOptimizer/manga"
 	"os"
 	"testing"
@@ -10,11 +11,14 @@
 )

 func TestWriteChapterToCBZ(t *testing.T) {
+	currentTime := time.Now()
+
 	// Define test cases
 	testCases := []struct {
 		name string
 		chapter *manga.Chapter
 		expectedFiles []string
+		expectedComment string
 	}{
 		//test case where there is only one page and ComicInfo and the chapter is converted
 		{
@@ -29,9 +33,10 @@ func TestWriteChapterToCBZ(t *testing.T) {
 				},
 				ComicInfoXml: "<Series>Boundless Necromancer</Series>",
 				IsConverted: true,
-				ConvertedTime: time.Now(),
+				ConvertedTime: currentTime,
 			},
-			expectedFiles: []string{"page_0000.jpg", "ComicInfo.xml", "Converted.txt"},
+			expectedFiles: []string{"0000.jpg", "ComicInfo.xml"},
+			expectedComment: fmt.Sprintf("%s\nThis chapter has been converted by CBZOptimizer.", currentTime),
 		},
 		//test case where there is only one page and no
 		{
@@ -45,7 +50,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
 					},
 				},
 			},
-			expectedFiles: []string{"page_0000.jpg"},
+			expectedFiles: []string{"0000.jpg"},
 		},
 		{
 			name: "Multiple pages with ComicInfo",
@@ -64,7 +69,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
 				},
 				ComicInfoXml: "<Series>Boundless Necromancer</Series>",
 			},
-			expectedFiles: []string{"page_0000.jpg", "page_0001.jpg", "ComicInfo.xml"},
+			expectedFiles: []string{"0000.jpg", "0001.jpg", "ComicInfo.xml"},
 		},
 		{
 			name: "Split page",
@@ -79,7 +84,7 @@ func TestWriteChapterToCBZ(t *testing.T) {
 					},
 				},
 			},
-			expectedFiles: []string{"page_0000-01.jpg"},
+			expectedFiles: []string{"0000-01.jpg"},
 		},
 	}
@@ -125,6 +130,10 @@ func TestWriteChapterToCBZ(t *testing.T) {
 				}
 			}

+			if tc.expectedComment != "" && r.Comment != tc.expectedComment {
+				t.Errorf("Expected comment %s, but found %s", tc.expectedComment, r.Comment)
+			}
+
 			// Check if there are no unexpected files
 			if len(filesInArchive) != len(tc.expectedFiles) {
 				t.Errorf("Expected %d files, but found %d", len(tc.expectedFiles), len(filesInArchive))

View File

@@ -7,6 +7,7 @@ import (
 	"fmt"
 	"github.com/araddon/dateparse"
 	"github.com/belphemur/CBZOptimizer/manga"
+	"github.com/belphemur/CBZOptimizer/utils/errs"
 	"io"
 	"path/filepath"
 	"strings"
@@ -18,72 +19,85 @@ func LoadChapter(filePath string) (*manga.Chapter, error) {
 	if err != nil {
 		return nil, fmt.Errorf("failed to open .cbz file: %w", err)
 	}
-	defer r.Close()
+	defer errs.Capture(&err, r.Close, "failed to close opened .cbz file")

 	chapter := &manga.Chapter{
 		FilePath: filePath,
 	}

+	// Check for comment
+	if r.Comment != "" {
+		scanner := bufio.NewScanner(strings.NewReader(r.Comment))
+		if scanner.Scan() {
+			convertedTime := scanner.Text()
+			chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
+			if err == nil {
+				chapter.IsConverted = true
+			}
+		}
+	}
+
 	for _, f := range r.File {
 		if f.FileInfo().IsDir() {
 			continue
 		}

-		// Open the file inside the zip
-		rc, err := f.Open()
-		if err != nil {
-			return nil, fmt.Errorf("failed to open file inside .cbz: %w", err)
-		}
-
-		// Determine the file extension
-		ext := strings.ToLower(filepath.Ext(f.Name))
-
-		if ext == ".xml" && strings.ToLower(filepath.Base(f.Name)) == "comicinfo.xml" {
-			// Read the ComicInfo.xml file content
-			xmlContent, err := io.ReadAll(rc)
+		err := func() error {
+			// Open the file inside the zip
+			rc, err := f.Open()
 			if err != nil {
-				rc.Close()
-				return nil, fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
+				return fmt.Errorf("failed to open file inside .cbz: %w", err)
 			}
-			chapter.ComicInfoXml = string(xmlContent)
-		} else if ext == ".txt" && strings.ToLower(filepath.Base(f.Name)) == "converted.txt" {
-			textContent, err := io.ReadAll(rc)
-			if err != nil {
-				rc.Close()
-				return nil, fmt.Errorf("failed to read Converted.xml content: %w", err)
-			}
-			scanner := bufio.NewScanner(bytes.NewReader(textContent))
-			if scanner.Scan() {
-				convertedTime := scanner.Text()
-				chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
+
+			defer errs.Capture(&err, rc.Close, "failed to close file inside .cbz")
+
+			// Determine the file extension
+			ext := strings.ToLower(filepath.Ext(f.Name))
+
+			if ext == ".xml" && strings.ToLower(filepath.Base(f.Name)) == "comicinfo.xml" {
+				// Read the ComicInfo.xml file content
+				xmlContent, err := io.ReadAll(rc)
 				if err != nil {
-					rc.Close()
-					return nil, fmt.Errorf("failed to parse converted time: %w", err)
+					return fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
+				}
+				chapter.ComicInfoXml = string(xmlContent)
+			} else if !chapter.IsConverted && ext == ".txt" && strings.ToLower(filepath.Base(f.Name)) == "converted.txt" {
+				textContent, err := io.ReadAll(rc)
+				if err != nil {
+					return fmt.Errorf("failed to read Converted.xml content: %w", err)
+				}
+				scanner := bufio.NewScanner(bytes.NewReader(textContent))
+				if scanner.Scan() {
+					convertedTime := scanner.Text()
+					chapter.ConvertedTime, err = dateparse.ParseAny(convertedTime)
+					if err != nil {
+						return fmt.Errorf("failed to parse converted time: %w", err)
+					}
+					chapter.IsConverted = true
+				}
+			} else {
+				// Read the file contents for page
+				buf := new(bytes.Buffer)
+				_, err = io.Copy(buf, rc)
+				if err != nil {
+					return fmt.Errorf("failed to read file contents: %w", err)
 				}
-				chapter.IsConverted = true
-			}
-		} else {
-			// Read the file contents for page
-			buf := new(bytes.Buffer)
-			_, err = io.Copy(buf, rc)
-			if err != nil {
-				rc.Close()
-				return nil, fmt.Errorf("failed to read file contents: %w", err)
-			}

-			// Create a new Page object
-			page := &manga.Page{
-				Index: uint16(len(chapter.Pages)), // Simple index based on order
-				Extension: ext,
-				Size: uint64(buf.Len()),
-				Contents: buf,
-				IsSplitted: false,
-			}
+				// Create a new Page object
+				page := &manga.Page{
+					Index: uint16(len(chapter.Pages)), // Simple index based on order
+					Extension: ext,
+					Size: uint64(buf.Len()),
+					Contents: buf,
+					IsSplitted: false,
+				}

-			// Add the page to the chapter
-			chapter.Pages = append(chapter.Pages, page)
+				// Add the page to the chapter
+				chapter.Pages = append(chapter.Pages, page)
+			}
+			return nil
+		}()
+		if err != nil {
+			return nil, err
 		}
-		rc.Close()
 	}

 	return chapter, nil

View File

@@ -24,7 +24,7 @@ func TestLoadChapter(t *testing.T) {
 		},
 		{
 			name: "Converted Chapter",
-			filePath: "../testdata/Chapter 1_converted.cbz",
+			filePath: "../testdata/Chapter 10_converted.cbz",
 			expectedPages: 107,
 			expectedSeries: "<Series>Boundless Necromancer</Series>",
 			expectedConversion: true,

View File

@@ -29,6 +29,7 @@ func init() {
 	command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
 	command.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
 	command.Flags().BoolP("override", "o", false, "Override the original CBZ files")
+	command.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
 	command.PersistentFlags().VarP(
 		formatFlag,
 		"format", "f",
@@ -58,6 +59,11 @@ func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
 		return fmt.Errorf("invalid quality value")
 	}

+	split, err := cmd.Flags().GetBool("split")
+	if err != nil {
+		return fmt.Errorf("invalid split value")
+	}
+
 	parallelism, err := cmd.Flags().GetInt("parallelism")
 	if err != nil || parallelism < 1 {
 		return fmt.Errorf("invalid parallelism value")
@@ -86,7 +92,13 @@ func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
 		go func() {
 			defer wg.Done()
 			for path := range fileChan {
-				err := utils.Optimize(chapterConverter, path, quality, override)
+				err := utils.Optimize(&utils.OptimizeOptions{
+					ChapterConverter: chapterConverter,
+					Path:             path,
+					Quality:          quality,
+					Override:         override,
+					Split:            split,
+				})
 				if err != nil {
 					errorChan <- fmt.Errorf("error processing file %s: %w", path, err)
 				}

View File

@@ -17,17 +17,16 @@ import (
 // MockConverter is a mock implementation of the Converter interface
 type MockConverter struct{}

-func (m *MockConverter) Format() constant.ConversionFormat {
-	return constant.WebP
-}
-
-func (m *MockConverter) ConvertChapter(chapter *manga.Chapter, quality uint8, progress func(string)) (*manga.Chapter, error) {
-	// Simulate conversion by setting the IsConverted flag
+func (m *MockConverter) ConvertChapter(chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
 	chapter.IsConverted = true
 	chapter.ConvertedTime = time.Now()
 	return chapter, nil
 }

+func (m *MockConverter) Format() constant.ConversionFormat {
+	return constant.WebP
+}
+
 func (m *MockConverter) PrepareConverter() error {
 	return nil
 }
@@ -79,6 +78,7 @@ func TestConvertCbzCommand(t *testing.T) {
 	cmd.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
 	cmd.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
 	cmd.Flags().BoolP("override", "o", false, "Override the original CBZ files")
+	cmd.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")

 	// Execute the command
 	err = ConvertCbzCommand(cmd, []string{tempDir})

View File

@@ -35,6 +35,9 @@ func init() {
 	command.Flags().BoolP("override", "o", true, "Override the original CBZ files")
 	_ = viper.BindPFlag("override", command.Flags().Lookup("override"))

+	command.Flags().BoolP("split", "s", false, "Split long pages into smaller chunks")
+	_ = viper.BindPFlag("split", command.Flags().Lookup("split"))
+
 	command.PersistentFlags().VarP(
 		formatFlag,
 		"format", "f",
@@ -61,6 +64,8 @@ func WatchCommand(_ *cobra.Command, args []string) error {
 	override := viper.GetBool("override")

+	split := viper.GetBool("split")
+
 	converterType := constant.FindConversionFormat(viper.GetString("format"))
 	chapterConverter, err := converter.Get(converterType)
 	if err != nil {
@@ -71,7 +76,7 @@ func WatchCommand(_ *cobra.Command, args []string) error {
 	if err != nil {
 		return fmt.Errorf("failed to prepare converter: %v", err)
 	}
-	log.Printf("Watching [%s] with [override: %t, quality: %d, format: %s]", path, override, quality, converterType.String())
+	log.Printf("Watching [%s] with [override: %t, quality: %d, format: %s, split: %t]", path, override, quality, converterType.String(), split)

 	events := make(chan inotifywaitgo.FileEvent)
 	errors := make(chan error)
@@ -109,7 +114,13 @@ func WatchCommand(_ *cobra.Command, args []string) error {
 			for _, e := range event.Events {
 				switch e {
 				case inotifywaitgo.CLOSE_WRITE, inotifywaitgo.MOVE:
-					err := utils.Optimize(chapterConverter, event.Filename, quality, override)
+					err := utils.Optimize(&utils.OptimizeOptions{
+						ChapterConverter: chapterConverter,
+						Path:             event.Filename,
+						Quality:          quality,
+						Override:         override,
+						Split:            split,
+					})
 					if err != nil {
 						errors <- fmt.Errorf("error processing file %s: %w", event.Filename, err)
 					}

View File

@@ -12,7 +12,10 @@ import (
 type Converter interface {
 	// Format of the converter
 	Format() (format constant.ConversionFormat)
-	ConvertChapter(chapter *manga.Chapter, quality uint8, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error)
+	// ConvertChapter converts a manga chapter to the specified format.
+	//
+	// Returns partial success where some pages are converted and some are not.
+	ConvertChapter(chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error)
 	PrepareConverter() error
 }
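Note: the rewritten doc comment makes the contract explicit. ConvertChapter may now return a usable chapter together with a non-nil error when only some pages could be converted. A minimal caller-side sketch of that contract (package and helper name are illustrative, not part of this changeset):

    package example

    import (
        "fmt"

        "github.com/belphemur/CBZOptimizer/manga"
    )

    // useResult treats a non-nil chapter with a non-nil error as a partial
    // success instead of discarding the pages that did convert.
    func useResult(chapter *manga.Chapter, err error) (*manga.Chapter, error) {
        if chapter == nil {
            return nil, fmt.Errorf("conversion failed: %v", err)
        }
        if err != nil {
            fmt.Printf("partial success: %v\n", err)
        }
        return chapter, nil
    }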

View File

@@ -2,7 +2,9 @@ package converter

 import (
 	"bytes"
+	"github.com/belphemur/CBZOptimizer/converter/constant"
 	"github.com/belphemur/CBZOptimizer/manga"
+	"golang.org/x/exp/slices"
 	"image"
 	"image/jpeg"
 	"os"
@@ -12,20 +14,46 @@
 func TestConvertChapter(t *testing.T) {
 	testCases := []struct {
 		name string
 		genTestChapter func(path string) (*manga.Chapter, error)
+		split bool
+		expectFailure []constant.ConversionFormat
+		expectPartialSuccess []constant.ConversionFormat
 	}{
 		{
 			name: "All split pages",
-			genTestChapter: genBigPages,
+			genTestChapter: genHugePage,
+			split: true,
+			expectFailure: []constant.ConversionFormat{},
+			expectPartialSuccess: []constant.ConversionFormat{},
 		},
 		{
-			name: "No split pages",
-			genTestChapter: genSmallPages,
+			name: "Big Pages, no split",
+			genTestChapter: genHugePage,
+			split: false,
+			expectFailure: []constant.ConversionFormat{constant.WebP},
+			expectPartialSuccess: []constant.ConversionFormat{},
 		},
 		{
-			name: "Mix of split and no split pages",
-			genTestChapter: genMixSmallBig,
+			name: "No split pages",
+			genTestChapter: genSmallPages,
+			split: false,
+			expectFailure: []constant.ConversionFormat{},
+			expectPartialSuccess: []constant.ConversionFormat{},
+		},
+		{
+			name: "Mix of split and no split pages",
+			genTestChapter: genMixSmallBig,
+			split: true,
+			expectFailure: []constant.ConversionFormat{},
+			expectPartialSuccess: []constant.ConversionFormat{},
+		},
+		{
+			name: "Mix of Huge and small page",
+			genTestChapter: genMixSmallHuge,
+			split: false,
+			expectFailure: []constant.ConversionFormat{},
+			expectPartialSuccess: []constant.ConversionFormat{constant.WebP},
 		},
 	}

 	// Load test genTestChapter from testdata
@@ -50,19 +78,33 @@
 			quality := uint8(80)
-			progress := func(msg string) {
+			progress := func(msg string, current uint32, total uint32) {
 				t.Log(msg)
 			}

-			convertedChapter, err := converter.ConvertChapter(chapter, quality, progress)
+			convertedChapter, err := converter.ConvertChapter(chapter, quality, tc.split, progress)
 			if err != nil {
+				if convertedChapter != nil && slices.Contains(tc.expectPartialSuccess, converter.Format()) {
+					t.Logf("Partial success to convert genTestChapter: %v", err)
+					return
+				}
+				if slices.Contains(tc.expectFailure, converter.Format()) {
+					t.Logf("Expected failure to convert genTestChapter: %v", err)
+					return
+				}
 				t.Fatalf("failed to convert genTestChapter: %v", err)
+			} else if slices.Contains(tc.expectFailure, converter.Format()) {
+				t.Fatalf("expected failure to convert genTestChapter didn't happen")
 			}

 			if len(convertedChapter.Pages) == 0 {
 				t.Fatalf("no pages were converted")
 			}
+			if len(convertedChapter.Pages) != len(chapter.Pages) {
+				t.Fatalf("converted chapter has different number of pages")
+			}

 			for _, page := range convertedChapter.Pages {
 				if page.Extension != ".webp" {
 					t.Errorf("page %d was not converted to webp format", page.Index)
@@ -74,7 +116,7 @@
 	}
 }

-func genBigPages(path string) (*manga.Chapter, error) {
+func genHugePage(path string) (*manga.Chapter, error) {
 	file, err := os.Open(path)
 	if err != nil {
 		return nil, err
@@ -82,8 +124,8 @@
 	defer file.Close()

 	var pages []*manga.Page
-	for i := 0; i < 5; i++ { // Assuming there are 5 pages for the test
-		img := image.NewRGBA(image.Rect(0, 0, 300, 10000))
+	for i := 0; i < 1; i++ { // Assuming there are 5 pages for the test
+		img := image.NewRGBA(image.Rect(0, 0, 1, 17000))
 		buf := new(bytes.Buffer)
 		err := jpeg.Encode(buf, img, nil)
 		if err != nil {
@@ -160,3 +202,32 @@
 		Pages: pages,
 	}, nil
 }
+
+func genMixSmallHuge(path string) (*manga.Chapter, error) {
+	file, err := os.Open(path)
+	if err != nil {
+		return nil, err
+	}
+	defer file.Close()
+
+	var pages []*manga.Page
+	for i := 0; i < 10; i++ { // Assuming there are 5 pages for the test
+		img := image.NewRGBA(image.Rect(0, 0, 1, 2000*(i+1)))
+		buf := new(bytes.Buffer)
+		err := jpeg.Encode(buf, img, nil)
+		if err != nil {
+			return nil, err
+		}
+		page := &manga.Page{
+			Index: uint16(i),
+			Contents: buf,
+			Extension: ".jpg",
+		}
+		pages = append(pages, page)
+	}
+
+	return &manga.Chapter{
+		FilePath: path,
+		Pages: pages,
+	}, nil
+}

View File

@@ -0,0 +1,13 @@
+package errors
+
+type PageIgnoredError struct {
+	s string
+}
+
+func (e *PageIgnoredError) Error() string {
+	return e.s
+}
+
+func NewPageIgnored(text string) error {
+	return &PageIgnoredError{text}
+}
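PageIgnoredError gives converters a dedicated type for pages they deliberately skip rather than fail on; the webp converter below returns it for pages taller than the format allows, and utils/optimize.go downgrades it to a non-fatal condition. A minimal sketch of the intended round trip, assuming callers unwrap with errors.As (names are illustrative):

    package example

    import (
        "errors"
        "fmt"

        converrors "github.com/belphemur/CBZOptimizer/converter/errors"
    )

    func report(err error) {
        // A skipped page is reported as a warning; anything else stays fatal.
        var ignored *converrors.PageIgnoredError
        if errors.As(err, &ignored) {
            fmt.Println("warning:", ignored.Error())
            return
        }
        fmt.Println("fatal:", err)
    }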

View File

@@ -2,9 +2,11 @@ package webp

 import (
 	"bytes"
+	"errors"
 	"fmt"
 	"github.com/belphemur/CBZOptimizer/converter/constant"
-	packer2 "github.com/belphemur/CBZOptimizer/manga"
+	converterrors "github.com/belphemur/CBZOptimizer/converter/errors"
+	"github.com/belphemur/CBZOptimizer/manga"
 	"github.com/oliamb/cutter"
 	"golang.org/x/exp/slices"
 	_ "golang.org/x/image/webp"
@@ -17,6 +19,8 @@
 	"sync/atomic"
 )

+const webpMaxHeight = 16383
+
 type Converter struct {
 	maxHeight int
 	cropHeight int
@@ -48,7 +52,7 @@ func (converter *Converter) PrepareConverter() error {
 	return nil
 }

-func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uint8, progress func(message string, current uint32, total uint32)) (*packer2.Chapter, error) {
+func (converter *Converter) ConvertChapter(chapter *manga.Chapter, quality uint8, split bool, progress func(message string, current uint32, total uint32)) (*manga.Chapter, error) {
 	err := converter.PrepareConverter()
 	if err != nil {
 		return nil, err
@@ -57,7 +61,7 @@ func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uin
 	var wgConvertedPages sync.WaitGroup

 	maxGoroutines := runtime.NumCPU()
-	pagesChan := make(chan *packer2.PageContainer, maxGoroutines)
+	pagesChan := make(chan *manga.PageContainer, maxGoroutines)
 	errChan := make(chan error, maxGoroutines)

 	var wgPages sync.WaitGroup
@@ -65,13 +69,13 @@ func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uin
 	guard := make(chan struct{}, maxGoroutines)
 	pagesMutex := sync.Mutex{}
-	var pages []*packer2.Page
+	var pages []*manga.Page
 	var totalPages = uint32(len(chapter.Pages))

 	go func() {
 		for page := range pagesChan {
 			guard <- struct{}{} // would block if guard channel is already filled
-			go func(pageToConvert *packer2.PageContainer) {
+			go func(pageToConvert *manga.PageContainer) {
 				defer wgConvertedPages.Done()
 				convertedPage, err := converter.convertPage(pageToConvert, quality)
 				if err != nil {
@@ -101,31 +105,36 @@ func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uin
 	}()

 	for _, page := range chapter.Pages {
-		go func(page *packer2.Page) {
+		go func(page *manga.Page) {
 			defer wgPages.Done()
-			splitNeeded, img, format, err := converter.checkPageNeedsSplit(page)
+			splitNeeded, img, format, err := converter.checkPageNeedsSplit(page, split)
 			if err != nil {
-				errChan <- fmt.Errorf("error checking if page %d of genTestChapter %s needs split: %v", page.Index, chapter.FilePath, err)
+				errChan <- err
+				// Partial error in this case, we want the page, but not converting it
+				if img != nil {
+					wgConvertedPages.Add(1)
+					pagesChan <- manga.NewContainer(page, img, format, false)
+				}
 				return
 			}

 			if !splitNeeded {
 				wgConvertedPages.Add(1)
-				pagesChan <- packer2.NewContainer(page, img, format)
+				pagesChan <- manga.NewContainer(page, img, format, true)
 				return
 			}
 			images, err := converter.cropImage(img)
 			if err != nil {
-				errChan <- fmt.Errorf("error converting page %d of genTestChapter %s to webp: %v", page.Index, chapter.FilePath, err)
+				errChan <- err
 				return
 			}

 			atomic.AddUint32(&totalPages, uint32(len(images)-1))
 			for i, img := range images {
-				page := &packer2.Page{Index: page.Index, IsSplitted: true, SplitPartIndex: uint16(i)}
+				page := &manga.Page{Index: page.Index, IsSplitted: true, SplitPartIndex: uint16(i)}
 				wgConvertedPages.Add(1)
-				pagesChan <- packer2.NewContainer(page, img, "N/A")
+				pagesChan <- manga.NewContainer(page, img, "N/A", true)
 			}
 		}(page)
 	}
@@ -140,11 +149,12 @@ func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uin
 		errList = append(errList, err)
 	}

+	var aggregatedError error = nil
 	if len(errList) > 0 {
-		return nil, fmt.Errorf("encountered errors: %v", errList)
+		aggregatedError = errors.Join(errList...)
 	}

-	slices.SortFunc(pages, func(a, b *packer2.Page) int {
+	slices.SortFunc(pages, func(a, b *manga.Page) int {
 		if a.Index == b.Index {
 			return int(b.SplitPartIndex - a.SplitPartIndex)
 		}
@@ -154,7 +164,7 @@ func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uin
 	runtime.GC()

-	return chapter, nil
+	return chapter, aggregatedError
 }

 func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
@@ -190,7 +200,7 @@ func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
 	return parts, nil
 }

-func (converter *Converter) checkPageNeedsSplit(page *packer2.Page) (bool, image.Image, string, error) {
+func (converter *Converter) checkPageNeedsSplit(page *manga.Page, splitRequested bool) (bool, image.Image, string, error) {
 	reader := io.Reader(bytes.NewBuffer(page.Contents.Bytes()))
 	img, format, err := image.Decode(reader)
 	if err != nil {
@@ -200,13 +210,19 @@ func (converter *Converter) checkPageNeedsSplit(page *packer2.Page) (bool, image
 	bounds := img.Bounds()
 	height := bounds.Dy()

-	return height >= converter.maxHeight, img, format, nil
+	if height >= webpMaxHeight && !splitRequested {
+		return false, img, format, converterrors.NewPageIgnored(fmt.Sprintf("page %d is too tall [max: %dpx] to be converted to webp format", page.Index, webpMaxHeight))
+	}
+	return height >= converter.maxHeight && splitRequested, img, format, nil
 }

-func (converter *Converter) convertPage(container *packer2.PageContainer, quality uint8) (*packer2.PageContainer, error) {
+func (converter *Converter) convertPage(container *manga.PageContainer, quality uint8) (*manga.PageContainer, error) {
 	if container.Format == "webp" {
 		return container, nil
 	}
+	if !container.IsToBeConverted {
+		return container, nil
+	}
 	converted, err := converter.convert(container.Image, uint(quality))
 	if err != nil {
 		return nil, err

6
go.mod
View File

@@ -11,8 +11,8 @@ require (
 	github.com/spf13/cobra v1.8.1
 	github.com/spf13/viper v1.19.0
 	github.com/thediveo/enumflag/v2 v2.0.5
-	golang.org/x/exp v0.0.0-20240823005443-9b4947da3948
-	golang.org/x/image v0.19.0
+	golang.org/x/exp v0.0.0-20240909161429-701f63a606c0
+	golang.org/x/image v0.20.0
 )

 require (
@@ -43,7 +43,7 @@ require (
 	go.uber.org/atomic v1.9.0 // indirect
 	go.uber.org/multierr v1.9.0 // indirect
 	golang.org/x/sys v0.18.0 // indirect
-	golang.org/x/text v0.17.0 // indirect
+	golang.org/x/text v0.18.0 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )

16
go.sum
View File

@@ -118,18 +118,18 @@ go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
 go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
 go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
 go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
-golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA=
-golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ=
-golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ=
-golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys=
+golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
+golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
+golang.org/x/image v0.20.0 h1:7cVCUjQwfL18gyBJOmYvptfSHS8Fb3YUDtfLIZ7Nbpw=
+golang.org/x/image v0.20.0/go.mod h1:0a88To4CYVBAHp5FXJm8o7QbUl37Vd85ply1vyD8auM=
 golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
 golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
 golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
 golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
-golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
-golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
-golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
+golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
+golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/tools v0.25.0 h1:oFU9pkj/iJgs+0DT+VMHrx+oBKs/LJMV+Uvg78sl+fE=
+golang.org/x/tools v0.25.0/go.mod h1:/vtpO8WL1N9cQC3FN5zPqb//fRXskFHbLKk4OW1Q7rg=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=

View File

@@ -10,8 +10,10 @@ type PageContainer struct {
 	Image image.Image
 	// Format is a string representing the format of the image (e.g., "png", "jpeg", "webp").
 	Format string
+	// IsToBeConverted is a boolean flag indicating whether the image needs to be converted to another format.
+	IsToBeConverted bool
 }

-func NewContainer(Page *Page, img image.Image, format string) *PageContainer {
-	return &PageContainer{Page: Page, Image: img, Format: format}
+func NewContainer(Page *Page, img image.Image, format string, isToBeConverted bool) *PageContainer {
+	return &PageContainer{Page: Page, Image: img, Format: format, IsToBeConverted: isToBeConverted}
 }

View File

@@ -2,5 +2,16 @@
   "$schema": "https://docs.renovatebot.com/renovate-schema.json",
   "extends": [
     "config:recommended"
+  ],
+  "packageRules": [
+    {
+      "matchUpdateTypes": [
+        "minor",
+        "patch",
+        "digest"
+      ],
+      "matchCurrentVersion": "!/^0/",
+      "automerge": true
+    }
   ]
 }

View File

@@ -0,0 +1,16 @@
+package errs
+
+import (
+	"errors"
+	"fmt"
+)
+
+// Capture runs errFunc and assigns the error, if any, to *errPtr. Preserves the
+// original error by wrapping with errors.Join if the errFunc err is non-nil.
+func Capture(errPtr *error, errFunc func() error, msg string) {
+	err := errFunc()
+	if err == nil {
+		return
+	}
+	*errPtr = errors.Join(*errPtr, fmt.Errorf("%s: %w", msg, err))
+}
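One subtlety worth flagging: Capture joins the close error into whatever *error it is handed, so when it is deferred the joined error only reaches a caller if that pointer refers to the function's named return value. A minimal sketch, assuming a named error return (file path and function name are illustrative):

    package example

    import (
        "os"

        "github.com/belphemur/CBZOptimizer/utils/errs"
    )

    // The deferred close error is joined into err because err is the named
    // return value of this function.
    func readSomething(path string) (err error) {
        f, err := os.Open(path)
        if err != nil {
            return err
        }
        defer errs.Capture(&err, f.Close, "failed to close file")
        // ... read from f ...
        return nil
    }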

View File

@@ -0,0 +1,58 @@
+package errs
+
+import (
+	"errors"
+	"fmt"
+	"testing"
+)
+
+func TestCapture(t *testing.T) {
+	tests := []struct {
+		name     string
+		initial  error
+		errFunc  func() error
+		msg      string
+		expected string
+	}{
+		{
+			name:     "No error from errFunc",
+			initial:  nil,
+			errFunc:  func() error { return nil },
+			msg:      "test message",
+			expected: "",
+		},
+		{
+			name:     "Error from errFunc with no initial error",
+			initial:  nil,
+			errFunc:  func() error { return errors.New("error from func") },
+			msg:      "test message",
+			expected: "test message: error from func",
+		},
+		{
+			name:     "Error from errFunc with initial error",
+			initial:  errors.New("initial error"),
+			errFunc:  func() error { return errors.New("error from func") },
+			msg:      "test message",
+			expected: "initial error\ntest message: error from func",
+		},
+		{
+			name:     "Error from errFunc with initial wrapped error",
+			initial:  fmt.Errorf("wrapped error: %w", errors.New("initial error")),
+			errFunc:  func() error { return errors.New("error from func") },
+			msg:      "test message",
+			expected: "wrapped error: initial error\ntest message: error from func",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			var err error = tt.initial
+			Capture(&err, tt.errFunc, tt.msg)
+			if err != nil && err.Error() != tt.expected {
+				t.Errorf("expected %q, got %q", tt.expected, err.Error())
+			} else if err == nil && tt.expected != "" {
+				t.Errorf("expected %q, got nil", tt.expected)
+			}
+		})
+	}
+}

View File

@@ -1,43 +1,60 @@
 package utils

 import (
+	"errors"
 	"fmt"
 	"github.com/belphemur/CBZOptimizer/cbz"
 	"github.com/belphemur/CBZOptimizer/converter"
+	errors2 "github.com/belphemur/CBZOptimizer/converter/errors"
 	"log"
 	"strings"
 )

+type OptimizeOptions struct {
+	ChapterConverter converter.Converter
+	Path             string
+	Quality          uint8
+	Override         bool
+	Split            bool
+}
+
 // Optimize optimizes a CBZ file using the specified converter.
-func Optimize(chapterConverter converter.Converter, path string, quality uint8, override bool) error {
-	log.Printf("Processing file: %s\n", path)
+func Optimize(options *OptimizeOptions) error {
+	log.Printf("Processing file: %s\n", options.Path)

 	// Load the chapter
-	chapter, err := cbz.LoadChapter(path)
+	chapter, err := cbz.LoadChapter(options.Path)
 	if err != nil {
 		return fmt.Errorf("failed to load chapter: %v", err)
 	}

 	if chapter.IsConverted {
-		log.Printf("Chapter already converted: %s", path)
+		log.Printf("Chapter already converted: %s", options.Path)
 		return nil
 	}

 	// Convert the chapter
-	convertedChapter, err := chapterConverter.ConvertChapter(chapter, quality, func(msg string, current uint32, total uint32) {
+	convertedChapter, err := options.ChapterConverter.ConvertChapter(chapter, options.Quality, options.Split, func(msg string, current uint32, total uint32) {
 		if current%10 == 0 || current == total {
 			log.Printf("[%s] Converting: %d/%d", chapter.FilePath, current, total)
 		}
 	})
 	if err != nil {
-		return fmt.Errorf("failed to convert chapter: %v", err)
+		var pageIgnoredError *errors2.PageIgnoredError
+		if !errors.As(err, &pageIgnoredError) {
+			return fmt.Errorf("failed to convert chapter: %v", err)
+		}
 	}
+	if convertedChapter == nil {
+		return fmt.Errorf("failed to convert chapter")
+	}
+
 	convertedChapter.SetConverted()

 	// Write the converted chapter back to a CBZ file
-	outputPath := path
-	if !override {
-		outputPath = strings.TrimSuffix(path, ".cbz") + "_converted.cbz"
+	outputPath := options.Path
+	if !options.Override {
+		outputPath = strings.TrimSuffix(options.Path, ".cbz") + "_converted.cbz"
 	}

 	err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
 	if err != nil {