diff --git a/cbz/cbz_creator.go b/cbz/cbz_creator.go
new file mode 100644
index 0000000..1faca18
--- /dev/null
+++ b/cbz/cbz_creator.go
@@ -0,0 +1,61 @@
+package cbz
+
+import (
+ "CBZOptimizer/packer"
+ "archive/zip"
+ "fmt"
+ "os"
+)
+
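+// WriteChapterToCBZ writes the chapter's pages (named page_NNN, or page_NNN-MM for
+// split pages) and, if present, its ComicInfo.xml into a new .cbz archive at outputFilePath.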
+func WriteChapterToCBZ(chapter *packer.Chapter, outputFilePath string) error {
+ // Create a new ZIP file
+ zipFile, err := os.Create(outputFilePath)
+ if err != nil {
+ return fmt.Errorf("failed to create .cbz file: %w", err)
+ }
+ defer zipFile.Close()
+
+ // Create a new ZIP writer
+ zipWriter := zip.NewWriter(zipFile)
+ defer zipWriter.Close()
+
+ // Write each page to the ZIP archive
+ for _, page := range chapter.Pages {
+ // Construct the file name for the page
+ var fileName string
+ if page.IsSplitted {
+ // Use the format page_%03d-%02d for split pages
+ fileName = fmt.Sprintf("page_%03d-%02d%s", page.Index, page.SplitPartIndex, page.Extension)
+ } else {
+ // Use the format page_%03d for non-split pages
+ fileName = fmt.Sprintf("page_%03d%s", page.Index, page.Extension)
+ }
+
+ // Create a new file in the ZIP archive
+ fileWriter, err := zipWriter.Create(fileName)
+ if err != nil {
+ return fmt.Errorf("failed to create file in .cbz: %w", err)
+ }
+
+ // Write the page contents to the file
+ _, err = fileWriter.Write(page.Contents.Bytes())
+ if err != nil {
+ return fmt.Errorf("failed to write page contents: %w", err)
+ }
+ }
+
+ // Optionally, write the ComicInfo.xml file if present
+ if chapter.ComicInfoXml != "" {
+ comicInfoWriter, err := zipWriter.Create("ComicInfo.xml")
+ if err != nil {
+ return fmt.Errorf("failed to create ComicInfo.xml in .cbz: %w", err)
+ }
+
+ _, err = comicInfoWriter.Write([]byte(chapter.ComicInfoXml))
+ if err != nil {
+ return fmt.Errorf("failed to write ComicInfo.xml contents: %w", err)
+ }
+ }
+
+ return nil
+}
diff --git a/cbz/cbz_creator_test.go b/cbz/cbz_creator_test.go
new file mode 100644
index 0000000..e6dde85
--- /dev/null
+++ b/cbz/cbz_creator_test.go
@@ -0,0 +1,115 @@
+package cbz
+
+import (
+ "CBZOptimizer/packer"
+ "archive/zip"
+ "bytes"
+ "os"
+ "testing"
+)
+
+func TestWriteChapterToCBZ(t *testing.T) {
+ // Define test cases
+ testCases := []struct {
+ name string
+ chapter *packer.Chapter
+ expectedFiles []string
+ }{
+ {
+ name: "Single page, no ComicInfo",
+ chapter: &packer.Chapter{
+ Pages: []*packer.Page{
+ {
+ Index: 0,
+ Extension: ".jpg",
+ Contents: bytes.NewBuffer([]byte("image data")),
+ },
+ },
+ },
+ expectedFiles: []string{"page_000.jpg"},
+ },
+ {
+ name: "Multiple pages with ComicInfo",
+ chapter: &packer.Chapter{
+ Pages: []*packer.Page{
+ {
+ Index: 0,
+ Extension: ".jpg",
+ Contents: bytes.NewBuffer([]byte("image data 1")),
+ },
+ {
+ Index: 1,
+ Extension: ".jpg",
+ Contents: bytes.NewBuffer([]byte("image data 2")),
+ },
+ },
+ ComicInfoXml: "Boundless Necromancer",
+ },
+ expectedFiles: []string{"page_000.jpg", "page_001.jpg", "ComicInfo.xml"},
+ },
+ {
+ name: "Split page",
+ chapter: &packer.Chapter{
+ Pages: []*packer.Page{
+ {
+ Index: 0,
+ Extension: ".jpg",
+ Contents: bytes.NewBuffer([]byte("split image data")),
+ IsSplitted: true,
+ SplitPartIndex: 1,
+ },
+ },
+ },
+ expectedFiles: []string{"page_000-01.jpg"},
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.name, func(t *testing.T) {
+ // Create a temporary file for the .cbz output
+ tempFile, err := os.CreateTemp("", "*.cbz")
+ if err != nil {
+ t.Fatalf("Failed to create temporary file: %v", err)
+ }
+ defer os.Remove(tempFile.Name())
+
+ // Write the chapter to the .cbz file
+ err = WriteChapterToCBZ(tc.chapter, tempFile.Name())
+ if err != nil {
+ t.Fatalf("Failed to write chapter to CBZ: %v", err)
+ }
+
+ // Open the .cbz file as a zip archive
+ r, err := zip.OpenReader(tempFile.Name())
+ if err != nil {
+ t.Fatalf("Failed to open CBZ file: %v", err)
+ }
+ defer r.Close()
+
+ // Collect the names of the files in the archive
+ var filesInArchive []string
+ for _, f := range r.File {
+ filesInArchive = append(filesInArchive, f.Name)
+ }
+
+ // Check if all expected files are present
+ for _, expectedFile := range tc.expectedFiles {
+ found := false
+ for _, actualFile := range filesInArchive {
+ if actualFile == expectedFile {
+ found = true
+ break
+ }
+ }
+ if !found {
+ t.Errorf("Expected file %s not found in archive", expectedFile)
+ }
+ }
+
+ // Check if there are no unexpected files
+ if len(filesInArchive) != len(tc.expectedFiles) {
+ t.Errorf("Expected %d files, but found %d", len(tc.expectedFiles), len(filesInArchive))
+ }
+ })
+ }
+}
diff --git a/cbz/cbz_loader.go b/cbz/cbz_loader.go
new file mode 100644
index 0000000..f08e445
--- /dev/null
+++ b/cbz/cbz_loader.go
@@ -0,0 +1,71 @@
+package cbz
+
+import (
+ "CBZOptimizer/packer"
+ "archive/zip"
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "path/filepath"
+ "strings"
+)
+
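+// LoadChapter opens a .cbz archive and returns a Chapter whose pages are indexed in
+// archive order, with the ComicInfo.xml content (if present) loaded into ComicInfoXml.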
+func LoadChapter(filePath string) (*packer.Chapter, error) {
+ // Open the .cbz file
+ r, err := zip.OpenReader(filePath)
+ if err != nil {
+ return nil, fmt.Errorf("failed to open .cbz file: %w", err)
+ }
+ defer r.Close()
+
+ chapter := &packer.Chapter{
+ FilePath: filePath,
+ }
+
+ for _, f := range r.File {
+ if !f.FileInfo().IsDir() {
+ // Open the file inside the zip
+ rc, err := f.Open()
+ if err != nil {
+ return nil, fmt.Errorf("failed to open file inside .cbz: %w", err)
+ }
+
+ // Determine the file extension
+ ext := strings.ToLower(filepath.Ext(f.Name))
+
+ if ext == ".xml" && strings.ToLower(filepath.Base(f.Name)) == "comicinfo.xml" {
+ // Read the ComicInfo.xml file content
+ xmlContent, err := io.ReadAll(rc)
+ if err != nil {
+ rc.Close()
+ return nil, fmt.Errorf("failed to read ComicInfo.xml content: %w", err)
+ }
+ chapter.ComicInfoXml = string(xmlContent)
+ } else {
+ // Read the file contents for page
+ buf := new(bytes.Buffer)
+ _, err = io.Copy(buf, rc)
+ if err != nil {
+ rc.Close()
+ return nil, fmt.Errorf("failed to read file contents: %w", err)
+ }
+
+ // Create a new Page object
+ page := &packer.Page{
+ Index: uint16(len(chapter.Pages)), // Simple index based on order
+ Extension: ext,
+ Size: uint64(buf.Len()),
+ Contents: buf,
+ IsSplitted: false,
+ }
+
+ // Add the page to the chapter
+ chapter.Pages = append(chapter.Pages, page)
+ }
+ rc.Close()
+ }
+ }
+
+ return chapter, nil
+}
diff --git a/cbz/cbz_loader_test.go b/cbz/cbz_loader_test.go
new file mode 100644
index 0000000..9fdb5b6
--- /dev/null
+++ b/cbz/cbz_loader_test.go
@@ -0,0 +1,30 @@
+package cbz
+
+import (
+ "strings"
+ "testing"
+)
+
+func TestLoadChapter(t *testing.T) {
+ // Define the path to the .cbz file
+ chapterFilePath := "../testdata/Chapter 1.cbz"
+
+ // Load the chapter
+ chapter, err := LoadChapter(chapterFilePath)
+ if err != nil {
+ t.Fatalf("Failed to load chapter: %v", err)
+ }
+
+ // Check the number of pages
+ expectedPages := 16
+ actualPages := len(chapter.Pages)
+ if actualPages != expectedPages {
+ t.Errorf("Expected %d pages, but got %d", expectedPages, actualPages)
+ }
+
+ // Check if ComicInfoXml contains the expected series name
+ expectedSeries := "Boundless Necromancer"
+ if !strings.Contains(chapter.ComicInfoXml, expectedSeries) {
+ t.Errorf("ComicInfoXml does not contain the expected series: %s", expectedSeries)
+ }
+}
diff --git a/cmd/convert_cbz_command.go b/cmd/convert_cbz_command.go
new file mode 100644
index 0000000..ae93bf5
--- /dev/null
+++ b/cmd/convert_cbz_command.go
@@ -0,0 +1,120 @@
+package cmd
+
+import (
+ "CBZOptimizer/cbz"
+ "CBZOptimizer/converter"
+ "CBZOptimizer/converter/constant"
+ "fmt"
+ "github.com/spf13/cobra"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+)
+
+func init() {
+ command := &cobra.Command{
+ Use: "convert",
+ Short: "Convert CBZ files using a specified converter",
+ RunE: ConvertCbzCommand,
+ Args: cobra.ExactArgs(1),
+ }
+ command.Flags().Uint8P("quality", "q", 85, "Quality for conversion (0-100)")
+ command.Flags().IntP("parallelism", "n", 2, "Number of chapters to convert in parallel")
+ command.Flags().BoolP("override", "o", false, "Override the original CBZ files")
+ AddCommand(command)
+}
+
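+// ConvertCbzCommand walks the given path, converts every .cbz file it finds using the
+// WebP converter with up to --parallelism workers, and writes each result next to the
+// original (or over it when --override is set). Example:
+//
+//  cbzconverter convert /path/to/manga --quality 85 --parallelism 4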
+func ConvertCbzCommand(cmd *cobra.Command, args []string) error {
+ path := args[0]
+ if path == "" {
+ return fmt.Errorf("path is required")
+ }
+
+ quality, err := cmd.Flags().GetUint8("quality")
+ if err != nil {
+ return fmt.Errorf("invalid quality value")
+ }
+
+ override, err := cmd.Flags().GetBool("override")
+ if err != nil {
+ return fmt.Errorf("invalid quality value")
+ }
+
+ parallelism, err := cmd.Flags().GetInt("parallelism")
+ if err != nil || parallelism < 1 {
+ return fmt.Errorf("invalid parallelism value")
+ }
+
+ chapterConverter, err := converter.Get(constant.ImageFormatWebP)
+ if err != nil {
+ return fmt.Errorf("failed to get chapterConverter: %v", err)
+ }
+ // Channel to manage the files to process
+ fileChan := make(chan string)
+
+ // WaitGroup to wait for all goroutines to finish
+ var wg sync.WaitGroup
+
+ // Start worker goroutines
+ for i := 0; i < parallelism; i++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for path := range fileChan {
+ fmt.Printf("Processing file: %s\n", path)
+
+ // Load the chapter
+ chapter, err := cbz.LoadChapter(path)
+ if err != nil {
+ fmt.Printf("Failed to load chapter: %v\n", err)
+ continue
+ }
+
+ // Convert the chapter
+ convertedChapter, err := chapterConverter.ConvertChapter(chapter, quality, func(msg string) {
+ fmt.Println(msg)
+ })
+ if err != nil {
+ fmt.Printf("Failed to convert chapter: %v\n", err)
+ continue
+ }
+
+ // Write the converted chapter back to a CBZ file
+ outputPath := path
+ if !override {
+ outputPath = strings.TrimSuffix(path, ".cbz") + "_converted.cbz"
+ }
+ err = cbz.WriteChapterToCBZ(convertedChapter, outputPath)
+ if err != nil {
+ fmt.Printf("Failed to write converted chapter: %v\n", err)
+ continue
+ }
+
+ fmt.Printf("Converted file written to: %s\n", outputPath)
+ }
+ }()
+ }
+
+ // Walk the path and send files to the channel
+ err = filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".cbz") {
+ fileChan <- path
+ }
+
+ return nil
+ })
+
+ if err != nil {
+ return fmt.Errorf("error walking the path: %w", err)
+ }
+
+ close(fileChan) // Close the channel to signal workers to stop
+ wg.Wait() // Wait for all workers to finish
+
+ return nil
+}
diff --git a/cmd/rootcmd.go b/cmd/rootcmd.go
new file mode 100644
index 0000000..745ab82
--- /dev/null
+++ b/cmd/rootcmd.go
@@ -0,0 +1,23 @@
+package cmd
+
+import (
+ "fmt"
+ "github.com/spf13/cobra"
+ "os"
+)
+
+var rootCmd = &cobra.Command{
+ Use: "cbzconverter",
+ Short: "Convert CBZ files using a specified converter",
+}
+
+// Execute executes the root command.
+func Execute() {
+ if err := rootCmd.Execute(); err != nil {
+ fmt.Println(err)
+ os.Exit(1)
+ }
+}
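+
+// AddCommand registers a subcommand on the root command.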
+func AddCommand(cmd *cobra.Command) {
+ rootCmd.AddCommand(cmd)
+}
diff --git a/converter/constant/format.go b/converter/constant/format.go
new file mode 100644
index 0000000..de58bcf
--- /dev/null
+++ b/converter/constant/format.go
@@ -0,0 +1,8 @@
+package constant
+
+type ConversionFormat string
+
+const (
+ ImageFormatWebP ConversionFormat = "webp"
+ ImageFormatUnknown ConversionFormat = ""
+)
diff --git a/converter/converter.go b/converter/converter.go
new file mode 100644
index 0000000..0b33760
--- /dev/null
+++ b/converter/converter.go
@@ -0,0 +1,37 @@
+package converter
+
+import (
+ "CBZOptimizer/converter/constant"
+ "CBZOptimizer/converter/webp"
+ "CBZOptimizer/packer"
+ "fmt"
+ "github.com/samber/lo"
+ "strings"
+)
+
+type Converter interface {
+ // Format of the converter
+ Format() (format constant.ConversionFormat)
+ ConvertChapter(chapter *packer.Chapter, quality uint8, progress func(string)) (*packer.Chapter, error)
+}
+
+var converters = map[constant.ConversionFormat]Converter{
+ constant.ImageFormatWebP: webp.New(),
+}
+
+// Available returns a list of available converters.
+func Available() []constant.ConversionFormat {
+ return lo.Keys(converters)
+}
+
+// Get returns a converter by name.
+// If the converter is not available, an error is returned.
+func Get(name constant.ConversionFormat) (Converter, error) {
+ if converter, ok := converters[name]; ok {
+ return converter, nil
+ }
+
+ return nil, fmt.Errorf("unkown converter \"%s\", available options are %s", name, strings.Join(lo.Map(Available(), func(item constant.ConversionFormat, index int) string {
+ return string(item)
+ }), ", "))
+}
diff --git a/converter/webp/webp_converter.go b/converter/webp/webp_converter.go
new file mode 100644
index 0000000..edab707
--- /dev/null
+++ b/converter/webp/webp_converter.go
@@ -0,0 +1,203 @@
+package webp
+
+import (
+ "CBZOptimizer/converter/constant"
+ packer2 "CBZOptimizer/packer"
+ "bytes"
+ "fmt"
+ "github.com/oliamb/cutter"
+ "golang.org/x/exp/slices"
+ _ "golang.org/x/image/webp"
+ "image"
+ _ "image/jpeg"
+ "image/png"
+ "io"
+ "log"
+ "runtime"
+ "sync"
+ "sync/atomic"
+)
+
+type Converter struct {
+ maxHeight int
+ cropHeight int
+}
+
+func (converter *Converter) Format() (format constant.ConversionFormat) {
+ return constant.ImageFormatWebP
+}
+
+func New() *Converter {
+ return &Converter{
+ //maxHeight: 16383 / 2,
+ maxHeight: 4000,
+ cropHeight: 2000,
+ }
+}
+
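+// ConvertChapter decodes every page of the chapter, splits pages taller than maxHeight
+// into crops, and re-encodes them as WebP in parallel. Pages that fail WebP encoding
+// are kept as PNG so no content is lost.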
+func (converter *Converter) ConvertChapter(chapter *packer2.Chapter, quality uint8, progress func(string)) (*packer2.Chapter, error) {
+ err := PrepareEncoder()
+ if err != nil {
+ return nil, err
+ }
+
+ var wgConvertedPages sync.WaitGroup
+ maxGoroutines := runtime.NumCPU()
+
+ pagesChan := make(chan *packer2.PageContainer, maxGoroutines)
+
+ var wgPages sync.WaitGroup
+ wgPages.Add(len(chapter.Pages))
+
+ guard := make(chan struct{}, maxGoroutines)
+ pagesMutex := sync.Mutex{}
+ var pages []*packer2.Page
+ var totalPages = uint32(len(chapter.Pages))
+
+ go func() {
+ for page := range pagesChan {
+ guard <- struct{}{} // would block if guard channel is already filled
+ go func(pageToConvert *packer2.PageContainer) {
+ defer wgConvertedPages.Done()
+ convertedPage, err := converter.convertPage(pageToConvert, quality)
+ if err != nil {
+ // WebP conversion failed: fall back to PNG so the page is not lost.
+ buffer := new(bytes.Buffer)
+ if err := png.Encode(buffer, pageToConvert.Image); err != nil {
+ <-guard
+ return
+ }
+ pageToConvert.Page.Contents = buffer
+ pageToConvert.Page.Extension = ".png"
+ pageToConvert.Page.Size = uint64(buffer.Len())
+ convertedPage = pageToConvert
+ }
+ pagesMutex.Lock()
+ pages = append(pages, convertedPage.Page)
+ progress(fmt.Sprintf("Converted %d/%d pages to %s format", len(pages), totalPages, converter.Format()))
+ pagesMutex.Unlock()
+ <-guard
+ }(page)
+
+ }
+ }()
+
+ for _, page := range chapter.Pages {
+ go func(page *packer2.Page) {
+ defer wgPages.Done()
+
+ splitNeeded, img, format, err := converter.checkPageNeedsSplit(page)
+ if err != nil {
+ log.Fatalf("error checking if page %d d of chapter %s needs split: %v", page.Index, chapter.FilePath, err)
+ return
+ }
+
+ if !splitNeeded {
+ wgConvertedPages.Add(1)
+ pagesChan <- packer2.NewContainer(page, img, format)
+ return
+ }
+ images, err := converter.cropImage(img)
+ if err != nil {
+ log.Fatalf("error converting page %d of chapter %s to webp: %v", page.Index, chapter.FilePath, err)
+ return
+ }
+
+ atomic.AddUint32(&totalPages, uint32(len(images)-1))
+ for i, img := range images {
+ page := &packer2.Page{Index: page.Index, IsSplitted: true, SplitPartIndex: uint16(i)}
+ wgConvertedPages.Add(1)
+ pagesChan <- packer2.NewContainer(page, img, "N/A")
+ }
+ }(page)
+
+ }
+
+ wgPages.Wait()
+ wgConvertedPages.Wait()
+ close(pagesChan)
+
+ // Sort by page index (and crop index for split pages) so the chapter is written in reading order.
+ slices.SortFunc(pages, func(a, b *packer2.Page) int {
+ if a.Index == b.Index {
+ return int(a.SplitPartIndex) - int(b.SplitPartIndex)
+ }
+ return int(a.Index) - int(b.Index)
+ })
+ chapter.Pages = pages
+
+ runtime.GC()
+
+ return chapter, nil
+}
+
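+// cropImage slices a tall image into horizontal parts of at most cropHeight pixels each, preserving the full width.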
+func (converter *Converter) cropImage(img image.Image) ([]image.Image, error) {
+ bounds := img.Bounds()
+ height := bounds.Dy()
+
+ numParts := height / converter.cropHeight
+ if height%converter.cropHeight != 0 {
+ numParts++
+ }
+
+ parts := make([]image.Image, numParts)
+
+ for i := 0; i < numParts; i++ {
+ partHeight := converter.cropHeight
+ if i == numParts-1 {
+ partHeight = height - i*converter.cropHeight
+ }
+
+ part, err := cutter.Crop(img, cutter.Config{
+ Width: bounds.Dx(),
+ Height: partHeight,
+ Anchor: image.Point{Y: i * converter.cropHeight},
+ Mode: cutter.TopLeft,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("error cropping part %d: %v", i+1, err)
+ }
+
+ parts[i] = part
+ }
+
+ return parts, nil
+}
+
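+// checkPageNeedsSplit decodes the page image and reports whether its height reaches maxHeight,
+// returning the decoded image and its format.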
+func (converter *Converter) checkPageNeedsSplit(page *packer2.Page) (bool, image.Image, string, error) {
+ reader := io.Reader(bytes.NewBuffer(page.Contents.Bytes()))
+ img, format, err := image.Decode(reader)
+ if err != nil {
+ return false, nil, format, err
+ }
+
+ bounds := img.Bounds()
+ height := bounds.Dy()
+
+ return height >= converter.maxHeight, img, format, nil
+}
+
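+// convertPage re-encodes the container's image as WebP at the requested quality, unless the source is already WebP.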
+func (converter *Converter) convertPage(container *packer2.PageContainer, quality uint8) (*packer2.PageContainer, error) {
+ if container.Format == "webp" {
+ return container, nil
+ }
+ converted, err := converter.convert(container.Image, uint(quality))
+ if err != nil {
+ return nil, err
+ }
+ container.Page.Contents = converted
+ container.Page.Extension = ".webp"
+ container.Page.Size = uint64(converted.Len())
+ return container, nil
+}
+
+// convert encodes an already-decoded image as WebP at the given quality and
+// returns the resulting file as a bytes.Buffer.
+func (converter *Converter) convert(img image.Image, quality uint) (*bytes.Buffer, error) {
+ var buf bytes.Buffer
+ err := Encode(&buf, img, quality)
+ if err != nil {
+ return nil, err
+ }
+
+ return &buf, nil
+}
diff --git a/converter/webp/webp_provider.go b/converter/webp/webp_provider.go
new file mode 100644
index 0000000..1432550
--- /dev/null
+++ b/converter/webp/webp_provider.go
@@ -0,0 +1,83 @@
+package webp
+
+import (
+ "github.com/nickalie/go-binwrapper"
+ "github.com/nickalie/go-webpbin"
+ "image"
+ "io"
+ "os"
+ "path/filepath"
+ "runtime"
+)
+
+const libwebpVersion = "1.4.0"
+
+// newCWebP creates a CWebP wrapper that runs the cwebp binary from the given folder.
+func newCWebP(folder string) *webpbin.CWebP {
+ bin := &webpbin.CWebP{
+ BinWrapper: createBinWrapper(folder),
+ }
+ bin.ExecPath("cwebp")
+
+ return bin
+}
+
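+// PrepareEncoder triggers the binary wrapper once so the cwebp binary is downloaded
+// into DefaultWebPDir before any pages are converted.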
+func PrepareEncoder() error {
+ container := newCWebP(DefaultWebPDir)
+ return container.BinWrapper.Run()
+}
+
+// DefaultWebPDir is the directory where the downloaded libwebp binaries are cached:
+// "$HOME/.cache/webp/<version>/bin" on Unix-like systems and "%APPDATA%\webp\<version>\bin" on Windows.
+var DefaultWebPDir = filepath.Join(map[string]string{
+ "windows": filepath.Join(os.Getenv("APPDATA")),
+ "darwin": filepath.Join(os.Getenv("HOME"), ".cache"),
+ "linux": filepath.Join(os.Getenv("HOME"), ".cache"),
+}[runtime.GOOS], "webp", libwebpVersion, "bin")
+
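+// Encode encodes m as WebP at the given quality by piping it through the cwebp binary, writing the result to w.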
+func Encode(w io.Writer, m image.Image, quality uint) error {
+ return newCWebP(DefaultWebPDir).
+ Quality(quality).
+ InputImage(m).
+ Output(w).
+ Run()
+}
+
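+// createBinWrapper declares the libwebp download URL for each supported OS/architecture
+// pair and configures dest as the extraction directory for the binaries.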
+func createBinWrapper(dest string) *binwrapper.BinWrapper {
+ base := "https://storage.googleapis.com/downloads.webmproject.org/releases/webp/"
+
+ b := binwrapper.NewBinWrapper().AutoExe()
+
+ b.Src(
+ binwrapper.NewSrc().
+ URL(base + "libwebp-" + libwebpVersion + "-mac-arm64.tar.gz").
+ Os("darwin").
+ Arch("arm64")).
+ Src(
+ binwrapper.NewSrc().
+ URL(base + "libwebp-" + libwebpVersion + "-mac-x86-64.tar.gz").
+ Os("darwin").
+ Arch("x64")).
+ Src(
+ binwrapper.NewSrc().
+ URL(base + "libwebp-" + libwebpVersion + "-linux-x86-32.tar.gz").
+ Os("linux").
+ Arch("x86")).
+ Src(
+ binwrapper.NewSrc().
+ URL(base + "libwebp-" + libwebpVersion + "-linux-x86-64.tar.gz").
+ Os("linux").
+ Arch("x64")).
+ Src(
+ binwrapper.NewSrc().
+ URL(base + "libwebp-" + libwebpVersion + "-windows-x64.zip").
+ Os("win32").
+ Arch("x64")).
+ Src(
+ binwrapper.NewSrc().
+ URL(base + "libwebp-" + libwebpVersion + "-windows-x86.zip").
+ Os("win32").
+ Arch("x86"))
+
+ return b.Strip(2).Dest(dest)
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..92c7f7a
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,30 @@
+module CBZOptimizer
+
+go 1.23
+
+require (
+ github.com/nickalie/go-binwrapper v0.0.0-20190114141239-525121d43c84
+ github.com/nickalie/go-webpbin v0.0.0-20220110095747-f10016bf2dc1
+ github.com/oliamb/cutter v0.2.2
+ github.com/samber/lo v1.47.0
+ github.com/spf13/cobra v1.8.1
+ golang.org/x/exp v0.0.0-20240823005443-9b4947da3948
+ golang.org/x/image v0.19.0
+)
+
+require (
+ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
+ github.com/dsnet/compress v0.0.1 // indirect
+ github.com/frankban/quicktest v1.14.6 // indirect
+ github.com/golang/snappy v0.0.4 // indirect
+ github.com/inconshreveable/mousetrap v1.1.0 // indirect
+ github.com/mholt/archiver v3.1.1+incompatible // indirect
+ github.com/nwaples/rardecode v1.1.3 // indirect
+ github.com/pierrec/lz4 v2.6.1+incompatible // indirect
+ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
+ github.com/spf13/pflag v1.0.5 // indirect
+ github.com/stretchr/testify v1.9.0 // indirect
+ github.com/ulikunitz/xz v0.5.12 // indirect
+ github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
+ golang.org/x/text v0.17.0 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..06a5497
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,72 @@
+github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
+github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
+github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
+github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
+github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
+github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
+github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
+github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
+github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/mholt/archiver v3.1.1+incompatible h1:1dCVxuqs0dJseYEhi5pl7MYPH9zDa1wBi7mF09cbNkU=
+github.com/mholt/archiver v3.1.1+incompatible/go.mod h1:Dh2dOXnSdiLxRiPoVfIr/fI1TwETms9B8CTWfeh7ROU=
+github.com/nickalie/go-binwrapper v0.0.0-20190114141239-525121d43c84 h1:/6MoQlTdk1eAi0J9O89ypO8umkp+H7mpnSF2ggSL62Q=
+github.com/nickalie/go-binwrapper v0.0.0-20190114141239-525121d43c84/go.mod h1:Eeech2fhQ/E4bS8cdc3+SGABQ+weQYGyWBvZ/mNr5uY=
+github.com/nickalie/go-webpbin v0.0.0-20220110095747-f10016bf2dc1 h1:9awJsNP+gYOGCr3pQu9i217bCNsVwoQCmD3h7CYwxOw=
+github.com/nickalie/go-webpbin v0.0.0-20220110095747-f10016bf2dc1/go.mod h1:m5oz0fmp+uyRBxxFkvciIpe1wd2JZ3pDVJ3x/D8/EGw=
+github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
+github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
+github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
+github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
+github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
+github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM=
+github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
+github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
+github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/samber/lo v1.47.0 h1:z7RynLwP5nbyRscyvcD043DWYoOcYRv3mV8lBeqOCLc=
+github.com/samber/lo v1.47.0/go.mod h1:RmDH9Ct32Qy3gduHQuKJ3gW1fMHAnE/fAzQuf6He5cU=
+github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
+github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
+github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
+github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
+github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
+github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
+github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
+golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA=
+golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ=
+golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
+golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ=
+golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
+golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/main.go b/main.go
new file mode 100644
index 0000000..6c66e5c
--- /dev/null
+++ b/main.go
@@ -0,0 +1,9 @@
+package main
+
+import (
+ "CBZOptimizer/cmd"
+)
+
+func main() {
+ cmd.Execute()
+}
diff --git a/packer/chapter.go b/packer/chapter.go
new file mode 100644
index 0000000..df3bbb2
--- /dev/null
+++ b/packer/chapter.go
@@ -0,0 +1,10 @@
+package packer
+
+type Chapter struct {
+ // FilePath is the path to the chapter's .cbz file.
+ FilePath string
+ // Pages is a slice of pointers to Page objects.
+ Pages []*Page
+ // ComicInfoXml holds the raw contents of the chapter's ComicInfo.xml metadata, if any.
+ ComicInfoXml string
+}
diff --git a/packer/page.go b/packer/page.go
new file mode 100644
index 0000000..d99d0b9
--- /dev/null
+++ b/packer/page.go
@@ -0,0 +1,18 @@
+package packer
+
+import "bytes"
+
+type Page struct {
+ // Index of the page in the chapter.
+ Index uint16 `json:"index" jsonschema:"description=Index of the page in the chapter."`
+ // Extension of the page image.
+ Extension string `json:"extension" jsonschema:"description=Extension of the page image."`
+ // Size of the page in bytes
+ Size uint64 `json:"-"`
+ // Contents of the page
+ Contents *bytes.Buffer `json:"-"`
+ // IsSplitted tell us if the page was cropped to multiple pieces
+ IsSplitted bool `json:"is_cropped" jsonschema:"description=Was this page cropped."`
+ // SplitPartIndex represent the index of the crop if the page was cropped
+ SplitPartIndex uint16 `json:"crop_part_index" jsonschema:"description=Index of the crop if the image was cropped."`
+}
diff --git a/packer/page_container.go b/packer/page_container.go
new file mode 100644
index 0000000..58cbe8b
--- /dev/null
+++ b/packer/page_container.go
@@ -0,0 +1,17 @@
+package packer
+
+import "image"
+
+// PageContainer is a struct that holds a manga page, its image, and the image format.
+type PageContainer struct {
+ // Page is a pointer to a manga page object.
+ Page *Page
+ // Image is the decoded image of the manga page.
+ Image image.Image
+ // Format is a string representing the format of the image (e.g., "png", "jpeg", "webp").
+ Format string
+}
+
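+// NewContainer bundles a page with its decoded image and detected format.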
+func NewContainer(page *Page, img image.Image, format string) *PageContainer {
+ return &PageContainer{Page: page, Image: img, Format: format}
+}
diff --git a/testdata/Chapter 1.cbz b/testdata/Chapter 1.cbz
new file mode 100644
index 0000000..f01b7b6
Binary files /dev/null and b/testdata/Chapter 1.cbz differ