Better error handling
- Removed all panic code
- Downloads now continue in most cases that previously would panic
  mid-download

closes #3
elboletaire committed Jan 14, 2023
1 parent 5974b99 commit 1fd7088
Showing 8 changed files with 178 additions and 139 deletions.
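
The commit swaps panics for returned errors so that one failed request no longer kills the whole run. A minimal standalone sketch of that pattern (the function, its failure condition, and the loop are invented for illustration, not taken from the repository):

// Sketch: return errors instead of panicking, and keep going.
// fetchChapter and its failing condition are made up for this example.
package main

import (
    "errors"
    "fmt"
)

func fetchChapter(n int) (string, error) {
    if n == 13 {
        return "", errors.New("server returned 404")
    }
    return fmt.Sprintf("chapter-%d.cbz", n), nil
}

func main() {
    for n := 1; n <= 20; n++ {
        file, err := fetchChapter(n)
        if err != nil {
            fmt.Printf("- error fetching chapter %d: %v\n", n, err)
            continue // report and move on instead of aborting
        }
        fmt.Println("saved", file)
    }
}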
63 changes: 48 additions & 15 deletions cmd/root.go
@@ -48,7 +48,13 @@ Would download chapters 10 to 20 of Black Clover from
 mangadex.org in Spanish`, "manga-downloader", color.YellowString("manga-downloader")),
     Args: cobra.ExactArgs(2),
     Run: func(cmd *cobra.Command, args []string) {
-        s := grabber.NewSite(args[0])
+        s, errs := grabber.NewSite(args[0])
+        if len(errs) > 0 {
+            color.Red("Errors testing site (a site may be down):")
+            for _, err := range errs {
+                color.Red(err.Error())
+            }
+        }
         if s == nil {
             color.Yellow("Site not recognised")
             os.Exit(1)
@@ -57,55 +63,70 @@ mangadex.org in Spanish`, "manga-downloader", color.YellowString("manga-downloader")),
 
         // ranges parsing
         rngs, err := ranges.Parse(args[1])
-        if err != nil {
-            panic(err)
-        }
+        cerr(err, "Error parsing ranges: %s")
 
         // fetch series title
-        title := s.GetTitle()
+        title, err := s.FetchTitle()
+        cerr(err, "Error fetching title: %s")
 
         // fetch all chapters
-        chapters := s.FetchChapters()
+        chapters, errs := s.FetchChapters()
+        if len(errs) > 0 {
+            color.Red("Errors fetching chapters:")
+            for _, err := range errs {
+                color.Red(err.Error())
+            }
+            os.Exit(1)
+        }
 
         // sort and filter specified ranges
         chapters = chapters.FilterRanges(rngs)
 
         if len(chapters) == 0 {
-            color.Yellow("No chapters found for the specified ranges")
-            os.Exit(0)
+            warn("No chapters found for the specified ranges")
         }
 
         wg := sync.WaitGroup{}
-        guard := make(chan struct{}, s.GetMaxConcurrency().Chapters)
+        g := make(chan struct{}, s.GetMaxConcurrency().Chapters)
 
         // loop chapters to retrieve pages
        for _, chap := range chapters {
-            guard <- struct{}{}
+            g <- struct{}{}
             wg.Add(1)
             go func(chap grabber.Filterable) {
                 defer wg.Done()
-                chapter := s.FetchChapter(chap)
+                chapter, err := s.FetchChapter(chap)
+                if err != nil {
+                    color.Red("- error fetching chapter %s: %s", chap.GetTitle(), err.Error())
+                    <-g
+                    return
+                }
                 fmt.Printf("fetched %s %s\n", color.CyanString(title), color.HiBlackString(chapter.GetTitle()))
 
                 files, err := downloader.FetchChapter(s, chapter)
                 if err != nil {
-                    panic(err)
+                    color.Red("- error downloading chapter %s: %s", chapter.GetTitle(), err.Error())
+                    <-g
+                    return
                 }
 
                 filename, err := packer.NewFilenameFromTemplate(title, chapter, s.GetFilenameTemplate())
                 if err != nil {
-                    panic(err)
+                    color.Red("- error creating filename for chapter %s: %s", chapter.GetTitle(), err.Error())
+                    <-g
+                    return
                 }
 
                 filename += ".cbz"
 
                 fmt.Printf("- %s %s\n", color.GreenString("saving file"), color.HiBlackString(filename))
                 if err = packer.ArchiveCBZ(filename, files); err != nil {
                     color.Red("- error saving file %s: %s", filename, err.Error())
                 } else {
                     fmt.Printf("- %s %s\n", color.GreenString("saved file"), color.HiBlackString(filename))
                 }
 
                 // release guard
-                <-guard
+                <-g
             }(chap)
         }
         wg.Wait()
@@ -140,3 +161,15 @@ func init() {
     rootCmd.Flags().StringP("language", "l", "", "only download the specified language")
     rootCmd.Flags().StringP("filename-template", "t", packer.FilenameTemplateDefault, "template for the resulting filename")
 }
+
+func cerr(err error, prefix string) {
+    if err != nil {
+        color.Red(prefix, err.Error())
+        os.Exit(1)
+    }
+}
+
+func warn(err string) {
+    fmt.Println(color.YellowString(err))
+    os.Exit(1)
+}
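
Note how the download loop above caps concurrency: the buffered channel g acts as a counting semaphore alongside the WaitGroup, which is why every new early-return path has to drain a slot with <-g before returning. A minimal sketch of the same pattern (the limit and the simulated work are made up); it releases the slot with defer, a more defensive variant than the hand-written <-g on each exit path:

// Sketch of the buffered-channel semaphore used in cmd/root.go.
// maxConcurrency and the sleep are illustrative stand-ins.
package main

import (
    "fmt"
    "sync"
    "time"
)

func main() {
    const maxConcurrency = 3
    sem := make(chan struct{}, maxConcurrency) // at most 3 goroutines run at once
    var wg sync.WaitGroup

    for i := 1; i <= 10; i++ {
        sem <- struct{}{} // acquire a slot; blocks while 3 are busy
        wg.Add(1)
        go func(i int) {
            defer wg.Done()
            defer func() { <-sem }() // release the slot on every exit path
            time.Sleep(100 * time.Millisecond) // stand-in for a chapter download
            fmt.Println("done", i)
        }(i)
    }
    wg.Wait()
}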
2 changes: 1 addition & 1 deletion downloader/fetch.go
@@ -17,7 +17,7 @@ type File struct {
 }
 
 // FetchChapter downloads all the pages of a chapter
-func FetchChapter(site grabber.Site, chapter grabber.Chapter) (files []*File, err error) {
+func FetchChapter(site grabber.Site, chapter *grabber.Chapter) (files []*File, err error) {
     wg := sync.WaitGroup{}
 
     color.Blue("- downloading %s pages...", color.HiBlackString(chapter.GetTitle()))
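The signature change above passes the chapter by pointer: site implementations now build and return a *grabber.Chapter, so the downloader shares that one value instead of copying the struct. A tiny sketch of the difference (the Chapter struct here is a cut-down stand-in, not the real one):

// Passing *Chapter shares one value instead of copying it,
// so updates made through the pointer are visible to the caller.
package main

import "fmt"

type Chapter struct {
    Title      string
    PagesCount int64
}

func markFetched(c *Chapter) {
    c.PagesCount = 42 // mutates the caller's value
}

func main() {
    c := &Chapter{Title: "Chapter 1"}
    markFetched(c)
    fmt.Println(c.Title, c.PagesCount) // Chapter 1 42
}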
85 changes: 40 additions & 45 deletions grabber/inmanga.go
@@ -23,79 +23,84 @@ type InmangaChapter struct {
 }
 
 // Test checks if the site is InManga
-func (i *Inmanga) Test() bool {
+func (i *Inmanga) Test() (bool, error) {
     re := regexp.MustCompile(`inmanga\.com`)
-    return re.MatchString(i.URL)
+    return re.MatchString(i.URL), nil
 }
 
+// FetchTitle fetches the manga title
+func (i *Inmanga) FetchTitle() (string, error) {
+    if i.title != "" {
+        return i.title, nil
+    }
+
+    body, err := http.Get(http.RequestParams{
+        URL: i.URL,
+    })
+    if err != nil {
+        return "", err
+    }
+    defer body.Close()
+
+    doc, err := goquery.NewDocumentFromReader(body)
+    if err != nil {
+        return "", err
+    }
+
+    i.title = doc.Find("h1").Text()
+
+    return i.title, nil
+}
+
 // FetchChapters returns the chapters of the manga
-func (i Inmanga) FetchChapters() Filterables {
+func (i Inmanga) FetchChapters() (Filterables, []error) {
     id := GetUUID(i.URL)
 
     // retrieve chapters json list
     body, err := http.GetText(http.RequestParams{
         URL: "https://inmanga.com/chapter/getall?mangaIdentification=" + id,
     })
     if err != nil {
-        panic(err)
+        return nil, []error{err}
     }
 
     raw := struct {
         Data string
     }{}
 
     if err = json.Unmarshal([]byte(body), &raw); err != nil {
-        panic(err)
+        return nil, []error{err}
     }
 
     feed := inmangaChapterFeed{}
-    err = json.Unmarshal([]byte(raw.Data), &feed)
-    if err != nil {
-        panic(err)
-    }
-
-    return newInmangaChaptersSlice(feed.Result)
-}
-
-// GetTitle fetches the manga title
-func (i *Inmanga) GetTitle() string {
-    if i.title != "" {
-        return i.title
-    }
-
-    body, err := http.Get(http.RequestParams{
-        URL: i.URL,
-    })
-    if err != nil {
-        panic(err)
-    }
-    defer body.Close()
-
-    doc, err := goquery.NewDocumentFromReader(body)
-    if err != nil {
-        panic(err)
-    }
-
-    i.title = doc.Find("h1").Text()
-    return i.title
-}
+    if err = json.Unmarshal([]byte(raw.Data), &feed); err != nil {
+        return nil, []error{err}
+    }
+
+    chapters := make(Filterables, 0, len(feed.Result))
+    for _, c := range feed.Result {
+        chapters = append(chapters, newInmangaChapter(c))
+    }
+
+    return chapters, nil
+}
 
 // FetchChapter fetches the chapter with its pages
-func (i Inmanga) FetchChapter(chap Filterable) Chapter {
+func (i Inmanga) FetchChapter(chap Filterable) (*Chapter, error) {
     ichap := chap.(*InmangaChapter)
     body, err := http.Get(http.RequestParams{
         URL: "https://inmanga.com/chapter/chapterIndexControls?identification=" + ichap.Id,
     })
     if err != nil {
-        panic(err)
+        return nil, err
     }
     defer body.Close()
     doc, err := goquery.NewDocumentFromReader(body)
     if err != nil {
-        panic(err)
+        return nil, err
     }
 
-    chapter := Chapter{
+    chapter := &Chapter{
         Title:      chap.GetTitle(),
         Number:     chap.GetNumber(),
         PagesCount: int64(ichap.PagesCount),
@@ -112,7 +117,7 @@ func (i Inmanga) FetchChapter(chap Filterable) Chapter {
         })
     })
 
-    return chapter
+    return chapter, nil
 }
 
 // newInmangaChapter creates an InMangaChapter from an InMangaChapterFeedResult
@@ -127,16 +132,6 @@ func newInmangaChapter(c inmangaChapterFeedResult) *InmangaChapter {
     }
 }
 
-// newInmangaChaptersSlice creates a slice of Filterables from a slice of InMangaChapterFeedResult
-func newInmangaChaptersSlice(s []inmangaChapterFeedResult) Filterables {
-    chapters := make(Filterables, 0, len(s))
-    for _, c := range s {
-        chapters = append(chapters, newInmangaChapter(c))
-    }
-
-    return chapters
-}
-
 // inmangaChapterFeed is the JSON feed for the chapters list
 type inmangaChapterFeed struct {
     Result []inmangaChapterFeedResult
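A note on the double json.Unmarshal in FetchChapters above: InManga's getall endpoint wraps the chapter list as a JSON-encoded string inside the Data field, so the body has to be decoded twice. A self-contained sketch of that two-step decode (the payload below is a made-up example, not real API output):

// First pass decodes the outer object; the Data field is itself a
// JSON document, so a second pass decodes the embedded string.
package main

import (
    "encoding/json"
    "fmt"
)

type feed struct {
    Result []struct {
        Number float64
    }
}

func main() {
    body := `{"Data": "{\"Result\": [{\"Number\": 1}, {\"Number\": 2}]}"}`

    raw := struct{ Data string }{}
    if err := json.Unmarshal([]byte(body), &raw); err != nil { // outer object
        fmt.Println("decode outer:", err)
        return
    }

    f := feed{}
    if err := json.Unmarshal([]byte(raw.Data), &f); err != nil { // embedded JSON string
        fmt.Println("decode inner:", err)
        return
    }

    fmt.Println("chapters:", len(f.Result)) // chapters: 2
}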
