Skip to content

Commit

Permalink
feat: quiet option
Browse files Browse the repository at this point in the history
  • Loading branch information
lapwat committed Jan 2, 2022
1 parent 5e735f9 commit 008e4eb
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 14 deletions.
5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ Flags:
-n, --name string book name (default: page title)
-o, --offset int skip first chapters, in recursive mode
--output string file name (default: book name)
-q, --quiet hide progress bar
  -r, --recursive         create one chapter per navigation item
  -s, --selector string   table of content CSS selector, in recursive mode
-t, --threads int download concurrency, in recursive mode (default -1)
Expand Down Expand Up @@ -111,15 +112,15 @@ go get -u github.com/lapwat/papeer

```sh
platform=linux # use platform=darwin for MacOS
release=0.3.2
release=0.3.3
curl -L https://github.com/lapwat/papeer/releases/download/v$release/papeer-v$release-$platform-amd64 > papeer
chmod +x papeer
sudo mv papeer /usr/local/bin
```

### On Windows

Download [latest release](https://github.com/lapwat/papeer/releases/download/v0.3.2/papeer-v0.3.2-windows-amd64.exe).
Download [latest release](https://github.com/lapwat/papeer/releases/download/v0.3.3/papeer-v0.3.3-windows-amd64.exe).

## Install kindlegen to export websites to MOBI (optional)

Expand Down
28 changes: 18 additions & 10 deletions book/scraper.go
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ func NewScrapeConfigFake() *ScrapeConfig {
return config
}

func NewBookFromURL(url, selector, name, author string, recursive, include, imagesOnly bool, limit, offset, delay, threads int) book {
func NewBookFromURL(url, selector, name, author string, recursive, include, imagesOnly, quiet bool, limit, offset, delay, threads int) book {
config1 := NewScrapeConfig()
config1.imagesOnly = imagesOnly

Expand All @@ -92,7 +92,7 @@ func NewBookFromURL(url, selector, name, author string, recursive, include, imag
config2.threads = threads
config2.include = include
config2.imagesOnly = imagesOnly
chapters, home = tableOfContent(url, config2, config1)
chapters, home = tableOfContent(url, config2, config1, quiet)
} else {
chapters = []chapter{NewChapterFromURL(url, []*ScrapeConfig{config1}, 0, func(index int, name string) {})}
home = chapters[0]
Expand Down Expand Up @@ -240,7 +240,7 @@ func NewChapterFromURL(url string, configs []*ScrapeConfig, index int, updatePro
return chapter{string(body), name, article.Byline, content, subchapters, config}
}

func tableOfContent(url string, config *ScrapeConfig, subConfig *ScrapeConfig) ([]chapter, chapter) {
func tableOfContent(url string, config *ScrapeConfig, subConfig *ScrapeConfig, quiet bool) ([]chapter, chapter) {
base, err := urllib.Parse(url)
if err != nil {
log.Fatal(err)
Expand All @@ -252,9 +252,13 @@ func tableOfContent(url string, config *ScrapeConfig, subConfig *ScrapeConfig) (
}

chapters := make([]chapter, len(links))
// progress := NewProgress(links, "", 0)
delay := config.delay

var p progress
if quiet == false {
p = NewProgress(links, "", 0)
}

if delay >= 0 {
// synchronous mode

Expand All @@ -265,9 +269,11 @@ func tableOfContent(url string, config *ScrapeConfig, subConfig *ScrapeConfig) (
log.Fatal(err)
}

sc := NewChapterFromURL(u.String(), []*ScrapeConfig{subConfig}, 0, func(index int, name string) {})
chapters[index] = sc
// progress.Increment(index)
chapters[index] = NewChapterFromURL(u.String(), []*ScrapeConfig{subConfig}, 0, func(index int, name string) {})

if quiet == false {
p.Increment(index)
}

// short sleep for last chapter to let the progress bar update
if index == len(links)-1 {
Expand Down Expand Up @@ -301,9 +307,11 @@ func tableOfContent(url string, config *ScrapeConfig, subConfig *ScrapeConfig) (
log.Fatal(err)
}

sc := NewChapterFromURL(u.String(), []*ScrapeConfig{subConfig}, 0, func(index int, name string) {})
chapters[index] = sc
// progress.Increment(index)
chapters[index] = NewChapterFromURL(u.String(), []*ScrapeConfig{subConfig}, 0, func(index int, name string) {})

if quiet == false {
p.Increment(index)
}

<-semaphore
}(index, l)
Expand Down
2 changes: 1 addition & 1 deletion cmd/get.go
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ var getCmd = &cobra.Command{
},
Run: func(cmd *cobra.Command, args []string) {
url := args[0]
b := book.NewBookFromURL(url, selector, name, author, recursive, include, images, limit, offset, delay, threads)
b := book.NewBookFromURL(url, selector, name, author, recursive, include, images, quiet, limit, offset, delay, threads)

fakeConfig := book.NewScrapeConfigFake()
fakeChapter := book.NewChapter("", b.Name(), b.Author(), "", b.Chapters(), fakeConfig)
Expand Down
1 change: 1 addition & 0 deletions cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ func init() {
rootCmd.PersistentFlags().BoolVarP(&recursive, "recursive", "r", false, "create one chapter per natigation item")
rootCmd.PersistentFlags().BoolVarP(&include, "include", "i", false, "include URL as first chapter, in resursive mode")
rootCmd.PersistentFlags().BoolVarP(&images, "images", "", false, "retrieve images only")
rootCmd.PersistentFlags().BoolVarP(&quiet, "quiet", "q", false, "hide progress bar")
rootCmd.PersistentFlags().IntVarP(&limit, "limit", "l", -1, "limit number of chapters, in recursive mode")
rootCmd.PersistentFlags().IntVarP(&offset, "offset", "o", 0, "skip first chapters, in recursive mode")
rootCmd.PersistentFlags().IntVarP(&delay, "delay", "d", -1, "time to wait before downloading next chapter, in milliseconds")
Expand Down
2 changes: 1 addition & 1 deletion cmd/version.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,6 @@ var versionCmd = &cobra.Command{
Use: "version",
Short: "Print the version number of papeer",
Run: func(cmd *cobra.Command, args []string) {
fmt.Println("papeer v0.3.2")
fmt.Println("papeer v0.3.3")
},
}

0 comments on commit 008e4eb

Please sign in to comment.