-
-
Notifications
You must be signed in to change notification settings - Fork 26
/
cli.go
37 lines (34 loc) · 1.57 KB
/
cli.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
package main
import (
"github.com/lawzava/emailscraper"
)
//nolint:gochecknoglobals // allow global var here
var (
	// scraperParameters holds the emailscraper configuration; its fields are
	// populated from the CLI flags registered in init().
	scraperParameters emailscraper.Config
	// url is the website address to scrape (flag --website / -w).
	url string
	// output selects the output format: 'plain', 'csv', or 'json' (flag --output).
	output string
	// outputWithURL, when true, adds the source URL alongside each email in
	// the output (flag --output-with-url).
	outputWithURL bool
)
//nolint:gochecknoinits // required by github.com/spf13/cobra
// init wires the CLI flags onto the root command's persistent flag set,
// binding them to the package-level scraper configuration and output options.
func init() {
	// Hoist the flag set once instead of re-fetching it per registration.
	flags := rootCmd.PersistentFlags()

	flags.StringVarP(&url,
		"website", "w", "https://lawzava.com", "Website to scrape")
	flags.BoolVar(&scraperParameters.Recursively,
		"recursively", true, "Scrape website recursively")
	flags.IntVarP(&scraperParameters.MaxDepth,
		"depth", "d", 3, "Max depth to follow when scraping recursively") //nolint:gomnd // allow default max depth
	flags.BoolVar(&scraperParameters.Async,
		"async", true, "Scrape website pages asynchronously")
	flags.BoolVar(&scraperParameters.Debug,
		"debug", false, "Print debug logs")
	flags.BoolVar(&scraperParameters.FollowExternalLinks,
		"follow-external", false, "Follow external 3rd party links within website")
	// Fixed garbled help text: previously read "Enables EnableJavascript
	// execution await" (field name pasted into the user-facing description).
	flags.BoolVar(&scraperParameters.EnableJavascript,
		"js", false, "Enable JavaScript execution await")
	flags.IntVar(&scraperParameters.Timeout,
		"timeout", 0, "If > 0, specify a timeout (seconds) for js execution await")
	flags.StringVar(&output,
		"output", outputPlain, "Output type to use (default 'plain', supported: 'csv', 'json')")
	flags.BoolVar(&outputWithURL,
		"output-with-url", false, "Adds URL to output with each email")
}