// scrawl/scrawl.go
package main

import (
	"bytes"
	"flag"
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"path"
	"path/filepath"
	"strings"
	"sync"

	"github.com/PuerkitoBio/goquery"
)
func scrape(url, css, attr string) ([]string, error) {
2016-01-08 10:36:07 +00:00
doc, err := goquery.NewDocument(url)
if err != nil {
return nil, err
2016-01-08 10:36:07 +00:00
}
var assets []string
doc.Find(css).Each(func(index int, sel *goquery.Selection) {
asset := sel.Text()
if len(attr) > 0 {
asset, _ = sel.Attr(attr)
2016-01-08 11:03:40 +00:00
}
asset = strings.TrimSpace(asset)
if len(asset) > 0 {
assets = append(assets, asset)
}
})
2016-01-08 10:36:07 +00:00
return assets, nil
2016-01-08 10:36:07 +00:00
}
func download(url string, w io.Writer) error {
resp, err := http.Get(url)
if err != nil {
return err
}
defer resp.Body.Close()
io.Copy(w, resp.Body)
return nil
}
func export(path string, r io.Reader) error {
out, err := os.Create(path)
if err != nil {
log.Fatal(err)
}
defer out.Close()
io.Copy(out, r)
return nil
}
// usage prints command-line help for the tool to standard error,
// including the flag defaults. Installed as flag.Usage by main.
func usage() {
	prog := path.Base(os.Args[0])
	fmt.Fprintf(os.Stderr, "Usage: %s [options] url selector [path]\n", prog)
	fmt.Fprintf(os.Stderr, "https://foosoft.net/projects/scrawl/\n\n")
	fmt.Fprintf(os.Stderr, "Parameters:\n")
	flag.PrintDefaults()
}
2016-01-08 10:36:07 +00:00
func main() {
2016-01-08 11:03:40 +00:00
var (
attr = flag.String("attr", "", "attribute to query")
dir = flag.String("dir", ".", "output directory")
2016-01-08 11:03:40 +00:00
verbose = flag.Bool("verbose", false, "verbose output")
)
2016-01-08 13:19:36 +00:00
flag.Usage = usage
2016-01-08 11:03:40 +00:00
flag.Parse()
if flag.NArg() != 2 {
2016-01-08 10:36:07 +00:00
flag.Usage()
os.Exit(2)
}
var (
2016-01-08 11:03:40 +00:00
baseRaw = flag.Arg(0)
css = flag.Arg(1)
2016-01-08 10:36:07 +00:00
)
2016-01-08 11:03:40 +00:00
base, err := url.Parse(baseRaw)
2016-01-08 10:36:07 +00:00
if err != nil {
log.Fatal(err)
}
2016-01-08 11:03:40 +00:00
if *verbose {
log.Printf("scraping page '%s'", baseRaw)
}
assetsRaw, err := scrape(baseRaw, css, *attr)
2016-01-08 10:36:07 +00:00
if err != nil {
log.Fatal(err)
}
var wg sync.WaitGroup
for _, assetRaw := range assetsRaw {
wg.Add(1)
go func(assetRaw string) {
defer wg.Done()
2016-01-08 10:36:07 +00:00
if *verbose {
log.Printf("parsing asset string '%s'", assetRaw)
}
2016-01-08 10:36:07 +00:00
asset, err := url.Parse(assetRaw)
if err != nil {
log.Fatal(err)
}
2016-01-08 10:36:07 +00:00
if !asset.IsAbs() {
asset = asset.ResolveReference(base)
}
2016-01-08 11:03:40 +00:00
if *verbose {
log.Printf("downloading file '%s'", asset.String())
}
2016-01-08 10:36:07 +00:00
var buff bytes.Buffer
if err := download(asset.String(), &buff); err != nil {
log.Fatal(err)
}
2016-01-08 10:36:07 +00:00
path := filepath.Join(*dir, filepath.Base(asset.Path))
2016-01-08 11:03:40 +00:00
if *verbose {
log.Printf("writing file '%s'", path)
}
if err := export(path, &buff); err != nil {
log.Fatal(err)
}
}(assetRaw)
2016-01-08 10:36:07 +00:00
}
wg.Wait()
2016-01-08 10:36:07 +00:00
}