Add support for downloading multiple files

commit 022d14bc96
parent 1f1e46d8b0
Author: Alex Yatskov
Date:   2016-01-09 10:35:25 +09:00

scrawl.go | 115 lines changed

--- a/scrawl.go
+++ b/scrawl.go

@@ -24,7 +24,6 @@ package main
 import (
 	"bytes"
-	"errors"
 	"flag"
 	"fmt"
 	"io"
@@ -35,39 +34,31 @@ import (
 	"path"
 	"path/filepath"
 	"strings"
+	"sync"
 
 	"github.com/PuerkitoBio/goquery"
 )
 
-func scrape(url, css, attr string) (string, error) {
+func scrape(url, css, attr string) ([]string, error) {
 	doc, err := goquery.NewDocument(url)
 	if err != nil {
-		return "", err
+		return nil, err
 	}
 
-	sel := doc.Find(css)
-	if sel.Length() == 0 {
-		return "", fmt.Errorf("no selection for '%s'", css)
-	}
-
-	sel = sel.First()
-
-	var res string
-	if len(attr) == 0 {
-		res = sel.Text()
-	} else {
-		var exists bool
-		if res, exists = sel.Attr(attr); !exists {
-			return "", fmt.Errorf("attribute '%s' not found", attr)
+	var assets []string
+	doc.Find(css).Each(func(index int, sel *goquery.Selection) {
+		asset := sel.Text()
+		if len(attr) > 0 {
+			asset, _ = sel.Attr(attr)
 		}
-	}
 
-	res = strings.TrimSpace(res)
-	if len(res) == 0 {
-		return "", errors.New("extracted empty string")
-	}
+		asset = strings.TrimSpace(asset)
+		if len(asset) > 0 {
+			assets = append(assets, asset)
+		}
+	})
 
-	return res, nil
+	return assets, nil
 }
 
 func download(url string, w io.Writer) error {
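
The rewritten scrape collects every match instead of only the first. For reference, a minimal standalone sketch of the goquery iteration pattern it relies on; the URL and selector here are placeholders, not part of this commit:

    package main

    import (
        "fmt"
        "log"
        "strings"

        "github.com/PuerkitoBio/goquery"
    )

    func main() {
        doc, err := goquery.NewDocument("http://example.com") // placeholder page
        if err != nil {
            log.Fatal(err)
        }

        // Each visits every node matched by the selector; Attr returns the
        // value plus a flag for whether the attribute exists at all.
        var hrefs []string
        doc.Find("a").Each(func(i int, sel *goquery.Selection) {
            href, _ := sel.Attr("href")
            if href = strings.TrimSpace(href); len(href) > 0 {
                hrefs = append(hrefs, href)
            }
        })

        fmt.Println(hrefs)
    }

A match that lacks the requested attribute contributes an empty string, which the TrimSpace/length check filters out — the same way the new scrape silently skips empty extractions instead of returning an error.
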
@@ -102,13 +93,14 @@ func usage() {
 func main() {
 	var (
 		attr    = flag.String("attr", "", "attribute to query")
+		dir     = flag.String("dir", ".", "output directory")
 		verbose = flag.Bool("verbose", false, "verbose output")
 	)
 
 	flag.Usage = usage
 	flag.Parse()
 
-	if flag.NArg() < 2 {
+	if flag.NArg() != 2 {
 		flag.Usage()
 		os.Exit(2)
 	}
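
The new -dir flag controls where downloaded files land, defaulting to the current directory. Assuming the two required positional arguments are still the page URL followed by the CSS selector, an invocation might look like this (URL and selector are illustrative):

    scrawl -attr href -dir ./assets http://example.com 'a.download'

Note that the optional third positional argument (an explicit output file name) is gone — NArg must now be exactly 2 — since each output name is derived from the asset URL's base name joined onto -dir.
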
@@ -127,45 +119,50 @@ func main() {
 		log.Printf("scraping page '%s'", baseRaw)
 	}
 
-	resRaw, err := scrape(baseRaw, css, *attr)
+	assetsRaw, err := scrape(baseRaw, css, *attr)
 	if err != nil {
 		log.Fatal(err)
 	}
 
-	if *verbose {
-		log.Printf("extracted string '%s'", resRaw)
-	}
-
-	res, err := url.Parse(resRaw)
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	if !res.IsAbs() {
-		res = res.ResolveReference(base)
-	}
-
-	if *verbose {
-		log.Printf("downloading file '%s'", res.String())
-	}
-
-	var buff bytes.Buffer
-	if err := download(res.String(), &buff); err != nil {
-		log.Fatal(err)
-	}
-
-	var path string
-	if flag.NArg() > 2 {
-		path = flag.Arg(2)
-	} else {
-		path = filepath.Base(res.Path)
-	}
-
-	if *verbose {
-		log.Printf("writing file '%s'", path)
-	}
-
-	if err := export(path, &buff); err != nil {
-		log.Fatal(err)
-	}
+	var wg sync.WaitGroup
+	for _, assetRaw := range assetsRaw {
+		wg.Add(1)
+		go func(assetRaw string) {
+			defer wg.Done()
+
+			if *verbose {
+				log.Printf("parsing asset string '%s'", assetRaw)
+			}
+
+			asset, err := url.Parse(assetRaw)
+			if err != nil {
+				log.Fatal(err)
+			}
+
+			if !asset.IsAbs() {
+				asset = asset.ResolveReference(base)
+			}
+
+			if *verbose {
+				log.Printf("downloading file '%s'", asset.String())
+			}
+
+			var buff bytes.Buffer
+			if err := download(asset.String(), &buff); err != nil {
+				log.Fatal(err)
+			}
+
+			path := filepath.Join(*dir, filepath.Base(asset.Path))
+
+			if *verbose {
+				log.Printf("writing file '%s'", path)
+			}
+
+			if err := export(path, &buff); err != nil {
+				log.Fatal(err)
+			}
+		}(assetRaw)
+	}
+
+	wg.Wait()
 }
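
For reference, a self-contained sketch of the fan-out pattern main now uses — one goroutine per asset, joined with a sync.WaitGroup. The URLs are placeholders and the actual download/export steps are stubbed with a log call. Per the net/url docs, ResolveReference is a method on the base URL that resolves a reference against it:

    package main

    import (
        "log"
        "net/url"
        "sync"
    )

    func main() {
        base, err := url.Parse("http://example.com/gallery/") // placeholder base page
        if err != nil {
            log.Fatal(err)
        }

        assetsRaw := []string{"a.png", "/img/b.png", "http://cdn.example.com/c.png"}

        var wg sync.WaitGroup
        for _, assetRaw := range assetsRaw {
            wg.Add(1)
            go func(assetRaw string) { // pass the loop variable so each goroutine gets its own copy
                defer wg.Done()

                asset, err := url.Parse(assetRaw)
                if err != nil {
                    log.Println(err)
                    return
                }

                // Relative references are resolved against the page URL.
                if !asset.IsAbs() {
                    asset = base.ResolveReference(asset)
                }

                log.Printf("would download '%s'", asset) // download/export stubbed out
            }(assetRaw)
        }

        wg.Wait() // block until every worker finishes
    }

Handing the loop variable to the goroutine as a parameter gives each worker its own copy rather than a shared reference, the same choice the commit's version of main makes with its go func(assetRaw string) { ... }(assetRaw) form.
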