diff --git a/.gitignore b/.gitignore
index 112ac9d..c508035 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
 search-scrape
+web_cache
diff --git a/scrape.go b/scrape.go
index 238d404..ef3d3f9 100644
--- a/scrape.go
+++ b/scrape.go
@@ -22,13 +22,6 @@
 
 package main
 
-import "log"
-
 func main() {
-	urlFormat := "http://tabelog.com/en/kanagawa/rstLst/{{.Page}}/"
-	tabelogReviews := "tabelog.json"
-
-	if err := scrapeTabelog(tabelogReviews, urlFormat); err != nil {
-		log.Fatal(err)
-	}
+	scrapeTabelog("http://tabelog.com/en/kanagawa/rstLst/1/", "tabelog.json", "web_cache")
 }
diff --git a/tabelog.go b/tabelog.go
index f034927..21b60f0 100644
--- a/tabelog.go
+++ b/tabelog.go
@@ -23,14 +23,13 @@
 package main
 
 import (
-	"bytes"
 	"encoding/json"
 	"io/ioutil"
 	"log"
+	"net/url"
 	"strconv"
 	"strings"
 	"sync"
-	"text/template"
 
 	"github.com/PuerkitoBio/goquery"
 )
@@ -50,15 +49,31 @@ type tabelogReview struct {
 	Cost       float64
 	Drinks     float64
 	Url        string
+	Raw        string
 }
 
-func dumpReviews(filename string, in chan tabelogReview, out chan error) {
-	count := 1
+func makeAbsUrl(base, ref string) string {
+	b, err := url.Parse(base)
+	if err != nil {
+		log.Fatal(err)
+	}
+	r, err := url.Parse(ref)
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	return b.ResolveReference(r).String()
+}
+
+func dumpReviews(filename string, rc chan tabelogReview, done chan struct{}) {
+	defer close(done)
+
+	count := 1
 	var reviews []tabelogReview
 
 	for {
-		if review, ok := <-in; ok {
-			log.Printf("%d\t%s", count, review.Name)
+		if review, ok := <-rc; ok {
+			log.Printf("%s (%d)", review.Name, count)
 			reviews = append(reviews, review)
 			count++
 		} else {
@@ -68,19 +83,20 @@
 
 	js, err := json.MarshalIndent(reviews, "", "    ")
 	if err != nil {
-		out <- err
-		return
+		log.Fatal(err)
 	}
 
-	out <- ioutil.WriteFile(filename, js, 0644)
+	if err := ioutil.WriteFile(filename, js, 0644); err != nil {
+		log.Fatal(err)
+	}
 }
 
-func scrapeReview(url string, out chan tabelogReview, wg *sync.WaitGroup) {
+func scrapeReview(url string, rc chan tabelogReview, wg *sync.WaitGroup, wc *webCache) {
 	defer wg.Done()
 
-	doc, err := goquery.NewDocument(url)
+	doc, err := wc.fetchUrl(url)
 	if err != nil {
-		return
+		log.Fatal(err)
 	}
 
 	addresses := doc.Find("p.rd-detail-info__rst-address")
@@ -91,7 +107,7 @@
 
 	var review tabelogReview
 	review.Url = url
-	review.Name = doc.Find("div.rd-header__headline > h2 > a").Text()
+	review.Name = doc.Find("a.rd-header__rst-name-main").Text()
 	review.Address = strings.TrimSpace(addresses.First().Text())
 
 	if review.Dishes, err = strconv.ParseFloat(doc.Find("#js-rating-detail > dd:nth-child(2)").Text(), 8); err != nil {
@@ -110,46 +126,41 @@
 		return
 	}
 
-	out <- review
+	rc <- review
 }
 
-func scrapeIndex(url string, out chan tabelogReview) error {
-	doc, err := goquery.NewDocument(url)
+func scrapeIndex(url string, out chan tabelogReview, wc *webCache) {
+	doc, err := wc.fetchUrl(url)
 	if err != nil {
-		return err
+		log.Fatal(err)
 	}
 
 	var wg sync.WaitGroup
 	doc.Find("div.list-rst__header > p > a").Each(func(index int, sel *goquery.Selection) {
 		if href, ok := sel.Attr("href"); ok {
 			wg.Add(1)
-			go scrapeReview(href, out, &wg)
+			go scrapeReview(makeAbsUrl(url, href), out, &wg, wc)
 		}
 	})
-
 	wg.Wait()
-	return nil
+
+	if href, ok := doc.Find("a.c-pagination__target--next").Attr("href"); ok {
+		scrapeIndex(makeAbsUrl(url, href), out, wc)
+	}
 }
 
-func scrapeTabelog(filename, url string) error {
-	out := make(chan tabelogReview)
-	in := make(chan error)
-	go dumpReviews(filename, out, in)
-
-	t := template.New("tabelog")
-	t.Parse(url)
-
-	for i := 1; i <= 2; i++ {
-		var url bytes.Buffer
-		if err := t.Execute(&url, tabelogParams{i}); err != nil {
-			return err
-		}
-
-		if err := scrapeIndex(string(url.Bytes()), out); err != nil {
-			return err
-		}
+func scrapeTabelog(url, jsonFile, cacheDir string) {
+	wc, err := newWebCache(cacheDir)
+	if err != nil {
+		log.Fatal(err)
 	}
 
-	close(out)
-	return <-in
+	done := make(chan struct{})
+	rc := make(chan tabelogReview)
+	go dumpReviews(jsonFile, rc, done)
+
+	scrapeIndex(url, rc, wc)
+
+	close(rc)
+	<-done
 }
diff --git a/webcache.go b/webcache.go
new file mode 100644
index 0000000..d6afedd
--- /dev/null
+++ b/webcache.go
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2015 Alex Yatskov
+ * Author: Alex Yatskov
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ * the Software, and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+package main
+
+import (
+	"bytes"
+	"crypto/md5"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"os"
+	"path"
+
+	"github.com/PuerkitoBio/goquery"
+)
+
+type webCache struct {
+	baseDir string
+}
+
+func newWebCache(baseDir string) (*webCache, error) {
+	if err := os.MkdirAll(baseDir, 0755); err != nil {
+		return nil, err
+	}
+
+	return &webCache{baseDir: baseDir}, nil
+}
+
+// urlToLocal derives a stable cache filename from the md5 digest of the url.
+func (c *webCache) urlToLocal(url string) string {
+	hash := md5.New()
+	hash.Write([]byte(url))
+	return path.Join(c.baseDir, fmt.Sprintf("%x", hash.Sum(nil)))
+}
+
+func (c *webCache) fetchUrl(url string) (*goquery.Document, error) {
+	localPath := c.urlToLocal(url)
+
+	// serve the document from the local cache when it has been fetched before
+	if file, err := os.Open(localPath); err == nil {
+		defer file.Close()
+		return goquery.NewDocumentFromReader(file)
+	} else {
+		// cache miss; download the page and persist it for future runs
+		res, err := http.Get(url)
+		if err != nil {
+			return nil, err
+		}
+		defer res.Body.Close()
+
+		var buff bytes.Buffer
+		if _, err := buff.ReadFrom(res.Body); err != nil {
+			return nil, err
+		}
+
+		if err := ioutil.WriteFile(localPath, buff.Bytes(), 0644); err != nil {
+			return nil, err
+		}
+
+		return goquery.NewDocumentFromReader(&buff)
+	}
+}
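
For reference, a minimal sketch of how the new webCache type can be driven on its own, as a scratch main placed next to webcache.go; the URL and the title lookup are illustrative and not part of the commit:

    package main

    import (
    	"fmt"
    	"log"
    )

    func main() {
    	// create the cache directory (web_cache) if it does not exist yet
    	wc, err := newWebCache("web_cache")
    	if err != nil {
    		log.Fatal(err)
    	}

    	// the first fetch hits the network and writes web_cache/<md5 of url>;
    	// repeated fetches of the same url parse the cached file instead
    	doc, err := wc.fetchUrl("http://tabelog.com/en/kanagawa/rstLst/1/")
    	if err != nil {
    		log.Fatal(err)
    	}

    	fmt.Println(doc.Find("title").Text())
    }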