Cache scraping
parent 6b8e33b4d2
commit 27bf94505c
.gitignore (vendored)
@@ -1 +1,2 @@
 search-scrape
+web_cache
@@ -22,13 +22,6 @@
 
 package main
 
-import "log"
-
 func main() {
-    urlFormat := "http://tabelog.com/en/kanagawa/rstLst/{{.Page}}/"
-    tabelogReviews := "tabelog.json"
-
-    if err := scrapeTabelog(tabelogReviews, urlFormat); err != nil {
-        log.Fatal(err)
-    }
+    scrapeTabelog("http://tabelog.com/en/kanagawa/rstLst/1/", "tabelog.json", "web_cache")
 }
tabelog.go (89 changed lines)
@@ -23,14 +23,13 @@
 package main
 
 import (
-    "bytes"
     "encoding/json"
    "io/ioutil"
     "log"
+    "net/url"
     "strconv"
     "strings"
     "sync"
-    "text/template"
 
     "github.com/PuerkitoBio/goquery"
 )
@@ -50,15 +49,31 @@ type tabelogReview struct {
     Cost   float64
     Drinks float64
     Url    string
+    Raw    string
 }
 
-func dumpReviews(filename string, in chan tabelogReview, out chan error) {
-    count := 1
+func makeAbsUrl(base, ref string) string {
+    b, err := url.Parse(base)
+    if err != nil {
+        log.Fatal(err)
+    }
+
+    r, err := url.Parse(ref)
+    if err != nil {
+        log.Fatal(err)
+    }
+
+    return b.ResolveReference(r).String()
+}
+
+func dumpReviews(filename string, rc chan tabelogReview, cond *sync.Cond) {
+    defer cond.Signal()
+
+    count := 1
     var reviews []tabelogReview
     for {
-        if review, ok := <-in; ok {
-            log.Printf("%d\t%s", count, review.Name)
+        if review, ok := <-rc; ok {
+            log.Printf("%s (%d)", review.Name, count)
             reviews = append(reviews, review)
             count++
         } else {
@@ -68,19 +83,20 @@ func dumpReviews(filename string, in chan tabelogReview, out chan error) {
 
     js, err := json.MarshalIndent(reviews, "", " ")
     if err != nil {
-        out <- err
-        return
+        log.Fatal(err)
     }
 
-    out <- ioutil.WriteFile(filename, js, 0644)
+    if err := ioutil.WriteFile(filename, js, 0644); err != nil {
+        log.Fatal(err)
+    }
 }
 
-func scrapeReview(url string, out chan tabelogReview, wg *sync.WaitGroup) {
+func scrapeReview(url string, rc chan tabelogReview, wg *sync.WaitGroup, wc *webCache) {
     defer wg.Done()
 
-    doc, err := goquery.NewDocument(url)
+    doc, err := wc.fetchUrl(url)
     if err != nil {
-        return
+        log.Fatal(err)
     }
 
     addresses := doc.Find("p.rd-detail-info__rst-address")
@@ -91,7 +107,7 @@ func scrapeReview(url string, out chan tabelogReview, wg *sync.WaitGroup) {
     var review tabelogReview
 
     review.Url = url
-    review.Name = doc.Find("div.rd-header__headline > h2 > a").Text()
+    review.Name = doc.Find("a.rd-header__rst-name-main").Text()
     review.Address = strings.TrimSpace(addresses.First().Text())
 
     if review.Dishes, err = strconv.ParseFloat(doc.Find("#js-rating-detail > dd:nth-child(2)").Text(), 8); err != nil {
@@ -110,46 +126,41 @@ func scrapeReview(url string, out chan tabelogReview, wg *sync.WaitGroup) {
         return
     }
 
-    out <- review
+    rc <- review
 }
 
-func scrapeIndex(url string, out chan tabelogReview) error {
-    doc, err := goquery.NewDocument(url)
+func scrapeIndex(url string, out chan tabelogReview, wc *webCache) {
+    doc, err := wc.fetchUrl(url)
     if err != nil {
-        return err
+        log.Fatal(err)
     }
 
     var wg sync.WaitGroup
     doc.Find("div.list-rst__header > p > a").Each(func(index int, sel *goquery.Selection) {
         if href, ok := sel.Attr("href"); ok {
             wg.Add(1)
-            go scrapeReview(href, out, &wg)
+            go scrapeReview(makeAbsUrl(url, href), out, &wg, wc)
         }
     })
 
     wg.Wait()
-    return nil
-}
-
-func scrapeTabelog(filename, url string) error {
-    out := make(chan tabelogReview)
-    in := make(chan error)
-    go dumpReviews(filename, out, in)
-
-    t := template.New("tabelog")
-    t.Parse(url)
-
-    for i := 1; i <= 2; i++ {
-        var url bytes.Buffer
-        if err := t.Execute(&url, tabelogParams{i}); err != nil {
-            return err
-        }
-
-        if err := scrapeIndex(string(url.Bytes()), out); err != nil {
-            return err
+    if href, ok := doc.Find("a.c-pagination__target--next").Attr("href"); ok {
+        scrapeIndex(makeAbsUrl(url, href), out, wc)
     }
 }
 
-    close(out)
-    return <-in
+func scrapeTabelog(url, jsonFile, cacheDir string) {
+    wc, err := newWebCache(cacheDir)
+    if err != nil {
+        log.Fatal(err)
+    }
+
+    var cond sync.Cond
+    rc := make(chan tabelogReview)
+    go dumpReviews(jsonFile, rc, &cond)
+
+    scrapeIndex(url, rc, wc)
+
+    close(rc)
+    cond.Wait()
 }
webcache.go (new file, 79 lines)
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2015 Alex Yatskov <alex@foosoft.net>
+ * Author: Alex Yatskov <alex@foosoft.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ * the Software, and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+package main
+
+import (
+    "bytes"
+    "crypto/md5"
+    "fmt"
+    "io/ioutil"
+    "net/http"
+    "os"
+    "path"
+
+    "github.com/PuerkitoBio/goquery"
+)
+
+type webCache struct {
+    baseDir string
+}
+
+func newWebCache(baseDir string) (*webCache, error) {
+    if err := os.MkdirAll(baseDir, 0755); err != nil {
+        return nil, err
+    }
+
+    return &webCache{baseDir: baseDir}, nil
+}
+
+func (c *webCache) urlToLocal(url string) string {
+    hash := md5.New()
+    hash.Write([]byte(url))
+    return path.Join(c.baseDir, fmt.Sprintf("%x", hash.Sum(nil)))
+}
+
+func (c *webCache) fetchUrl(url string) (*goquery.Document, error) {
+    localPath := c.urlToLocal(url)
+
+    if file, err := os.Open(localPath); err == nil {
+        defer file.Close()
+        return goquery.NewDocumentFromReader(file)
+    } else {
+        res, err := http.Get(url)
+        if err != nil {
+            return nil, err
+        }
+        defer res.Body.Close()
+
+        var buff bytes.Buffer
+        if _, err := buff.ReadFrom(res.Body); err != nil {
+            return nil, err
+        }
+
+        if err := ioutil.WriteFile(localPath, buff.Bytes(), 0644); err != nil {
+            return nil, err
+        }
+
+        return goquery.NewDocumentFromReader(&buff)
+    }
+}