Updating scraper

parent 32862a2797
commit b499742cfa

tabelog.go (31 changed lines)
@@ -53,11 +53,14 @@ type tabelogReview struct {
 }
 
 func dumpReviews(filename string, in chan tabelogReview, out chan error) {
+	count := 1
+
 	var reviews []tabelogReview
 	for {
 		if review, ok := <-in; ok {
-			log.Println(review.Name)
+			log.Printf("%d\t%s", count, review.Name)
 			reviews = append(reviews, review)
+			count++
 		} else {
 			break
 		}
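The hunk above threads a running counter through the channel-drain loop in dumpReviews. Below is a minimal standalone sketch of that loop; the file-writing step the real function performs is stubbed out, and "reviews.json" is only a placeholder filename.

package main

import "log"

// Trimmed-down review type; the real struct in tabelog.go has more fields.
type tabelogReview struct {
	Name string
}

// Same receive loop as the updated dumpReviews: drain `in` until it is
// closed, log each review with its running index, then report on `out`.
func dumpReviews(filename string, in chan tabelogReview, out chan error) {
	count := 1

	var reviews []tabelogReview
	for {
		if review, ok := <-in; ok {
			log.Printf("%d\t%s", count, review.Name)
			reviews = append(reviews, review)
			count++
		} else {
			break // channel closed, nothing more to collect
		}
	}

	out <- nil // the real function serializes `reviews` to filename here
}

func main() {
	in := make(chan tabelogReview)
	out := make(chan error)
	go dumpReviews("reviews.json", in, out) // placeholder filename
	in <- tabelogReview{Name: "example"}
	close(in)
	log.Println(<-out)
}

The same drain could also be written as for review := range in { ... }, the idiomatic form for reading a channel until it is closed.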
@@ -77,7 +80,6 @@ func scrapeReview(url string, out chan tabelogReview, wg *sync.WaitGroup) {
 
 	doc, err := goquery.NewDocument(url)
 	if err != nil {
-		log.Print(err)
 		return
 	}
 
@@ -111,21 +113,22 @@ func scrapeReview(url string, out chan tabelogReview, wg *sync.WaitGroup) {
 	out <- review
 }
 
-func scrapeIndex(url string, out chan tabelogReview, wg *sync.WaitGroup) {
-	defer wg.Done()
-
+func scrapeIndex(url string, out chan tabelogReview) error {
 	doc, err := goquery.NewDocument(url)
 	if err != nil {
-		log.Print(err)
-		return
+		return err
 	}
 
+	var wg sync.WaitGroup
 	doc.Find("div.list-rst__header > p > a").Each(func(index int, sel *goquery.Selection) {
 		if href, ok := sel.Attr("href"); ok {
 			wg.Add(1)
-			go scrapeReview(href, out, wg)
+			go scrapeReview(href, out, &wg)
 		}
 	})
+
+	wg.Wait()
+	return nil
 }
 
 func scrapeTabelog(filename, url string) error {
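Pieced together, the new scrapeIndex reads as follows. This is a reconstruction from the hunk above; it assumes goquery and sync are already imported in tabelog.go and that scrapeReview calls wg.Done() when it finishes, as its *sync.WaitGroup parameter suggests.

func scrapeIndex(url string, out chan tabelogReview) error {
	// Fetch and parse the index page; errors now go back to the caller
	// instead of being logged.
	doc, err := goquery.NewDocument(url)
	if err != nil {
		return err
	}

	// The WaitGroup is now local: this function owns the goroutines it spawns.
	var wg sync.WaitGroup
	doc.Find("div.list-rst__header > p > a").Each(func(index int, sel *goquery.Selection) {
		if href, ok := sel.Attr("href"); ok {
			wg.Add(1)
			go scrapeReview(href, out, &wg) // one goroutine per review link
		}
	})

	// Block until every scrapeReview goroutine spawned above has finished.
	wg.Wait()
	return nil
}

Moving the WaitGroup inside the function lets the caller treat scrapeIndex as a plain blocking call that either succeeds or returns an error, which is exactly how the next hunk uses it.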
@@ -136,18 +139,16 @@ func scrapeTabelog(filename, url string) error {
 	t := template.New("tabelog")
 	t.Parse(url)
 
-	var wg sync.WaitGroup
-	for i := 1; i <= 1; i++ {
+	for i := 1; i <= 2; i++ {
 		var url bytes.Buffer
 		if err := t.Execute(&url, tabelogParams{i}); err != nil {
-			log.Fatal(err)
+			return err
 		}
 
-		wg.Add(1)
-		go scrapeIndex(string(url.Bytes()), out, &wg)
+		if err := scrapeIndex(string(url.Bytes()), out); err != nil {
+			return err
+		}
 	}
 
-	wg.Wait()
-
 	close(out)
 	return <-in
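With this hunk, scrapeTabelog walks the index pages sequentially and propagates errors instead of calling log.Fatal. Below is the tail of the function reassembled from the diff, with comments; the creation of the out/in channels and the launch of dumpReviews happen earlier in the function and are not shown in this commit, so they are assumed here.

	t := template.New("tabelog")
	t.Parse(url) // url is a URL template; tabelogParams{i} supplies the page number

	for i := 1; i <= 2; i++ {
		// Render the index URL for page i.
		var url bytes.Buffer
		if err := t.Execute(&url, tabelogParams{i}); err != nil {
			return err // was log.Fatal(err)
		}

		// Each index page is scraped in turn; scrapeIndex blocks until its
		// review goroutines finish and returns any fetch error.
		if err := scrapeIndex(string(url.Bytes()), out); err != nil {
			return err
		}
	}

	close(out)  // tell dumpReviews that no more reviews are coming
	return <-in // wait for dumpReviews to report its result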