/*
 * Copyright (c) 2015 Alex Yatskov <alex@foosoft.net>
 * Author: Alex Yatskov <alex@foosoft.net>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
package main

import (
	"log"
	"net/url"
	"sync"

	"github.com/PuerkitoBio/goquery"
)

// restaurant describes a single scraped venue: identifying details pulled
// from a review page, the feature ratings extracted for it, and the
// coordinates added later by geocoding.
type restaurant struct {
	name    string
	address string
	url     string

	features map[string]float64

	latitude  float64
	longitude float64
}

// scraper abstracts a particular review site. index pulls the URL of the
// next index page (empty when there is none) and the review page URLs out
// of an index document; review pulls a restaurant's name, address, and
// feature ratings out of a review document.
type scraper interface {
	index(doc *goquery.Document) (string, []string)
	review(doc *goquery.Document) (string, string, map[string]float64, error)
}
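
// To illustrate the contract, a scraper for a hypothetical review site could
// be sketched as below. The type name and CSS selectors are invented for the
// example and would have to match the target site's real markup; review would
// be implemented analogously, pulling the name, address, and ratings out of a
// review page.
//
//	type exampleScraper struct{}
//
//	func (exampleScraper) index(doc *goquery.Document) (string, []string) {
//		next, _ := doc.Find("a.next-page").Attr("href")
//
//		var reviewUrls []string
//		doc.Find("a.review-link").Each(func(i int, s *goquery.Selection) {
//			if href, ok := s.Attr("href"); ok {
//				reviewUrls = append(reviewUrls, href)
//			}
//		})
//
//		return next, reviewUrls
//	}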

// makeAbsUrl resolves a possibly relative reference against the URL of the
// page it was found on, returning an absolute URL string.
func makeAbsUrl(ref, base string) (string, error) {
	b, err := url.Parse(base)
	if err != nil {
		return "", err
	}

	r, err := url.Parse(ref)
	if err != nil {
		return "", err
	}

	return b.ResolveReference(r).String(), nil
}
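
// For example, resolving a relative review link against its index page
// (URLs invented for illustration):
//
//	abs, err := makeAbsUrl("/reviews/123", "https://example.com/list?page=2")
//	// on success, abs == "https://example.com/reviews/123"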

// decodeReviews geocodes each restaurant received on in, attaching latitude
// and longitude before forwarding it on out. Restaurants whose addresses
// cannot be geocoded are logged and dropped. Once in is drained, out is
// closed so downstream consumers can terminate.
func decodeReviews(in chan restaurant, out chan restaurant, gc *geoCache) {
	for res := range in {
		pos, err := gc.decode(res.address)
		if err != nil {
			log.Printf("failed to decode address for %s (%v)", res.url, err)
			continue
		}

		res.latitude = pos.Latitude
		res.longitude = pos.Longitude
		out <- res
	}

	close(out)
}
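
// Note: geoCache and webCache are defined elsewhere in this package. This
// file only relies on geoCache exposing a decode method that maps a street
// address to a value with Latitude and Longitude fields, and on webCache
// exposing a load method that fetches a URL as a *goquery.Document.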

// scrapeReview fetches a single review page via the web cache, extracts the
// restaurant's details with the provided scraper, and sends the result on
// out. Failures are logged and the review is skipped.
func scrapeReview(url string, out chan restaurant, wc *webCache, group *sync.WaitGroup, scr scraper) {
	defer group.Done()

	doc, err := wc.load(url)
	if err != nil {
		log.Printf("failed to load review at %s (%v)", url, err)
		return
	}

	name, address, features, err := scr.review(doc)
	if err != nil {
		log.Printf("failed to scrape review at %s (%v)", url, err)
		return
	}

	out <- restaurant{
		name:     name,
		address:  address,
		features: features,
		url:      url,
	}
}

// scrapeIndex walks the paginated index starting at indexUrl. The reviews
// linked from each index page are scraped concurrently, with the per-page
// WaitGroup bounding the number of in-flight requests to one page's worth;
// pagination then continues via tail recursion. out is closed when the last
// page (or a page that fails to load) has been handled, so that
// decodeReviews and scrape can terminate.
func scrapeIndex(indexUrl string, out chan restaurant, wc *webCache, scr scraper) {
	doc, err := wc.load(indexUrl)
	if err != nil {
		log.Printf("failed to load index at %s (%v)", indexUrl, err)
		close(out)
		return
	}

	nextIndexUrl, reviewUrls := scr.index(doc)

	var group sync.WaitGroup
	for _, reviewUrl := range reviewUrls {
		absUrl, err := makeAbsUrl(reviewUrl, indexUrl)
		if err != nil {
			log.Fatal(err)
		}

		group.Add(1)
		go scrapeReview(absUrl, out, wc, &group, scr)
	}
	group.Wait()

	if nextIndexUrl == "" {
		close(out)
	} else {
		absUrl, err := makeAbsUrl(nextIndexUrl, indexUrl)
		if err != nil {
			log.Fatal(err)
		}

		scrapeIndex(absUrl, out, wc, scr)
	}
}

// scrape runs the full pipeline for one site: scrapeIndex feeds raw
// restaurants into in, decodeReviews geocodes them onto out, and the
// geocoded results are collected here until out is closed.
func scrape(url string, wc *webCache, gc *geoCache, scr scraper) []restaurant {
	out := make(chan restaurant)
	in := make(chan restaurant)

	go scrapeIndex(url, in, wc, scr)
	go decodeReviews(in, out, gc)

	var results []restaurant
	for res := range out {
		results = append(results, res)
	}

	return results
}
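
// A plausible call site, assuming webCache/geoCache constructors and a
// scraper implementation defined elsewhere in the package (the names below
// are illustrative, not this package's actual API):
//
//	wc := newWebCache("cache/web")
//	gc := newGeoCache("cache/geo.json")
//	restaurants := scrape("https://example.com/restaurants", wc, gc, exampleScraper{})
//	log.Printf("scraped %d restaurants", len(restaurants))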