Making code more go-like
commit 347ae73fe5
parent 260b919733
data/tabelog.json (11860 changes)
File diff suppressed because it is too large
@@ -20,7 +20,7 @@
  * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  */
 
-package main
+package geo
 
 import (
     "encoding/json"
@@ -31,22 +31,22 @@ import (
     "github.com/kellydunn/golang-geo"
 )
 
-type geoCoord struct {
+type Coord struct {
     Latitude  float64
     Longitude float64
 }
 
-type geoCache struct {
+type Cache struct {
     filename string
-    data     map[string]geoCoord
+    data     map[string]Coord
     ticker   *time.Ticker
     coder    geo.GoogleGeocoder
 }
 
-func newGeoCache(filename string) (*geoCache, error) {
-    cache := &geoCache{
+func NewCache(filename string) (*Cache, error) {
+    cache := &Cache{
         filename: filename,
-        data:     make(map[string]geoCoord),
+        data:     make(map[string]Coord),
         ticker:   time.NewTicker(time.Millisecond * 200),
     }
 
@@ -57,7 +57,7 @@ func newGeoCache(filename string) (*geoCache, error) {
     return cache, nil
 }
 
-func (c *geoCache) load() error {
+func (c *Cache) load() error {
     file, err := os.Open(c.filename)
     if os.IsNotExist(err) {
         return nil
@@ -70,7 +70,7 @@ func (c *geoCache) load() error {
     return json.NewDecoder(file).Decode(&c.data)
 }
 
-func (c *geoCache) save() error {
+func (c *Cache) Save() error {
     js, err := json.MarshalIndent(c.data, "", " ")
     if err != nil {
         return err
@@ -79,7 +79,7 @@ func (c *geoCache) save() error {
     return ioutil.WriteFile(c.filename, js, 0644)
 }
 
-func (c *geoCache) decode(address string) (geoCoord, error) {
+func (c *Cache) Decode(address string) (Coord, error) {
     if coord, ok := c.data[address]; ok {
         return coord, nil
     }
@@ -88,10 +88,10 @@ func (c *geoCache) decode(address string) (geoCoord, error) {
 
     point, err := c.coder.Geocode(address)
     if err != nil {
-        return geoCoord{}, err
+        return Coord{}, err
     }
 
-    coord := geoCoord{point.Lat(), point.Lng()}
+    coord := Coord{point.Lat(), point.Lng()}
     c.data[address] = coord
     return coord, nil
 }
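For reference, a minimal sketch of how a caller might use the newly exported geo API after this change; the cache file path and address below are placeholders, not values taken from this commit:

package main

import (
    "fmt"
    "log"

    "github.com/FooSoft/scrape/geo"
)

func main() {
    // NewCache loads any previously saved coordinates from the JSON file
    // (the path here is a placeholder).
    cache, err := geo.NewCache("geocache.json")
    if err != nil {
        log.Fatal(err)
    }

    // Decode returns a cached Coord when one exists for the address;
    // otherwise it queries the geocoder and memoizes the result.
    coord, err := cache.Decode("1-1-2 Oshiage, Sumida, Tokyo")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(coord.Latitude, coord.Longitude)

    // Save writes the accumulated coordinates back to disk.
    if err := cache.Save(); err != nil {
        log.Fatal(err)
    }
}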
tabelog.go (22 changes)
@@ -31,6 +31,8 @@ import (
     "strings"
     "sync"
 
+    "github.com/FooSoft/scrape/geo"
+    "github.com/FooSoft/scrape/web"
     "github.com/PuerkitoBio/goquery"
 )
 
@@ -89,10 +91,10 @@ func dumpReviews(filename string, in chan tabelogReview) error {
     return nil
 }
 
-func decodeReviews(in chan tabelogReview, out chan tabelogReview, gc *geoCache) {
+func decodeReviews(in chan tabelogReview, out chan tabelogReview, gc *geo.Cache) {
     for {
         if review, ok := <-in; ok {
-            coord, err := gc.decode(review.Address)
+            coord, err := gc.Decode(review.Address)
             if err == nil {
                 review.Latitude = coord.Latitude
                 review.Longitude = coord.Longitude
@@ -107,10 +109,10 @@ func decodeReviews(in chan tabelogReview, out chan tabelogReview, gc *geoCache)
     }
 }
 
-func scrapeReview(url string, out chan tabelogReview, wc *webCache, wg *sync.WaitGroup) {
+func scrapeReview(url string, out chan tabelogReview, wc *web.Cache, wg *sync.WaitGroup) {
     defer wg.Done()
 
-    doc, err := wc.load(url)
+    doc, err := wc.Load(url)
     if err != nil {
         log.Printf("failed to scrape review at %s (%v)", url, err)
         return
@@ -146,8 +148,8 @@ func scrapeReview(url string, out chan tabelogReview, wc *webCache, wg *sync.Wai
     out <- review
 }
 
-func scrapeIndex(url string, out chan tabelogReview, wc *webCache, wg *sync.WaitGroup) {
-    doc, err := wc.load(url)
+func scrapeIndex(url string, out chan tabelogReview, wc *web.Cache, wg *sync.WaitGroup) {
+    doc, err := wc.Load(url)
     if err != nil {
         log.Printf("failed to scrape index at %s (%v)", url, err)
         return
@@ -176,7 +178,7 @@ func scrapeIndex(url string, out chan tabelogReview, wc *webCache, wg *sync.Wait
     }
 }
 
-func scrapeReviews(url string, out chan tabelogReview, wc *webCache) error {
+func scrapeReviews(url string, out chan tabelogReview, wc *web.Cache) error {
     var wg sync.WaitGroup
     scrapeIndex(url, out, wc, &wg)
     wg.Wait()
@@ -186,12 +188,12 @@ func scrapeReviews(url string, out chan tabelogReview, wc *webCache) error {
 }
 
 func scrapeTabelog(url, resultFile, webCacheDir, geoCacheFile string) error {
-    wc, err := newWebCache(webCacheDir)
+    wc, err := web.NewCache(webCacheDir)
     if err != nil {
         return err
     }
 
-    gc, err := newGeoCache(geoCacheFile)
+    gc, err := geo.NewCache(geoCacheFile)
     if err != nil {
         return err
     }
@@ -203,5 +205,5 @@ func scrapeTabelog(url, resultFile, webCacheDir, geoCacheFile string) error {
     scrapeReviews(url, scrapeChan, wc)
     dumpReviews(resultFile, decodeChan)
 
-    return gc.save()
+    return gc.Save()
 }
@@ -20,7 +20,7 @@
  * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  */
 
-package main
+package web
 
 import (
     "bytes"
@@ -35,17 +35,17 @@ import (
     "github.com/PuerkitoBio/goquery"
 )
 
-type webCache struct {
+type Cache struct {
     directory string
     ticker    *time.Ticker
 }
 
-func newWebCache(directory string) (*webCache, error) {
+func NewCache(directory string) (*Cache, error) {
     if err := os.MkdirAll(directory, 0755); err != nil {
         return nil, err
     }
 
-    cache := &webCache{
+    cache := &Cache{
         directory: directory,
         ticker:    time.NewTicker(time.Millisecond * 100),
     }
@@ -53,13 +53,13 @@ func newWebCache(directory string) (*webCache, error) {
     return cache, nil
 }
 
-func (c *webCache) urlToLocal(url string) string {
+func (c *Cache) urlToLocal(url string) string {
     hash := md5.New()
     hash.Write([]byte(url))
     return path.Join(c.directory, fmt.Sprintf("%x.html", hash.Sum(nil)))
 }
 
-func (c *webCache) load(url string) (*goquery.Document, error) {
+func (c *Cache) Load(url string) (*goquery.Document, error) {
     localPath := c.urlToLocal(url)
 
     if file, err := os.Open(localPath); err == nil {