add caching support

Alex Yatskov 2018-12-08 11:18:51 -08:00
parent 8400387d2a
commit 989bb18482
9 changed files with 476 additions and 450 deletions
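
The user-facing surface of this change is small: *Goldsmith gains a chainable Cache(cacheDir) method that enables a file cache for the whole plugin chain, while Begin, Chain, and End keep their existing roles. Below is a minimal usage sketch based on the signatures added in this commit; the import path, the directory names, and the no-op plugin are assumptions made for illustration and are not part of the commit.

package main

import (
    "log"

    "github.com/FooSoft/goldsmith" // import path assumed
)

// noop is a placeholder plugin; any type with a Name() method satisfies Plugin.
type noop struct{}

func (*noop) Name() string { return "noop" }

func main() {
    errs := goldsmith.Begin("content"). // source directory (assumed)
        Cache(".gs-cache").             // new in this commit: enable the file cache
        Chain(new(noop)).               // run plugins as before
        End("build")                    // target directory (assumed)

    for _, err := range errs {
        log.Print(err)
    }
}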

cache.go (new file, 98 lines)

@@ -0,0 +1,98 @@
package goldsmith

import (
    "encoding/binary"
    "fmt"
    "hash/crc32"
    "os"
    "path/filepath"
    "sort"
)

type fileCache struct {
    baseDir string
}

func (c *fileCache) retrieveFile(context *Context, outputPath string, inputFiles []*File) (*File, error) {
    cachePath, err := c.buildCachePath(context, outputPath, inputFiles)
    if err != nil {
        return nil, err
    }

    outputFile, err := context.CreateFileFromAsset(outputPath, cachePath)
    if err != nil {
        if os.IsNotExist(err) {
            return nil, nil
        }
        return nil, err
    }

    return outputFile, nil
}

func (c *fileCache) storeFile(context *Context, outputFile *File, inputFiles []*File) error {
    cachePath, err := c.buildCachePath(context, outputFile.Path(), inputFiles)
    if err != nil {
        return err
    }

    if err := os.MkdirAll(c.baseDir, 0755); err != nil {
        return err
    }

    fp, err := os.Create(cachePath)
    if err != nil {
        return err
    }
    defer fp.Close()

    offset, err := outputFile.Seek(0, os.SEEK_CUR)
    if err != nil {
        return err
    }

    if _, err := outputFile.Seek(0, os.SEEK_SET); err != nil {
        return err
    }

    if _, err := outputFile.WriteTo(fp); err != nil {
        return err
    }

    if _, err := outputFile.Seek(offset, os.SEEK_SET); err != nil {
        return err
    }

    return nil
}

func (c *fileCache) buildCachePath(context *Context, outputPath string, inputFiles []*File) (string, error) {
    uintBuff := make([]byte, 4)
    binary.LittleEndian.PutUint32(uintBuff, context.hash)

    hasher := crc32.NewIEEE()
    hasher.Write(uintBuff)
    hasher.Write([]byte(outputPath))

    sort.Sort(FilesByPath(inputFiles))

    for _, inputFile := range inputFiles {
        fileHash, err := inputFile.hash()
        if err != nil {
            return "", err
        }

        binary.LittleEndian.PutUint32(uintBuff, fileHash)
        hasher.Write(uintBuff)
        hasher.Write([]byte(inputFile.Path()))
    }

    cachePath := filepath.Join(c.baseDir, fmt.Sprintf(
        "gs_%.8x%s",
        hasher.Sum32(),
        filepath.Ext(outputPath),
    ))

    return cachePath, nil
}
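
For orientation, buildCachePath folds the plugin-chain hash, the output path, and each sorted input file's content hash and path into a single CRC32, and names the cached artifact "gs_" plus eight hex digits plus the output extension. A tiny standalone sketch of just that naming step, using an invented hash value and path:

package main

import (
    "fmt"
    "path/filepath"
)

func main() {
    // Mirrors the fmt.Sprintf call in buildCachePath.
    var combinedHash uint32 = 0xdeadbeef // invented value
    outputPath := "css/site.min.css"     // invented path

    name := fmt.Sprintf("gs_%.8x%s", combinedHash, filepath.Ext(outputPath))
    fmt.Println(name) // prints "gs_deadbeef.css"
}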

context.go

@@ -1,61 +1,110 @@
package goldsmith
import (
"bytes"
"errors"
"os"
"runtime"
"sync"
"time"
)
type context struct {
gs *goldsmith
plug Plugin
filters []Filter
input, output chan *file
type Context struct {
goldsmith *Goldsmith
plugin Plugin
hash uint32
fileFilters []Filter
inputFiles chan *File
outputFiles chan *File
}
func (ctx *context) step() {
defer close(ctx.output)
func (*Context) CreateFileFromData(sourcePath string, data []byte) *File {
return &File{
sourcePath: sourcePath,
Meta: make(map[string]interface{}),
reader: bytes.NewReader(data),
size: int64(len(data)),
modTime: time.Now(),
}
}
func (*Context) CreateFileFromAsset(sourcePath, dataPath string) (*File, error) {
info, err := os.Stat(dataPath)
if err != nil {
return nil, err
}
if info.IsDir() {
return nil, errors.New("assets must be files")
}
file := &File{
sourcePath: sourcePath,
dataPath: dataPath,
Meta: make(map[string]interface{}),
size: info.Size(),
modTime: info.ModTime(),
}
return file, nil
}
func (ctx *Context) DispatchFile(file *File) {
ctx.outputFiles <- file
}
func (ctx *Context) DispatchAndCacheFile(outputFile *File, inputFiles ...*File) {
ctx.goldsmith.storeFile(ctx, outputFile, inputFiles)
ctx.outputFiles <- outputFile
}
func (ctx *Context) RetrieveCachedFile(outputPath string, inputFiles ...*File) *File {
return ctx.goldsmith.retrieveFile(ctx, outputPath, inputFiles)
}
func (ctx *Context) step() {
defer close(ctx.outputFiles)
var err error
var filters []Filter
if initializer, ok := ctx.plug.(Initializer); ok {
if initializer, ok := ctx.plugin.(Initializer); ok {
filters, err = initializer.Initialize(ctx)
if err != nil {
ctx.gs.fault(ctx.plug.Name(), nil, err)
ctx.goldsmith.fault(ctx.plugin.Name(), nil, err)
return
}
}
if ctx.input != nil {
processor, _ := ctx.plug.(Processor)
if ctx.inputFiles != nil {
processor, _ := ctx.plugin.(Processor)
var wg sync.WaitGroup
for i := 0; i < runtime.NumCPU(); i++ {
wg.Add(1)
go func() {
defer wg.Done()
for f := range ctx.input {
for inputFile := range ctx.inputFiles {
accept := processor != nil
for _, filter := range append(ctx.filters, filters...) {
if accept, err = filter.Accept(ctx, f); err != nil {
ctx.gs.fault(filter.Name(), f, err)
for _, filter := range append(ctx.fileFilters, filters...) {
if accept, err = filter.Accept(ctx, inputFile); err != nil {
ctx.goldsmith.fault(filter.Name(), inputFile, err)
return
}
if !accept {
break
}
}
if accept {
if _, err := f.Seek(0, os.SEEK_SET); err != nil {
ctx.gs.fault("core", f, err)
if _, err := inputFile.Seek(0, os.SEEK_SET); err != nil {
ctx.goldsmith.fault("core", inputFile, err)
}
if err := processor.Process(ctx, f); err != nil {
ctx.gs.fault(ctx.plug.Name(), f, err)
if err := processor.Process(ctx, inputFile); err != nil {
ctx.goldsmith.fault(ctx.plugin.Name(), inputFile, err)
}
} else {
ctx.output <- f
ctx.outputFiles <- inputFile
}
}
}()
@@ -63,25 +112,9 @@ func (ctx *context) step() {
wg.Wait()
}
if finalizer, ok := ctx.plug.(Finalizer); ok {
if finalizer, ok := ctx.plugin.(Finalizer); ok {
if err := finalizer.Finalize(ctx); err != nil {
ctx.gs.fault(ctx.plug.Name(), nil, err)
ctx.goldsmith.fault(ctx.plugin.Name(), nil, err)
}
}
}
//
// Context Implementation
//
func (ctx *context) DispatchFile(f File) {
ctx.output <- f.(*file)
}
func (ctx *context) SrcDir() string {
return ctx.gs.srcDir
}
func (ctx *context) DstDir() string {
return ctx.gs.dstDir
}
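
Plugins interact with the cache through the new Context methods shown above: RetrieveCachedFile hands back a previously stored output (or nil on a miss), and DispatchAndCacheFile stores a freshly built file before sending it downstream. Below is a sketch of a cache-aware Processor; the upper plugin, its upper-casing transform, and the package and import path are invented for illustration only.

package plugins

import (
    "bytes"
    "strings"

    "github.com/FooSoft/goldsmith" // import path assumed
)

// upper is an invented plugin that upper-cases file contents, shown only to
// demonstrate the new cache hooks on *goldsmith.Context.
type upper struct{}

func (*upper) Name() string { return "upper" }

func (*upper) Process(ctx *goldsmith.Context, inputFile *goldsmith.File) error {
    outputPath := inputFile.Path()

    // Cache hit: the stored artifact is keyed on the plugin chain, the output
    // path, and the input file's content hash, so it is safe to reuse.
    if outputFile := ctx.RetrieveCachedFile(outputPath, inputFile); outputFile != nil {
        outputFile.Meta = inputFile.Meta
        ctx.DispatchFile(outputFile)
        return nil
    }

    // Cache miss: do the work, then dispatch and store the result.
    var buff bytes.Buffer
    if _, err := inputFile.WriteTo(&buff); err != nil {
        return err
    }

    outputFile := ctx.CreateFileFromData(outputPath, []byte(strings.ToUpper(buff.String())))
    outputFile.Meta = inputFile.Meta
    ctx.DispatchAndCacheFile(outputFile, inputFile)
    return nil
}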

core.go (deleted, 132 lines)

@@ -1,132 +0,0 @@
package goldsmith
import (
"os"
"path/filepath"
"sync"
)
type goldsmith struct {
srcDir, dstDir string
contexts []*context
refs map[string]bool
complete bool
filters []Filter
errors []error
errorMtx sync.Mutex
}
func (gs *goldsmith) pushContext(plug Plugin) *context {
ctx := &context{gs: gs, plug: plug, output: make(chan *file)}
ctx.filters = append(ctx.filters, gs.filters...)
if len(gs.contexts) > 0 {
ctx.input = gs.contexts[len(gs.contexts)-1].output
}
gs.contexts = append(gs.contexts, ctx)
return ctx
}
func (gs *goldsmith) cleanupFiles() {
infos := make(chan fileInfo)
go scanDir(gs.dstDir, infos)
for info := range infos {
relPath, _ := filepath.Rel(gs.dstDir, info.path)
if contained, _ := gs.refs[relPath]; contained {
continue
}
os.RemoveAll(info.path)
}
}
func (gs *goldsmith) exportFile(f *file) error {
if err := f.export(gs.dstDir); err != nil {
return err
}
pathSeg := cleanPath(f.path)
for {
gs.refs[pathSeg] = true
if pathSeg == "." {
break
}
pathSeg = filepath.Dir(pathSeg)
}
return nil
}
func (gs *goldsmith) fault(name string, f *file, err error) {
gs.errorMtx.Lock()
defer gs.errorMtx.Unlock()
ferr := &Error{Name: name, Err: err}
if f != nil {
ferr.Path = f.path
}
gs.errors = append(gs.errors, ferr)
}
//
// Goldsmith Implementation
//
func (gs *goldsmith) Chain(p Plugin) Goldsmith {
if gs.complete {
panic("attempted reuse of goldsmith instance")
}
gs.pushContext(p)
return gs
}
func (gs *goldsmith) FilterPush(f Filter) Goldsmith {
if gs.complete {
panic("attempted reuse of goldsmith instance")
}
gs.filters = append(gs.filters, f)
return gs
}
func (gs *goldsmith) FilterPop() Goldsmith {
if gs.complete {
panic("attempted reuse of goldsmith instance")
}
count := len(gs.filters)
if count == 0 {
panic("attempted to pop empty filter stack")
}
gs.filters = gs.filters[:count-1]
return gs
}
func (gs *goldsmith) End(dstDir string) []error {
if gs.complete {
panic("attempted reuse of goldsmith instance")
}
gs.dstDir = dstDir
for _, ctx := range gs.contexts {
go ctx.step()
}
ctx := gs.contexts[len(gs.contexts)-1]
for f := range ctx.output {
gs.exportFile(f)
}
gs.cleanupFiles()
gs.complete = true
return gs.errors
}

error.go (new file, 18 lines)

@@ -0,0 +1,18 @@
package goldsmith

import "fmt"

type Error struct {
    Name string
    Path string
    Err  error
}

func (e Error) Error() string {
    var path string
    if len(e.Path) > 0 {
        path = "@" + e.Path
    }

    return fmt.Sprintf("[%s%s]: %s", e.Name, path, e.Err.Error())
}

file.go (206 lines changed)

@@ -2,46 +2,114 @@ package goldsmith
import (
"bytes"
"hash/crc32"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
"time"
)
type file struct {
path string
type File struct {
sourcePath string
dataPath string
Meta map[string]interface{}
hashValue uint32
hashValid bool
reader *bytes.Reader
size int64
modTime time.Time
asset string
}
func (f *file) export(dstDir string) error {
dstPath := filepath.Join(dstDir, f.path)
if len(f.asset) > 0 {
dstInfo, err := os.Stat(dstPath)
if err == nil && dstInfo.ModTime().Unix() >= f.ModTime().Unix() {
return nil
}
func (f *File) Path() string {
return f.sourcePath
}
func (f *File) Name() string {
return path.Base(f.sourcePath)
}
func (f *File) Dir() string {
return path.Dir(f.sourcePath)
}
func (f *File) Ext() string {
return path.Ext(f.sourcePath)
}
func (f *File) Size() int64 {
return f.size
}
func (f *File) ModTime() time.Time {
return f.modTime
}
func (f *File) Read(data []byte) (int, error) {
if err := f.load(); err != nil {
return 0, err
}
if err := os.MkdirAll(path.Dir(dstPath), 0755); err != nil {
return f.reader.Read(data)
}
func (f *File) WriteTo(writer io.Writer) (int64, error) {
if err := f.load(); err != nil {
return 0, err
}
return f.reader.WriteTo(writer)
}
func (f *File) Seek(offset int64, whence int) (int64, error) {
if f.reader == nil && offset == 0 && (whence == os.SEEK_SET || whence == os.SEEK_CUR) {
return 0, nil
}
if err := f.load(); err != nil {
return 0, err
}
return f.reader.Seek(offset, whence)
}
type FilesByPath []*File
func (f FilesByPath) Len() int {
return len(f)
}
func (f FilesByPath) Swap(i, j int) {
f[i], f[j] = f[j], f[i]
}
func (f FilesByPath) Less(i, j int) bool {
return strings.Compare(f[i].Path(), f[j].Path()) < 0
}
func (f *File) export(targetDir string) error {
targetPath := filepath.Join(targetDir, f.sourcePath)
if targetInfo, err := os.Stat(targetPath); err == nil && targetInfo.ModTime().After(f.ModTime()) {
return nil
}
if err := os.MkdirAll(path.Dir(targetPath), 0755); err != nil {
return err
}
fw, err := os.Create(dstPath)
fw, err := os.Create(targetPath)
if err != nil {
return err
}
defer fw.Close()
if f.reader == nil {
fr, err := os.Open(f.asset)
fr, err := os.Open(f.dataPath)
if err != nil {
return err
}
@@ -54,6 +122,7 @@ func (f *file) export(dstDir string) error {
if _, err := f.Seek(0, os.SEEK_SET); err != nil {
return err
}
if _, err := f.WriteTo(fw); err != nil {
return err
}
@@ -62,12 +131,12 @@ func (f *file) export(dstDir string) error {
return nil
}
func (f *file) cache() error {
func (f *File) load() error {
if f.reader != nil {
return nil
}
data, err := ioutil.ReadFile(f.asset)
data, err := ioutil.ReadFile(f.dataPath)
if err != nil {
return err
}
@@ -76,73 +145,62 @@ func (f *file) cache() error {
return nil
}
//
// File Implementation
//
func (f *file) Path() string {
return f.path
}
func (f *file) Name() string {
return path.Base(f.path)
}
func (f *file) Dir() string {
return path.Dir(f.path)
}
func (f *file) Ext() string {
return path.Ext(f.path)
}
func (f *file) Size() int64 {
return f.size
}
func (f *file) ModTime() time.Time {
return f.modTime
}
func (f *file) Value(key string) (interface{}, bool) {
return getDelimValue(f.Meta, key)
}
func (f *file) SetValue(key string, value interface{}) bool {
return setDelimValue(f.Meta, key, value)
}
func (f *file) InheritValues(src File) {
rf := src.(*file)
for name, value := range rf.Meta {
f.SetValue(name, value)
func (f *File) hash() (uint32, error) {
if f.hashValid {
return f.hashValue, nil
}
}
func (f *file) Read(p []byte) (int, error) {
if err := f.cache(); err != nil {
if err := f.load(); err != nil {
return 0, err
}
return f.reader.Read(p)
}
func (f *file) WriteTo(w io.Writer) (int64, error) {
if err := f.cache(); err != nil {
offset, err := f.Seek(0, os.SEEK_CUR)
if err != nil {
return 0, err
}
return f.reader.WriteTo(w)
}
func (f *file) Seek(offset int64, whence int) (int64, error) {
if f.reader == nil && offset == 0 && (whence == os.SEEK_SET || whence == os.SEEK_CUR) {
return 0, nil
}
if err := f.cache(); err != nil {
if _, err := f.Seek(0, os.SEEK_SET); err != nil {
return 0, err
}
return f.reader.Seek(offset, whence)
hasher := crc32.NewIEEE()
if _, err := io.Copy(hasher, f.reader); err != nil {
return 0, err
}
if _, err := f.Seek(offset, os.SEEK_SET); err != nil {
return 0, err
}
f.hashValue = hasher.Sum32()
f.hashValid = true
return f.hashValue, nil
}
type fileInfo struct {
os.FileInfo
path string
}
func cleanPath(path string) string {
if filepath.IsAbs(path) {
var err error
if path, err = filepath.Rel("/", path); err != nil {
panic(err)
}
}
return filepath.Clean(path)
}
func scanDir(rootDir string, infos chan fileInfo) {
defer close(infos)
filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error {
if err == nil {
infos <- fileInfo{FileInfo: info, path: path}
}
return err
})
}

goldsmith.go

@@ -1,118 +1,144 @@
package goldsmith
import (
"bytes"
"errors"
"fmt"
"io"
"hash"
"hash/crc32"
"os"
"time"
"path/filepath"
"sync"
)
type Goldsmith interface {
Chain(p Plugin) Goldsmith
FilterPush(f Filter) Goldsmith
FilterPop() Goldsmith
End(dstDir string) []error
type Goldsmith struct {
sourceDir string
targetDir string
contexts []*Context
contextHasher hash.Hash32
fileRefs map[string]bool
fileFilters []Filter
fileCache *fileCache
errors []error
errorMtx sync.Mutex
}
func Begin(srcDir string) Goldsmith {
gs := &goldsmith{srcDir: srcDir, refs: make(map[string]bool)}
func Begin(sourceDir string) *Goldsmith {
gs := &Goldsmith{
sourceDir: sourceDir,
contextHasher: crc32.NewIEEE(),
fileRefs: make(map[string]bool),
}
gs.Chain(new(loader))
return gs
}
type File interface {
Path() string
Name() string
Dir() string
Ext() string
Size() int64
ModTime() time.Time
Value(key string) (interface{}, bool)
SetValue(key string, value interface{}) bool
InheritValues(src File)
Read(p []byte) (int, error)
WriteTo(w io.Writer) (int64, error)
Seek(offset int64, whence int) (int64, error)
func (gs *Goldsmith) Cache(cacheDir string) *Goldsmith {
gs.fileCache = &fileCache{cacheDir}
return gs
}
func NewFileFromData(path string, data []byte, modTime time.Time) File {
return &file{
path: path,
Meta: make(map[string]interface{}),
reader: bytes.NewReader(data),
size: int64(len(data)),
modTime: modTime,
func (gs *Goldsmith) Chain(plugin Plugin) *Goldsmith {
gs.contextHasher.Write([]byte(plugin.Name()))
context := &Context{
goldsmith: gs,
plugin: plugin,
hash: gs.contextHasher.Sum32(),
outputFiles: make(chan *File),
}
context.fileFilters = append(context.fileFilters, gs.fileFilters...)
if len(gs.contexts) > 0 {
context.inputFiles = gs.contexts[len(gs.contexts)-1].outputFiles
}
gs.contexts = append(gs.contexts, context)
return gs
}
func (gs *Goldsmith) FilterPush(filter Filter) *Goldsmith {
gs.fileFilters = append(gs.fileFilters, filter)
return gs
}
func (gs *Goldsmith) FilterPop() *Goldsmith {
count := len(gs.fileFilters)
if count == 0 {
panic("attempted to pop empty filter stack")
}
gs.fileFilters = gs.fileFilters[:count-1]
return gs
}
func (gs *Goldsmith) End(targetDir string) []error {
gs.targetDir = targetDir
for _, context := range gs.contexts {
go context.step()
}
context := gs.contexts[len(gs.contexts)-1]
for file := range context.outputFiles {
gs.exportFile(file)
}
gs.removeUnreferencedFiles()
return gs.errors
}
func (gs *Goldsmith) retrieveFile(context *Context, outputPath string, inputFiles []*File) *File {
if gs.fileCache != nil {
outputFile, _ := gs.fileCache.retrieveFile(context, outputPath, inputFiles)
return outputFile
}
return nil
}
func (gs *Goldsmith) storeFile(context *Context, outputFile *File, inputFiles []*File) {
if gs.fileCache != nil {
gs.fileCache.storeFile(context, outputFile, inputFiles)
}
}
func NewFileFromAsset(path, asset string) (File, error) {
info, err := os.Stat(asset)
if err != nil {
return nil, err
func (gs *Goldsmith) removeUnreferencedFiles() {
infos := make(chan fileInfo)
go scanDir(gs.targetDir, infos)
for info := range infos {
if info.path != gs.targetDir {
relPath, _ := filepath.Rel(gs.targetDir, info.path)
if contained, _ := gs.fileRefs[relPath]; !contained {
os.RemoveAll(info.path)
}
}
}
}
func (gs *Goldsmith) exportFile(file *File) error {
if err := file.export(gs.targetDir); err != nil {
return err
}
if info.IsDir() {
return nil, errors.New("assets must be files")
for pathSeg := cleanPath(file.sourcePath); pathSeg != "."; pathSeg = filepath.Dir(pathSeg) {
gs.fileRefs[pathSeg] = true
}
f := &file{
path: path,
Meta: make(map[string]interface{}),
size: info.Size(),
modTime: info.ModTime(),
asset: asset,
return nil
}
func (gs *Goldsmith) fault(pluginName string, file *File, err error) {
gs.errorMtx.Lock()
defer gs.errorMtx.Unlock()
faultError := &Error{Name: pluginName, Err: err}
if file != nil {
faultError.Path = file.sourcePath
}
return f, nil
}
type Context interface {
DispatchFile(f File)
SrcDir() string
DstDir() string
}
type Error struct {
Name string
Path string
Err error
}
func (e Error) Error() string {
var path string
if len(e.Path) > 0 {
path = "@" + e.Path
}
return fmt.Sprintf("[%s%s]: %s", e.Name, path, e.Err.Error())
}
type Initializer interface {
Initialize(ctx Context) ([]Filter, error)
}
type Processor interface {
Process(ctx Context, f File) error
}
type Finalizer interface {
Finalize(ctx Context) error
}
type Component interface {
Name() string
}
type Filter interface {
Component
Accept(ctx Context, f File) (bool, error)
}
type Plugin interface {
Component
gs.errors = append(gs.errors, faultError)
}

interface.go (new file, 26 lines)

@@ -0,0 +1,26 @@
package goldsmith

type Initializer interface {
    Initialize(ctx *Context) ([]Filter, error)
}

type Processor interface {
    Process(ctx *Context, file *File) error
}

type Finalizer interface {
    Finalize(ctx *Context) error
}

type Component interface {
    Name() string
}

type Filter interface {
    Component
    Accept(ctx *Context, file *File) (bool, error)
}

type Plugin interface {
    Component
}


@@ -8,26 +8,26 @@ func (*loader) Name() string {
return "loader"
}
func (*loader) Initialize(ctx Context) ([]Filter, error) {
func (*loader) Initialize(ctx *Context) ([]Filter, error) {
infos := make(chan fileInfo)
go scanDir(ctx.SrcDir(), infos)
go scanDir(ctx.goldsmith.sourceDir, infos)
for info := range infos {
if info.IsDir() {
continue
}
relPath, _ := filepath.Rel(ctx.SrcDir(), info.path)
relPath, _ := filepath.Rel(ctx.goldsmith.sourceDir, info.path)
f := &file{
path: relPath,
Meta: make(map[string]interface{}),
modTime: info.ModTime(),
size: info.Size(),
asset: info.path,
file := &File{
sourcePath: relPath,
Meta: make(map[string]interface{}),
modTime: info.ModTime(),
size: info.Size(),
dataPath: info.path,
}
ctx.DispatchFile(f)
ctx.DispatchFile(file)
}
return nil, nil

util.go (deleted, 101 lines)

@@ -1,101 +0,0 @@
package goldsmith
import (
"os"
"path/filepath"
"reflect"
"strings"
)
type fileInfo struct {
os.FileInfo
path string
}
func cleanPath(path string) string {
if filepath.IsAbs(path) {
var err error
if path, err = filepath.Rel("/", path); err != nil {
panic(err)
}
}
return filepath.Clean(path)
}
func scanDir(root string, infos chan fileInfo) {
defer close(infos)
filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
infos <- fileInfo{FileInfo: info, path: path}
return nil
})
}
func setDelimValue(container interface{}, path string, data interface{}) bool {
containerVal := reflect.Indirect(reflect.ValueOf(container))
segments := strings.Split(path, ".")
segmentHead := segments[0]
if len(segments) > 1 {
var fieldVal reflect.Value
switch containerVal.Kind() {
case reflect.Map:
fieldVal = containerVal.MapIndex(reflect.ValueOf(segmentHead))
case reflect.Struct:
fieldVal = containerVal.FieldByName(segmentHead)
if fieldVal.CanAddr() {
fieldVal = fieldVal.Addr()
}
}
if fieldVal.IsValid() && fieldVal.CanInterface() {
pathRest := strings.Join(segments[1:], ".")
return setDelimValue(fieldVal.Interface(), pathRest, data)
}
} else {
switch containerVal.Kind() {
case reflect.Map:
containerVal.SetMapIndex(reflect.ValueOf(segmentHead), reflect.ValueOf(data))
return true
case reflect.Struct:
fieldVal := containerVal.FieldByName(segmentHead)
if fieldVal.CanSet() {
fieldVal.Set(reflect.ValueOf(data))
return true
}
}
}
return false
}
func getDelimValue(container interface{}, path string) (interface{}, bool) {
containerVal := reflect.Indirect(reflect.ValueOf(container))
segments := strings.Split(path, ".")
segmentHead := segments[0]
var fieldVal reflect.Value
switch containerVal.Kind() {
case reflect.Map:
fieldVal = containerVal.MapIndex(reflect.ValueOf(segmentHead))
case reflect.Struct:
fieldVal = containerVal.FieldByName(segmentHead)
}
if fieldVal.IsValid() && fieldVal.CanInterface() {
if len(segments) > 1 {
return getDelimValue(fieldVal.Interface(), strings.Join(segments[1:], "."))
}
return fieldVal.Interface(), true
}
return nil, false
}