Initial v1.0.0 commit

This commit is contained in:
Jakub Vavřík
2021-01-28 17:37:47 +01:00
commit 1481d27782
4164 changed files with 1264675 additions and 0 deletions

View File

@@ -0,0 +1,111 @@
package resolver
import (
"io/ioutil"
"os"
"path/filepath"
"strings"
"sync"
"github.com/gobuffalo/packr/v2/file"
"github.com/gobuffalo/packr/v2/plog"
"github.com/karrick/godirwalk"
)
var _ Resolver = &Disk{}

// Disk is a Resolver that reads file contents straight from the
// filesystem, rooted at Root.
type Disk struct {
	Root string // base directory that relative names are resolved against
}
// String returns a JSON description of the resolver (see the package-level
// String helper).
func (d Disk) String() string {
	return String(&d)
}
// Resolve returns the contents of name looked up on disk. Relative names
// are resolved inside d.Root (and rejected if they escape it); absolute
// names are used as-is. Directories are reported as os.ErrNotExist so
// callers see a consistent "no such file" signal. The box argument is unused.
func (d *Disk) Resolve(box string, name string) (file.File, error) {
	var err error
	path := OsPath(name)
	if !filepath.IsAbs(path) {
		path, err = ResolvePathInBase(OsPath(d.Root), path)
		if err != nil {
			return nil, err
		}
	}
	fi, err := os.Stat(path)
	if err != nil {
		return nil, err
	}
	if fi.IsDir() {
		return nil, os.ErrNotExist
	}
	bb, err := ioutil.ReadFile(path)
	if err != nil {
		// Surface the real read error (e.g. a permission failure) instead
		// of masking every failure as os.ErrNotExist, which hid the cause.
		return nil, err
	}
	return file.NewFile(OsPath(name), bb)
}
// ResolvePathInBase returns a path that is guaranteed to be inside of the
// base directory, or an error. Traversal attempts (e.g. "..") that escape
// base yield os.ErrNotExist.
func ResolvePathInBase(base, path string) (string, error) {
	// Anchor everything at the absolute location of the base directory.
	absBase, err := filepath.Abs(base)
	if err != nil {
		return "", err
	}
	// A bare separator means "the base directory itself".
	if path == "/" || path == "\\" {
		return absBase, nil
	}
	// Join the requested path onto the base and normalize the result.
	joined, err := filepath.Abs(filepath.Join(absBase, path))
	if err != nil {
		return "", err
	}
	// Anything that does not sit strictly below base was a traversal.
	if !strings.HasPrefix(joined, absBase+string(filepath.Separator)) {
		return "", os.ErrNotExist
	}
	return joined, nil
}
var _ file.FileMappable = &Disk{}

// FileMap walks d.Root and returns every regular file found, keyed by its
// path relative to the root. Walk errors are logged and an empty (or
// partial) map is returned rather than failing.
func (d *Disk) FileMap() map[string]file.File {
	moot := &sync.Mutex{}
	m := map[string]file.File{}
	root := OsPath(d.Root)
	if _, err := os.Stat(root); err != nil {
		return m
	}
	callback := func(path string, de *godirwalk.Dirent) error {
		// Bail out quietly if the root vanished mid-walk.
		if _, err := os.Stat(root); err != nil {
			return nil
		}
		if !de.IsRegular() {
			return nil
		}
		name := strings.TrimPrefix(path, root+string(filepath.Separator))
		b, err := ioutil.ReadFile(path)
		if err != nil {
			return err
		}
		f, err := file.NewFile(name, b)
		if err != nil {
			return err
		}
		// Lock only around the map write; the original locked before the
		// reads above and leaked the lock on every early error return.
		moot.Lock()
		m[name] = f
		moot.Unlock()
		return nil
	}
	err := godirwalk.Walk(root, &godirwalk.Options{
		FollowSymbolicLinks: true,
		Callback:            callback,
	})
	if err != nil {
		plog.Logger.Errorf("[%s] error walking %v", root, err)
	}
	return m
}

View File

@@ -0,0 +1,314 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package hex implements hexadecimal encoding and decoding.
package hex
import (
"bytes"
"fmt"
"io"
)
// hextable maps a nibble value (0-15) to its lowercase hex digit.
const hextable = "0123456789abcdef"

// EncodedLen returns the length of an encoding of n source bytes.
// Specifically, it returns n * 2.
func EncodedLen(n int) int { return 2 * n }

// Encode encodes src into EncodedLen(len(src)) bytes of dst, writing two
// lowercase hex digits per source byte. As a convenience, it returns the
// number of bytes written to dst, which is always EncodedLen(len(src)).
func Encode(dst, src []byte) int {
	j := 0
	for _, b := range src {
		dst[j] = hextable[b>>4]
		dst[j+1] = hextable[b&0x0f]
		j += 2
	}
	return j
}
// ErrLength reports an attempt to decode an odd-length input
// using Decode or DecodeString. It is a fixed sentinel value and can be
// compared directly.
// The stream-based Decoder returns io.ErrUnexpectedEOF instead of ErrLength.
var ErrLength = fmt.Errorf("encoding/hex: odd length hex string")
// InvalidByteError values describe errors resulting from an invalid byte in
// a hex string. The value is the offending byte itself.
type InvalidByteError byte

// Error implements the error interface, quoting the offending byte as a
// Unicode code point (%#U) for readability.
func (e InvalidByteError) Error() string {
	return fmt.Sprintf("encoding/hex: invalid byte: %#U", rune(e))
}
// DecodedLen returns the length of a decoding of x source bytes.
// Specifically, it returns x / 2 (integer division: a trailing odd
// digit contributes nothing).
func DecodedLen(x int) int { return x / 2 }
// Decode decodes src into DecodedLen(len(src)) bytes,
// returning the actual number of bytes written to dst.
//
// Decode expects that src contains only hexadecimal
// characters and that src has even length.
// If the input is malformed, Decode returns the number
// of bytes decoded before the error.
func Decode(dst, src []byte) (int, error) {
	pairs := len(src) / 2
	for i := 0; i < pairs; i++ {
		hi, ok := fromHexChar(src[i*2])
		if !ok {
			return i, InvalidByteError(src[i*2])
		}
		lo, ok := fromHexChar(src[i*2+1])
		if !ok {
			return i, InvalidByteError(src[i*2+1])
		}
		dst[i] = hi<<4 | lo
	}
	if len(src)%2 == 1 {
		// Report an invalid trailing character ahead of the bad length,
		// since the invalid char (if present) is the earlier problem.
		if _, ok := fromHexChar(src[pairs*2]); !ok {
			return pairs, InvalidByteError(src[pairs*2])
		}
		return pairs, ErrLength
	}
	return pairs, nil
}
// fromHexChar converts a hex character into its value and a success flag.
// Both lowercase and uppercase digits are accepted.
func fromHexChar(c byte) (byte, bool) {
	if c >= '0' && c <= '9' {
		return c - '0', true
	}
	if c >= 'a' && c <= 'f' {
		return c - 'a' + 10, true
	}
	if c >= 'A' && c <= 'F' {
		return c - 'A' + 10, true
	}
	return 0, false
}
// EncodeToString returns the lowercase hexadecimal encoding of src as a
// freshly allocated string.
func EncodeToString(src []byte) string {
	out := make([]byte, EncodedLen(len(src)))
	Encode(out, src)
	return string(out)
}
// DecodeString returns the bytes represented by the hexadecimal string s.
//
// DecodeString expects that s contains only hexadecimal
// characters and that s has even length.
// If the input is malformed, DecodeString returns
// the bytes decoded before the error.
func DecodeString(s string) ([]byte, error) {
	buf := []byte(s)
	// Decoding in place is safe: Decode consumes two input bytes for every
	// output byte it writes, so the write index never catches the read index.
	n, err := Decode(buf, buf)
	return buf[:n], err
}
// Dump returns a string that contains a hex dump of the given data. The format
// of the hex dump matches the output of `hexdump -C` on the command line.
func Dump(data []byte) string {
	var buf bytes.Buffer
	// Writes to a bytes.Buffer cannot fail, so the Write/Close error
	// results are safely ignored here.
	dumper := Dumper(&buf)
	dumper.Write(data)
	dumper.Close()
	return buf.String()
}
// bufferSize is the number of hexadecimal characters to buffer in encoder and decoder.
const bufferSize = 1024

// encoder streams lowercase hex output to w. The first write error is
// retained and all subsequent input is discarded.
type encoder struct {
	w   io.Writer
	err error            // sticky error from the underlying writer
	out [bufferSize]byte // output buffer
}

// NewEncoder returns an io.Writer that writes lowercase hexadecimal characters to w.
func NewEncoder(w io.Writer) io.Writer {
	return &encoder{w: w}
}
// Write implements io.Writer, hex-encoding p into the fixed output buffer
// in chunks and forwarding the encoded bytes to the underlying writer.
// After the first underlying error, writes become no-ops returning that
// sticky error.
func (e *encoder) Write(p []byte) (n int, err error) {
	for len(p) > 0 && e.err == nil {
		// Encode at most bufferSize/2 source bytes per pass so the
		// doubled output fits in e.out.
		chunkSize := bufferSize / 2
		if len(p) < chunkSize {
			chunkSize = len(p)
		}
		var written int
		encoded := Encode(e.out[:], p[:chunkSize])
		written, e.err = e.w.Write(e.out[:encoded])
		// Report consumed source bytes, not encoded output bytes.
		n += written / 2
		p = p[chunkSize:]
	}
	return n, e.err
}
// decoder streams decoded bytes from hex input read from r. The first
// error encountered is retained and surfaced once all buffered input has
// been consumed.
type decoder struct {
	r   io.Reader
	err error            // sticky error from the underlying reader or decode
	in  []byte           // input buffer (encoded form)
	arr [bufferSize]byte // backing array for in
}

// NewDecoder returns an io.Reader that decodes hexadecimal characters from r.
// NewDecoder expects that r contain only an even number of hexadecimal
// characters.
func NewDecoder(r io.Reader) io.Reader {
	return &decoder{r: r}
}
// Read implements io.Reader. It refills the internal buffer from the
// underlying reader when fewer than two encoded bytes remain (a decode
// needs a full pair), decodes as many pairs as fit in p, and defers error
// reporting until the buffered input is fully consumed.
func (d *decoder) Read(p []byte) (n int, err error) {
	// Fill internal buffer with sufficient bytes to decode
	if len(d.in) < 2 && d.err == nil {
		var numCopy, numRead int
		numCopy = copy(d.arr[:], d.in) // Copies either 0 or 1 bytes
		numRead, d.err = d.r.Read(d.arr[numCopy:])
		d.in = d.arr[:numCopy+numRead]
		if d.err == io.EOF && len(d.in)%2 != 0 {
			// Odd trailing byte at EOF: prefer the more specific
			// invalid-byte error if that byte is not hex at all.
			if _, ok := fromHexChar(d.in[len(d.in)-1]); !ok {
				d.err = InvalidByteError(d.in[len(d.in)-1])
			} else {
				d.err = io.ErrUnexpectedEOF
			}
		}
	}
	// Decode internal buffer into output buffer
	if numAvail := len(d.in) / 2; len(p) > numAvail {
		p = p[:numAvail]
	}
	numDec, err := Decode(p, d.in[:len(p)*2])
	d.in = d.in[2*numDec:]
	if err != nil {
		d.in, d.err = nil, err // Decode error; discard input remainder
	}
	if len(d.in) < 2 {
		return numDec, d.err // Only expose errors when buffer fully consumed
	}
	return numDec, nil
}
// Dumper returns a WriteCloser that writes a hex dump of all written data to
// w. The format of the dump matches the output of `hexdump -C` on the command
// line. Close must be called to flush a partial final line.
func Dumper(w io.Writer) io.WriteCloser {
	return &dumper{w: w}
}
// dumper formats written bytes in `hexdump -C` style, 16 bytes per line.
type dumper struct {
	w          io.Writer
	rightChars [18]byte // ASCII column for the current line plus '|' and '\n'
	buf        [14]byte // scratch space for offset and hex-pair formatting
	used       int      // number of bytes in the current line
	n          uint     // number of bytes, total
	closed     bool     // set by Close; further Writes fail
}
// toChar returns b unchanged when it is printable ASCII (space through
// tilde), and '.' otherwise, for the right-hand column of a dump.
func toChar(b byte) byte {
	if b >= 32 && b <= 126 {
		return b
	}
	return '.'
}
// Write implements io.Writer, appending a hex dump of data to the output
// one byte at a time. It fails once the dumper has been closed.
func (h *dumper) Write(data []byte) (n int, err error) {
	if h.closed {
		return 0, fmt.Errorf("encoding/hex: dumper closed")
	}
	// Output lines look like:
	// 00000010  2e 2f 30 31 32 33 34 35  36 37 38 39 3a 3b 3c 3d  |./0123456789:;<=|
	// ^ offset, then hex pairs with an extra space mid-line, then ASCII.
	for i := range data {
		if h.used == 0 {
			// At the beginning of a line we print the current
			// offset in hex (big-endian bytes, hex-encoded in place).
			h.buf[0] = byte(h.n >> 24)
			h.buf[1] = byte(h.n >> 16)
			h.buf[2] = byte(h.n >> 8)
			h.buf[3] = byte(h.n)
			Encode(h.buf[4:], h.buf[:4])
			h.buf[12] = ' '
			h.buf[13] = ' '
			_, err = h.w.Write(h.buf[4:])
			if err != nil {
				return
			}
		}
		// Hex-encode the byte followed by a space (and any line decoration).
		Encode(h.buf[:], data[i:i+1])
		h.buf[2] = ' '
		l := 3
		if h.used == 7 {
			// There's an additional space after the 8th byte.
			h.buf[3] = ' '
			l = 4
		} else if h.used == 15 {
			// At the end of the line there's an extra space and
			// the bar for the right column.
			h.buf[3] = ' '
			h.buf[4] = '|'
			l = 5
		}
		_, err = h.w.Write(h.buf[:l])
		if err != nil {
			return
		}
		n++
		h.rightChars[h.used] = toChar(data[i])
		h.used++
		h.n++
		if h.used == 16 {
			// Line complete: emit the ASCII column and start a new line.
			h.rightChars[16] = '|'
			h.rightChars[17] = '\n'
			_, err = h.w.Write(h.rightChars[:])
			if err != nil {
				return
			}
			h.used = 0
		}
	}
	return
}
// Close flushes any partial final line, padding the hex columns with
// spaces so the ASCII column stays aligned, then writes the closing bar.
// Calling Close more than once is a no-op.
// See the comments in Write() for the details of this format.
func (h *dumper) Close() (err error) {
	if h.closed {
		return
	}
	h.closed = true
	if h.used == 0 {
		return
	}
	// Pre-fill the scratch buffer with the space/bar padding pieces.
	h.buf[0] = ' '
	h.buf[1] = ' '
	h.buf[2] = ' '
	h.buf[3] = ' '
	h.buf[4] = '|'
	nBytes := h.used
	// Pad out the remaining hex columns of the unfinished line.
	for h.used < 16 {
		l := 3
		if h.used == 7 {
			l = 4
		} else if h.used == 15 {
			l = 5
		}
		_, err = h.w.Write(h.buf[:l])
		if err != nil {
			return
		}
		h.used++
	}
	h.rightChars[nBytes] = '|'
	h.rightChars[nBytes+1] = '\n'
	_, err = h.w.Write(h.rightChars[:nBytes+2])
	return
}

View File

@@ -0,0 +1,112 @@
package resolver
import (
"bytes"
"compress/gzip"
"io"
"io/ioutil"
"os"
"strings"
"sync"
"github.com/gobuffalo/packr/v2/file/resolver/encoding/hex"
"github.com/gobuffalo/packr/v2/plog"
"github.com/gobuffalo/packr/v2/file"
)
var _ Resolver = &HexGzip{}

// HexGzip resolves files stored as hex-encoded, gzip-compressed strings,
// unpacking them lazily on first access.
type HexGzip struct {
	packed   map[string]string // name -> hex(gzip(contents))
	unpacked map[string]string // cache of already-decoded contents
	moot     *sync.RWMutex     // guards packed and unpacked
}
// String returns a JSON description of the resolver (see the package-level
// String helper).
func (hg HexGzip) String() string {
	return String(&hg)
}
var _ file.FileMappable = &HexGzip{}

// FileMap eagerly resolves every packed file and returns the results keyed
// by name. Entries that fail to resolve are silently skipped.
func (hg *HexGzip) FileMap() map[string]file.File {
	// Snapshot the key set under the read lock: Resolve acquires the
	// write lock itself, so it must not run while we hold any lock.
	hg.moot.RLock()
	var names []string
	for name := range hg.packed {
		names = append(names, name)
	}
	hg.moot.RUnlock()

	files := map[string]file.File{}
	for _, name := range names {
		f, err := hg.Resolve("", name)
		if err != nil {
			continue
		}
		files[name] = f
	}
	return files
}
// Resolve returns the file for name, lazily un-hexing/un-gzipping its
// packed contents and caching the plain text for subsequent lookups.
// Unknown names yield os.ErrNotExist.
func (hg *HexGzip) Resolve(box string, name string) (file.File, error) {
	plog.Debug(hg, "Resolve", "box", box, "name", name)
	// A full write lock is held for the whole lookup because a cache miss
	// mutates hg.unpacked below.
	hg.moot.Lock()
	defer hg.moot.Unlock()
	if s, ok := hg.unpacked[name]; ok {
		// Cache hit: already unpacked on an earlier call.
		return file.NewFile(name, []byte(s))
	}
	packed, ok := hg.packed[name]
	if !ok {
		return nil, os.ErrNotExist
	}
	unpacked, err := UnHexGzipString(packed)
	if err != nil {
		return nil, err
	}
	f, err := file.NewFile(OsPath(name), []byte(unpacked))
	if err != nil {
		return nil, err
	}
	hg.unpacked[name] = f.String()
	return f, nil
}
// NewHexGzip returns a HexGzip resolver backed by the given map of
// name -> hex-encoded gzipped contents. A nil map is treated as empty.
func NewHexGzip(files map[string]string) (*HexGzip, error) {
	packed := files
	if packed == nil {
		packed = map[string]string{}
	}
	return &HexGzip{
		packed:   packed,
		unpacked: map[string]string{},
		moot:     &sync.RWMutex{},
	}, nil
}
func HexGzipString(s string) (string, error) {
bb := &bytes.Buffer{}
enc := hex.NewEncoder(bb)
zw := gzip.NewWriter(enc)
io.Copy(zw, strings.NewReader(s))
zw.Close()
return bb.String(), nil
}
func UnHexGzipString(packed string) (string, error) {
br := bytes.NewBufferString(packed)
dec := hex.NewDecoder(br)
zr, err := gzip.NewReader(dec)
if err != nil {
return "", err
}
defer zr.Close()
b, err := ioutil.ReadAll(zr)
if err != nil {
return "", err
}
return string(b), nil
}

View File

@@ -0,0 +1,21 @@
package resolver
import (
"path/filepath"
"runtime"
"strings"
)
// Key normalizes s for use as a box key by converting Windows-style
// backslash separators to forward slashes.
func Key(s string) string {
	return strings.Replace(s, "\\", "/", -1)
}
func OsPath(s string) string {
if runtime.GOOS == "windows" {
s = strings.Replace(s, "/", string(filepath.Separator), -1)
} else {
s = strings.Replace(s, "\\", string(filepath.Separator), -1)
}
return s
}

View File

@@ -0,0 +1,63 @@
package resolver
import (
"io/ioutil"
"github.com/gobuffalo/packd"
"github.com/gobuffalo/packr/v2/file"
"github.com/gobuffalo/packr/v2/plog"
)
var _ Resolver = &InMemory{}

// InMemory resolves files from an in-memory packd.MemoryBox.
type InMemory struct {
	*packd.MemoryBox
}
// String returns a JSON description of the resolver (see the package-level
// String helper).
func (d InMemory) String() string {
	return String(&d)
}
// Resolve looks name up in the memory box and wraps the contents in a
// file.File. The box argument is unused.
func (d *InMemory) Resolve(box string, name string) (file.File, error) {
	contents, err := d.MemoryBox.Find(name)
	if err != nil {
		return nil, err
	}
	return file.NewFile(name, contents)
}
// Pack reads all of f and stores its contents in the underlying memory
// box under name.
func (d *InMemory) Pack(name string, f file.File) error {
	plog.Debug(d, "Pack", "name", name)
	contents, err := ioutil.ReadAll(f)
	if err != nil {
		return err
	}
	d.AddBytes(name, contents)
	return nil
}
// FileMap returns every file in the memory box, keyed by path.
func (d *InMemory) FileMap() map[string]file.File {
	m := map[string]file.File{}
	// The callback parameter is named f rather than "file" so it does not
	// shadow the imported file package inside the closure.
	d.Walk(func(path string, f file.File) error {
		m[path] = f
		return nil
	})
	return m
}
// NewInMemory builds an InMemory resolver pre-populated with the given
// files. A nil map yields an empty box; entries that cannot be read are
// skipped.
func NewInMemory(files map[string]file.File) *InMemory {
	box := packd.NewMemoryBox()
	// Ranging over a nil map is a no-op, so no explicit nil check is needed.
	for name, f := range files {
		b, err := ioutil.ReadAll(f)
		if err != nil {
			continue
		}
		box.AddBytes(name, b)
	}
	return &InMemory{MemoryBox: box}
}

View File

@@ -0,0 +1,7 @@
package resolver
import "github.com/gobuffalo/packr/v2/file"
// Packable is implemented by resolvers that can have files added to them
// at runtime via Pack.
type Packable interface {
	Pack(name string, f file.File) error
}

View File

@@ -0,0 +1,33 @@
package resolver
import (
"encoding/json"
"fmt"
"os"
"github.com/gobuffalo/packr/v2/file"
)
// Resolver locates the contents of a file given a box name and a file name.
type Resolver interface {
	Resolve(string, string) (file.File, error)
}
// defaultResolver builds the resolver used when none is configured:
// a Disk resolver rooted at the current working directory.
func defaultResolver() Resolver {
	// NOTE(review): the os.Getwd error is silently ignored; on failure
	// Root is "" and relative lookups resolve against an empty base —
	// confirm this fallback is acceptable.
	pwd, _ := os.Getwd()
	return &Disk{
		Root: pwd,
	}
}

// DefaultResolver is the Resolver used by boxes that have none set.
var DefaultResolver = defaultResolver()
// String renders r as a JSON object containing its concrete type name and,
// when the resolver supports it, its file map. A marshal failure yields
// an empty string.
func String(r Resolver) string {
	info := map[string]interface{}{
		"name": fmt.Sprintf("%T", r),
	}
	if fm, ok := r.(file.FileMappable); ok {
		info["files"] = fm
	}
	b, _ := json.Marshal(info)
	return string(b)
}