Workaround for splitting into multiple modules

This commit is contained in:
Laurence Withers 2019-04-12 09:14:21 +01:00
parent fdd1990e89
commit ce6749f0a3
5 changed files with 0 additions and 735 deletions

View File

@ -1,89 +0,0 @@
package main
import (
"errors"
"fmt"
"os"
"github.com/lwithers/htpack/packed"
"github.com/spf13/cobra"
)
// inspectCmd implements the "inspect" subcommand, which dumps the
// header and directory contents of one or more htpack files to stdout.
var inspectCmd = &cobra.Command{
	Use:   "inspect",
	Short: "View contents of an htpack file",
	RunE: func(cmd *cobra.Command, args []string) error {
		if len(args) == 0 {
			return errors.New("must specify one or more files")
		}

		// Inspect every named file, reporting per-file failures on
		// stderr; exit non-zero if any file could not be inspected.
		rc := 0
		for _, fname := range args {
			err := Inspect(fname)
			if err == nil {
				continue
			}
			fmt.Fprintf(os.Stderr, "%s: %v\n",
				fname, err)
			rc = 1
		}
		os.Exit(rc)
		return nil // unreachable: os.Exit does not return
	},
}
// Inspect a packfile, printing its header and a per-file summary of
// the directory: served path, Etag, content type, and the size/offset
// of each stored representation.
//
// TODO: verify etag; verify integrity of compressed data.
// TODO: skip Gzip/Brotli if not present; print ratio.
func Inspect(filename string) error {
	f, err := os.Open(filename)
	if err != nil {
		return err
	}
	defer f.Close()

	// NOTE(review): hdr/dir are printed even when Load fails, so a
	// corrupt file still shows whatever sections parsed — presumably
	// intentional; confirm against packed.Load's contract. The Load
	// error (if any) is propagated after printing.
	hdr, dir, err := packed.Load(f)
	if hdr != nil {
		fmt.Printf("Header: %#v\n", hdr)
	}
	if dir != nil {
		fmt.Printf("%d files:\n", len(dir.Files))
		for path, info := range dir.Files {
			fmt.Printf(" • %s\n"+
				" · Etag: %s\n"+
				" · Content type: %s\n"+
				" · Uncompressed: %s (offset %d)\n",
				path, info.Etag, info.ContentType,
				printSize(info.Uncompressed.Length),
				info.Uncompressed.Offset)
			if info.Gzip != nil {
				fmt.Printf(" · Gzipped: %s (offset %d)\n",
					printSize(info.Gzip.Length), info.Gzip.Offset)
			}
			if info.Brotli != nil {
				fmt.Printf(" · Brotli: %s (offset %d)\n",
					printSize(info.Brotli.Length), info.Brotli.Offset)
			}
		}
	}
	return err
}
// printSize renders a byte count as a human-readable string using
// binary (1024-based) units. Within each unit the precision narrows
// from two decimal places to one as the magnitude grows.
func printSize(size uint64) string {
	f := float64(size)
	if size < 1<<10 {
		return fmt.Sprintf("%d bytes", size)
	}
	if size < 1<<15 {
		return fmt.Sprintf("%.2f KiB", f/(1<<10))
	}
	if size < 1<<20 {
		return fmt.Sprintf("%.1f KiB", f/(1<<10))
	}
	if size < 1<<25 {
		return fmt.Sprintf("%.2f MiB", f/(1<<20))
	}
	if size < 1<<30 {
		return fmt.Sprintf("%.1f MiB", f/(1<<20))
	}
	if size < 1<<35 {
		return fmt.Sprintf("%.2f GiB", f/(1<<30))
	}
	return fmt.Sprintf("%.1f GiB", f/(1<<30))
}

View File

@ -1,28 +0,0 @@
package main
import (
"fmt"
"os"
"github.com/spf13/cobra"
)
// rootCmd is the top-level htpacker command. It carries only help
// text; the subcommands (pack, yaml, inspect) are attached in main.
var rootCmd = &cobra.Command{
	Use:   "htpacker",
	Short: "htpacker packs static files into a blob that can be served efficiently over HTTP",
	Long: `Creates .htpack files comprising one or more static assets, and
compressed versions thereof. A YAML specification of files to pack may be
provided or generated on demand; or files and directories can be listed as
arguments.`,
}
func main() {
rootCmd.AddCommand(packCmd)
rootCmd.AddCommand(yamlCmd)
rootCmd.AddCommand(inspectCmd)
if err := rootCmd.Execute(); err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}

View File

@ -1,107 +0,0 @@
package main
import (
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"github.com/lwithers/htpack/cmd/htpacker/packer"
"github.com/spf13/cobra"
yaml "gopkg.in/yaml.v2"
)
// packCmd implements the "pack" subcommand. It builds a packfile
// either from a YAML specification (--spec) or directly from the
// files and directories given as arguments.
var packCmd = &cobra.Command{
	Use:   "pack",
	Short: "creates a packfile from a YAML spec or set of files/dirs",
	RunE: func(c *cobra.Command, args []string) error {
		// convert "out" to an absolute path, so that it will still
		// work after chdir
		out, err := c.Flags().GetString("out")
		if err != nil {
			return err
		}
		out, err = filepath.Abs(out)
		if err != nil {
			return err
		}

		// if "spec" is present, convert to an absolute path
		spec, err := c.Flags().GetString("spec")
		if err != nil {
			return err
		}
		if spec != "" {
			spec, err = filepath.Abs(spec)
			if err != nil {
				return err
			}
		}

		// chdir if required — this must happen after the Abs()
		// calls above, which resolve relative to the original
		// working directory
		chdir, err := c.Flags().GetString("chdir")
		if err != nil {
			return err
		}
		if chdir != "" {
			if err = os.Chdir(chdir); err != nil {
				return err
			}
		}

		// if "spec" is not present, then we expect a list of input
		// files, and we'll build a spec from them.
		// BUGFIX: the messages previously referenced a nonexistent
		// "--yaml" flag; the registered flag is "--spec" (-y).
		if spec == "" {
			if len(args) == 0 {
				return errors.New("need --spec, " +
					"or one or more filenames")
			}
			err = PackFiles(c, args, out)
		} else {
			if len(args) != 0 {
				return errors.New("cannot specify files " +
					"when using --spec")
			}
			err = PackSpec(c, spec, out)
		}

		// print the error and exit directly, rather than returning
		// it (which would make cobra print usage information too)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		return nil
	},
}
// init registers the pack subcommand's flags: the mandatory output
// filename, an optional YAML spec, and an optional working directory.
func init() {
	packCmd.Flags().StringP("out", "O", "",
		"Output filename")
	packCmd.MarkFlagRequired("out")
	packCmd.Flags().StringP("spec", "y", "",
		"YAML specification file (if not present, just pack files)")
	packCmd.Flags().StringP("chdir", "C", "",
		"Change to directory before searching for input files")
}
// PackFiles builds a pack specification from the named files and
// directories, then writes the resulting packfile to out. The cobra
// command is accepted for signature parity with PackSpec but unused.
func PackFiles(c *cobra.Command, args []string, out string) error {
	ftp, err := filesFromList(args)
	if err == nil {
		return packer.Pack(ftp, out)
	}
	return err
}
// PackSpec reads a YAML pack specification from spec and writes the
// resulting packfile to out. The cobra command argument is unused.
func PackSpec(c *cobra.Command, spec, out string) error {
	raw, err := ioutil.ReadFile(spec)
	if err != nil {
		return err
	}

	// strict unmarshalling rejects unknown keys in the spec
	ftp := packer.FilesToPack{}
	if err = yaml.UnmarshalStrict(raw, &ftp); err != nil {
		return fmt.Errorf("parsing YAML spec %s: %v", spec, err)
	}
	return packer.Pack(ftp, out)
}

View File

@ -1,339 +0,0 @@
package packer
import (
	"bufio"
	"crypto/sha512"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"os"
	"os/exec"

	"github.com/foobaz/go-zopfli/zopfli"
	"github.com/lwithers/htpack/packed"
	"github.com/lwithers/pkg/writefile"
	"golang.org/x/sys/unix"
)
// BrotliPath is the name of (or path to) the external brotli
// compression tool. Setting it to "" disables brotli globally.
var BrotliPath string = "brotli"

// FilesToPack maps served path (e.g. "/index.html") to the input file
// to serve at that path.
type FilesToPack map[string]FileToPack

// FileToPack describes a single input file and its per-file packing
// options, as loaded from the YAML spec.
type FileToPack struct {
	Filename           string `yaml:"filename"`     // path on disk
	ContentType        string `yaml:"content_type"` // sniffed if empty
	DisableCompression bool   `yaml:"disable_compression"`
	DisableGzip        bool   `yaml:"disable_gzip"`
	DisableBrotli      bool   `yaml:"disable_brotli"`

	// NOTE(review): these fields appear unused within this file —
	// candidates for removal; confirm nothing else references them.
	uncompressed, gzip, brotli packInfo
}

// packInfo records whether a representation was written and, if so,
// where it landed in the output file.
type packInfo struct {
	present     bool
	offset, len uint64
}

const (
	// minCompressionSaving means we'll only use the compressed version of
	// the file if it's at least this many bytes smaller than the original.
	// Chosen somewhat arbitrarily; we have to add an HTTP header, and the
	// decompression overhead is not zero.
	minCompressionSaving = 128

	// minCompressionFraction means we'll only use the compressed version of
	// the file if it's at least (origSize>>minCompressionFraction) bytes
	// smaller than the original. This is a guess at when the decompression
	// overhead outweighs the time saved in transmission.
	minCompressionFraction = 7 // i.e. files must be at least 1/128 smaller
)
// Pack writes a packfile to outputFilename containing every file in
// filesToPack (plus optional gzip/brotli variants), laid out as a
// header, the file data, and a trailing directory.
func Pack(filesToPack FilesToPack, outputFilename string) error {
	// Write via a temporary file, committing to the final name only on
	// success; Abort cleans up on any early return.
	finalFname, outputFile, err := writefile.New(outputFilename)
	if err != nil {
		return err
	}
	defer writefile.Abort(outputFile)
	packer := &packWriter{f: outputFile}

	// write initial header (will rewrite offset/length when known)
	hdr := &packed.Header{
		Magic:           packed.Magic,
		Version:         packed.VersionInitial,
		DirectoryOffset: 1,
		DirectoryLength: 1,
	}
	// NOTE(review): Marshal/Write errors are ignored here — the
	// packWriter's sticky error is checked by subsequent operations;
	// confirm hdr.Marshal cannot fail for a fixed header.
	m, _ := hdr.Marshal()
	packer.Write(m)

	// pack each input file, recording its directory entry
	dir := packed.Directory{
		Files: make(map[string]*packed.File),
	}
	for path, fileToPack := range filesToPack {
		info, err := packOne(packer, fileToPack)
		if err != nil {
			return err
		}
		dir.Files[path] = &info
	}

	// write the directory
	if m, err = dir.Marshal(); err != nil {
		err = fmt.Errorf("marshaling directory object: %v", err)
		return err
	}
	packer.Pad()
	hdr.DirectoryOffset = packer.Pos()
	hdr.DirectoryLength = uint64(len(m))
	if _, err := packer.Write(m); err != nil {
		return err
	}

	// write header at start of file, now that the directory's
	// position and size are known
	m, _ = hdr.Marshal()
	if _, err = outputFile.WriteAt(m, 0); err != nil {
		return err
	}

	// all done!
	return writefile.Commit(finalFname, outputFile)
}
// packOne writes a single input file into the packfile: the raw bytes
// first, then (unless disabled) gzip and brotli variants, each kept
// only if it achieves a worthwhile size saving. It returns the
// directory entry describing what was written.
func packOne(packer *packWriter, fileToPack FileToPack) (info packed.File, err error) {
	// implementation detail: write files at a page boundary
	if err = packer.Pad(); err != nil {
		return
	}

	// open and mmap input file
	f, err := os.Open(fileToPack.Filename)
	if err != nil {
		return
	}
	defer f.Close()
	fi, err := f.Stat()
	if err != nil {
		return
	}
	// NOTE(review): mmap of a zero-length file fails with EINVAL on
	// Linux, so empty inputs presumably cannot be packed — confirm
	// whether that case needs handling.
	data, err := unix.Mmap(int(f.Fd()), 0, int(fi.Size()),
		unix.PROT_READ, unix.MAP_SHARED)
	if err != nil {
		err = fmt.Errorf("mmap %s: %v", fileToPack.Filename, err)
		return
	}
	defer unix.Munmap(data)

	// etag over the raw content; content type from the spec if given,
	// otherwise sniffed from the data
	info.Etag = etag(data)
	info.ContentType = fileToPack.ContentType
	if info.ContentType == "" {
		info.ContentType = http.DetectContentType(data)
	}

	// copy the uncompressed version
	fileData := &packed.FileData{
		Offset: packer.Pos(),
		Length: uint64(len(data)),
	}
	if _, err = packer.CopyFrom(f, fi); err != nil {
		return
	}
	info.Uncompressed = fileData

	if fileToPack.DisableCompression {
		return
	}

	// gzip compression (a returned length of 0 means the compressed
	// form was not worth keeping)
	if !fileToPack.DisableGzip {
		if err = packer.Pad(); err != nil {
			return
		}
		fileData = &packed.FileData{
			Offset: packer.Pos(),
		}
		fileData.Length, err = packOneGzip(packer, data,
			info.Uncompressed.Length)
		if err != nil {
			return
		}
		if fileData.Length > 0 {
			info.Gzip = fileData
		}
	}

	// brotli compression (requires the external brotli tool)
	if BrotliPath != "" && !fileToPack.DisableBrotli {
		if err = packer.Pad(); err != nil {
			return
		}
		fileData = &packed.FileData{
			Offset: packer.Pos(),
		}
		fileData.Length, err = packOneBrotli(packer,
			fileToPack.Filename, info.Uncompressed.Length)
		if err != nil {
			return
		}
		if fileData.Length > 0 {
			info.Brotli = fileData
		}
	}

	return
}
// etag computes a strong entity tag for the given content: a quoted
// string combining a format version ("1") with the hex-encoded
// SHA-384 digest of the data.
func etag(in []byte) string {
	sum := sha512.Sum384(in)
	return fmt.Sprintf(`"1--%x"`, sum[:])
}
// packOneGzip compresses data with zopfli (gzip container) into a
// temporary file, then copies it into the packfile only if the saving
// is worthwhile. It returns the number of bytes written (0 if the
// compressed form was skipped).
func packOneGzip(packer *packWriter, data []byte, uncompressedSize uint64,
) (uint64, error) {
	// Stage the output in a temporary file so the compressed size is
	// known before committing anything to the packfile.
	tmpfile, err := ioutil.TempFile("", "")
	if err != nil {
		return 0, err
	}
	defer os.Remove(tmpfile.Name())
	defer tmpfile.Close()

	// Large inputs get fewer zopfli iterations to bound CPU time.
	opts := zopfli.DefaultOptions()
	if len(data) > (10 << 20) { // 10MiB
		opts.NumIterations = 5
	}

	w := bufio.NewWriter(tmpfile)
	if err = zopfli.GzipCompress(&opts, data, w); err != nil {
		return 0, err
	}
	if err = w.Flush(); err != nil {
		return 0, err
	}

	// copy into packfile (no-op if the saving is too small)
	return packer.CopyIfSaving(tmpfile, uncompressedSize)
}
// packOneBrotli compresses the named file with the external brotli
// tool into a temporary file, then copies it into the packfile only
// if the saving is worthwhile. It returns the number of bytes written
// (0 if the compressed form was skipped).
func packOneBrotli(packer *packWriter, filename string, uncompressedSize uint64,
) (uint64, error) {
	tmpfile, err := ioutil.TempFile("", "")
	if err != nil {
		return 0, err
	}
	defer os.Remove(tmpfile.Name())
	defer tmpfile.Close()

	// Shell out to the brotli binary: it reads the original file from
	// disk and writes the compressed form into the tempfile.
	compress := exec.Command(BrotliPath, "--input", filename,
		"--output", tmpfile.Name())
	if out, err := compress.CombinedOutput(); err != nil {
		return 0, fmt.Errorf("brotli: %v (process reported: %s)", err, out)
	}

	// copy into packfile (no-op if the saving is too small)
	return packer.CopyIfSaving(tmpfile, uncompressedSize)
}
// packWriter wraps the output *os.File with a sticky error: the first
// failure is recorded in err and short-circuits subsequent operations,
// so callers can defer error checking to a convenient point.
type packWriter struct {
	f   *os.File
	err error
}
// Write implements io.Writer. It becomes a no-op (returning the stored
// error) once any previous operation on this packWriter has failed.
func (pw *packWriter) Write(buf []byte) (int, error) {
	if pw.err != nil {
		return 0, pw.err
	}
	var n int
	n, pw.err = pw.f.Write(buf)
	return n, pw.err
}
// Pos reports the current write offset within the output file. A seek
// failure is recorded as the packWriter's sticky error.
func (pw *packWriter) Pos() uint64 {
	// io.SeekCurrent replaces the deprecated os.SEEK_CUR constant.
	pos, err := pw.f.Seek(0, io.SeekCurrent)
	if err != nil {
		pw.err = err
	}
	return uint64(pos)
}
// Pad advances the write position to the next 4096-byte boundary
// (file data is always written page-aligned). It is a no-op if the
// position is already aligned or if a previous operation failed.
func (pw *packWriter) Pad() error {
	if pw.err != nil {
		return pw.err
	}

	// io.SeekCurrent replaces the deprecated os.SEEK_CUR constant.
	pos, err := pw.f.Seek(0, io.SeekCurrent)
	if err != nil {
		pw.err = err
		return pw.err
	}

	pos &= 0xFFF // offset within the current 4096-byte page
	if pos == 0 {
		return pw.err
	}

	// Seek forward rather than writing zeroes, leaving a sparse hole.
	if _, err = pw.f.Seek(4096-pos, io.SeekCurrent); err != nil {
		pw.err = err
	}
	return pw.err
}
// CopyIfSaving copies the contents of in into the packfile, but only
// when doing so saves enough space relative to the uncompressed form;
// it returns (0, nil) when the copy is skipped as not worthwhile.
func (pw *packWriter) CopyIfSaving(in *os.File, uncompressedSize uint64) (uint64, error) {
	if pw.err != nil {
		return 0, pw.err
	}

	fi, err := in.Stat()
	if err != nil {
		pw.err = err
		return 0, pw.err
	}

	// Require both an absolute saving (minCompressionSaving bytes)
	// and a proportional one (1/2^minCompressionFraction of the
	// original size).
	compressedSize := uint64(fi.Size())
	switch {
	case compressedSize+minCompressionSaving > uncompressedSize:
		return 0, nil
	case compressedSize+(uncompressedSize>>minCompressionFraction) > uncompressedSize:
		return 0, nil
	}

	return pw.CopyFrom(in, fi)
}
// CopyFrom appends the entire contents of in to the output file using
// sendfile(2), avoiding copying the data through userspace. It returns
// the number of bytes copied.
func (pw *packWriter) CopyFrom(in *os.File, fi os.FileInfo) (uint64, error) {
	if pw.err != nil {
		return 0, pw.err
	}

	// off is the input-file offset; sendfile advances it in place on
	// each call.
	var off int64
	remain := fi.Size()
	for remain > 0 {
		// copy in chunks of at most 1 GiB per sendfile call
		var amt int
		if remain > (1 << 30) {
			amt = (1 << 30)
		} else {
			amt = int(remain)
		}
		amt, err := unix.Sendfile(int(pw.f.Fd()), int(in.Fd()), &off, amt)
		remain -= int64(amt)
		if err != nil {
			pw.err = fmt.Errorf("sendfile (copying data to "+
				"htpack): %v", err)
			return uint64(off), pw.err
		}
		// NOTE(review): a zero-byte result with nil error would loop
		// forever; presumably sendfile always makes progress on
		// regular files — confirm.
	}
	return uint64(off), nil
}

View File

@ -1,172 +0,0 @@
package main
import (
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"os"
	"path/filepath"
	"strings"

	"github.com/lwithers/htpack/cmd/htpacker/packer"
	"github.com/spf13/cobra"
	yaml "gopkg.in/yaml.v2"
)
// yamlCmd implements the "yaml" subcommand: it scans the given files
// and directories and emits a YAML pack specification suitable for
// later use with "pack --spec".
var yamlCmd = &cobra.Command{
	Use:   "yaml",
	Short: "Build YAML spec from list of files/dirs",
	// BUGFIX: help text previously read "it will appear be served as".
	Long: `Generates a YAML specification from a list of files and directories.
The specification is suitable for passing to pack.
File names will be mapped as follows:
if you specify a file, it will be served as "/filename";
if you specify a directory, its contents will be merged into "/", such that a
directory with contents "a", "b", and "c/d" will cause entries "/a", "/b" and
"/c/d" to be served.
`,
	RunE: func(c *cobra.Command, args []string) error {
		if len(args) == 0 {
			return errors.New("must specify one or more files/directories")
		}

		// convert "out" to absolute path, in case we need to chdir
		out, err := c.Flags().GetString("out")
		if err != nil {
			return err
		}
		out, err = filepath.Abs(out)
		if err != nil {
			return err
		}

		// chdir if required (after resolving "out" above)
		chdir, err := c.Flags().GetString("chdir")
		if err != nil {
			return err
		}
		if chdir != "" {
			if err = os.Chdir(chdir); err != nil {
				return err
			}
		}

		// print the error and exit directly, rather than returning
		// it (which would make cobra print usage information too)
		if err := MakeYaml(args, out); err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		return nil
	},
}
// init registers the yaml subcommand's flags: the mandatory output
// filename and an optional working directory.
func init() {
	yamlCmd.Flags().StringP("out", "O", "",
		"Output filename")
	yamlCmd.MarkFlagRequired("out")
	yamlCmd.Flags().StringP("chdir", "C", "",
		"Change to directory before searching for input files")
}
// MakeYaml scans the given files/directories and writes a YAML pack
// specification covering them to the named output file.
func MakeYaml(args []string, out string) error {
	ftp, err := filesFromList(args)
	if err != nil {
		return err
	}

	var raw []byte
	if raw, err = yaml.Marshal(ftp); err != nil {
		return fmt.Errorf("failed to marshal %T to YAML: %v", ftp, err)
	}
	return ioutil.WriteFile(out, raw, 0666)
}
// filesFromList expands a list of file/directory arguments into a
// FilesToPack map, recursing into directories.
func filesFromList(args []string) (packer.FilesToPack, error) {
	// NB: we don't use filepath.Walk since:
	//  (a) we don't care about lexical order; just do it quick
	//  (b) we want to dereference symlinks
	ftp := packer.FilesToPack{}
	for _, root := range args {
		if err := filesFromListR(root, root, ftp); err != nil {
			return nil, err
		}
	}
	return ftp, nil
}
// filesFromListR recursively adds the file or directory arg to ftp.
// prefix is the original top-level argument; it is stripped from arg
// to derive the server path under which each file will be served.
func filesFromListR(prefix, arg string, ftp packer.FilesToPack) error {
	f, err := os.Open(arg)
	if err != nil {
		return err
	}
	defer f.Close()

	fi, err := f.Stat()
	if err != nil {
		return err
	}

	switch {
	case fi.Mode().IsDir():
		// recurse into every directory entry
		fnames, err := f.Readdirnames(0) // 0 ⇒ everything
		if err != nil {
			return err
		}
		for _, fname := range fnames {
			fullname := filepath.Join(arg, fname)
			if err = filesFromListR(prefix, fullname, ftp); err != nil {
				return err
			}
		}
		return nil

	case fi.Mode().IsRegular():
		// Sniff content type from the first 512 bytes.
		// BUGFIX: an empty file makes Read return (0, io.EOF), which
		// was previously treated as a fatal error; EOF is fine here.
		buf := make([]byte, 512)
		n, err := f.Read(buf)
		if err != nil && err != io.EOF {
			return err
		}
		buf = buf[:n]
		ctype := http.DetectContentType(buf)

		// augmented rules for JS / CSS / etc.
		switch {
		case strings.HasPrefix(ctype, "text/plain"):
			switch filepath.Ext(arg) {
			case ".css":
				ctype = "text/css"
			case ".js":
				ctype = "application/javascript"
			case ".json":
				ctype = "application/json"
			}
		case strings.HasPrefix(ctype, "text/xml"):
			switch filepath.Ext(arg) {
			case ".svg":
				ctype = "image/svg+xml"
			}
		}

		// Derive the served path: strip the top-level prefix, fall
		// back to the basename (arg named a file directly), and
		// ensure a leading "/".
		srvName := strings.TrimPrefix(arg, prefix)
		if srvName == "" {
			srvName = filepath.Base(arg)
		}
		if !strings.HasPrefix(srvName, "/") {
			srvName = "/" + srvName
		}

		ftp[srvName] = packer.FileToPack{
			Filename:    arg,
			ContentType: ctype,
		}
		return nil

	default:
		return fmt.Errorf("%s: not file/dir (mode %x)", arg, fi.Mode())
	}
}