This repository was archived by the owner on Nov 17, 2021. It is now read-only.
Merged
43 changes: 30 additions & 13 deletions cmd/root.go
@@ -21,7 +21,6 @@ import (
goflag "flag"
"fmt"
"io"
"io/ioutil"
"net/url"
"os"
"path/filepath"
@@ -248,21 +247,29 @@ func JsonnetVM(cmd *cobra.Command) (*jsonnet.VM, error) {
log.Debugln("Jsonnet search path:", u)
}

cwd, err := os.Getwd()
if err != nil {
return nil, fmt.Errorf("Unable to determine current working directory: %v", err)
}

vm.Importer(utils.MakeUniversalImporter(searchUrls))

for _, spec := range []struct {
flagName string
inject func(string, string)
isCode bool
fromFile bool
}{
{flagExtVar, vm.ExtVar, false},
{flagExtVarFile, vm.ExtVar, true},
{flagExtCode, vm.ExtCode, false},
{flagExtCodeFile, vm.ExtCode, true},
{flagTLAVar, vm.TLAVar, false},
{flagTLAVarFile, vm.TLAVar, true},
{flagTLACode, vm.TLACode, false},
{flagTLACodeFile, vm.TLACode, true},
{flagExtVar, vm.ExtVar, false, false},
// Treat as code to evaluate "importstr":
{flagExtVarFile, vm.ExtCode, false, true},
{flagExtCode, vm.ExtCode, true, false},
{flagExtCodeFile, vm.ExtCode, true, true},
{flagTLAVar, vm.TLAVar, false, false},
// Treat as code to evaluate "importstr":
{flagTLAVarFile, vm.TLACode, false, true},
{flagTLACode, vm.TLACode, true, false},
{flagTLACodeFile, vm.TLACode, true, true},
} {
entries, err := flags.GetStringSlice(spec.flagName)
if err != nil {
@@ -274,11 +281,21 @@ func JsonnetVM(cmd *cobra.Command) (*jsonnet.VM, error) {
if len(kv) != 2 {
return nil, fmt.Errorf("Failed to parse %s: missing '=' in %s", spec.flagName, entry)
}
v, err := ioutil.ReadFile(kv[1])
if err != nil {
return nil, err
// Ensure that the import path we construct here is absolute, so that our Importer
// won't try to glean from an extVar or TLA reference the context necessary to
// resolve a relative path.
path := kv[1]
if !filepath.IsAbs(path) {
Contributor

Why not just use filepath.Abs?

func Abs(path string) (string, error)
    Abs returns an absolute representation of path. If the path is not absolute
    it will be joined with the current working directory to turn it into an
    absolute path. The absolute path name for a given file is not guaranteed to
    be unique. Abs calls Clean on the result.

Author

I plead ignorance. I had never noticed that function before.

We can use it here, removing a few lines, though in the future I expect that we'll want to grab the current working directory here once—as we do up at line 250—and use that path repeatedly in the Importer once it's been converted into a "file"-scheme URL.

In other words, I could remove lines 250 through 254 today, but I'll probably be adding them back once the expected change to the Jsonnet Importer interface contract arrives.

If we use filepath.Abs here, it determines the current working directory on each call, and makes this operation fallible as a result—as opposed to us getting this possibility for failure out of the way at lines 251-253.

Author

By the way, the implementation of filepath.Abs calls on filepath.unixAbs, whose implementation looks familiar!

Contributor

Yeah, it does call Clean though. Anyway, we're going to iterate on that in the next PR.

Thanks! Merged.
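
For reference, a small self-contained Go sketch (not part of the change) contrasting the two alternatives weighed in this thread; the relative path below is hypothetical.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	rel := "../testdata/extvar.file" // hypothetical flag value

	// Alternative A: filepath.Abs consults the working directory on every
	// call, so each flag entry is a separate point of possible failure
	// (and the result is Clean'ed).
	if abs, err := filepath.Abs(rel); err == nil {
		fmt.Println("Abs: ", abs)
	}

	// Alternative B (the approach taken in this change): determine the
	// working directory once, handle that error up front, and then join
	// relative paths against it with no further failure mode.
	cwd, err := os.Getwd()
	if err != nil {
		fmt.Fprintln(os.Stderr, "unable to determine working directory:", err)
		os.Exit(1)
	}
	if !filepath.IsAbs(rel) {
		fmt.Println("Join:", filepath.Join(cwd, rel)) // Join also calls Clean
	}
}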

path = filepath.Join(cwd, path)
}
u := &url.URL{Scheme: "file", Path: path}
var imp string
if spec.isCode {
imp = "import"
} else {
imp = "importstr"
}
spec.inject(kv[0], string(v))
spec.inject(kv[0], fmt.Sprintf("%s @'%s'", imp, strings.ReplaceAll(u.String(), "'", "''")))
} else {
switch len(kv) {
case 1:
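
To make the new flag handling concrete, here is a small standalone Go sketch (not part of this change) of the transformation performed in the hunk above: a NAME=PATH entry from a flag such as --ext-str-file or --tla-code-file becomes a Jsonnet import or importstr expression over an absolute file URL, which the universal importer then resolves. The working directory and flag value below are hypothetical.

package main

import (
	"fmt"
	"net/url"
	"path/filepath"
	"strings"
)

// buildInjectedExpr mirrors the hunk above for a single NAME=PATH flag entry.
func buildInjectedExpr(cwd, path string, isCode bool) string {
	if !filepath.IsAbs(path) {
		path = filepath.Join(cwd, path)
	}
	u := &url.URL{Scheme: "file", Path: path}
	imp := "importstr" // string variables/arguments
	if isCode {
		imp = "import" // code variables/arguments
	}
	// Jsonnet verbatim strings (@'...') escape a single quote by doubling it.
	return fmt.Sprintf("%s @'%s'", imp, strings.ReplaceAll(u.String(), "'", "''"))
}

func main() {
	// Hypothetical invocation: --ext-str-file filevar=../testdata/extvar.file, run from /work/cmd.
	fmt.Println(buildInjectedExpr("/work/cmd", "../testdata/extvar.file", false))
	// Output: importstr @'file:///work/testdata/extvar.file'
}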
68 changes: 66 additions & 2 deletions cmd/show_test.go
@@ -23,9 +23,21 @@ import (
"reflect"
"testing"

"github.com/spf13/cobra"
"github.com/spf13/pflag"
"gopkg.in/yaml.v2"
)

func resetFlagsOf(cmd *cobra.Command) {
cmd.Flags().VisitAll(func(f *pflag.Flag) {
if sv, ok := f.Value.(pflag.SliceValue); ok {
sv.Replace(nil)
} else {
f.Value.Set(f.DefValue)
}
})
}

func cmdOutput(t *testing.T, args []string) string {
var buf bytes.Buffer
RootCmd.SetOutput(&buf)
@@ -72,7 +84,7 @@ func TestShow(t *testing.T) {
for format, parser := range formats {
expected, err := parser(expected)
if err != nil {
t.Errorf("error parsing *expected* value: %s", err)
t.Fatalf("error parsing *expected* value: %v", err)
}

os.Setenv("anVar", "aVal2")
@@ -86,13 +98,65 @@ func TestShow(t *testing.T) {
"-V", "anVar",
"--ext-str-file", "filevar=" + filepath.FromSlash("../testdata/extvar.file"),
})
defer resetFlagsOf(RootCmd)

t.Log("output is", output)
actual, err := parser(output)
if err != nil {
t.Errorf("error parsing output of format %s: %s", format, err)
t.Errorf("error parsing output of format %s: %v", format, err)
} else if !reflect.DeepEqual(expected, actual) {
t.Errorf("format %s expected != actual: %s != %s", format, expected, actual)
}
}
}

func TestShowUsingExtVarFiles(t *testing.T) {
expectedText := `
{
"apiVersion": "v1",
"kind": "ConfigMap",
"metadata": {
"name": "sink"
},
"data": {
"input": {
"greeting": "Hello!",
"helper": true,
"top": true
},
"var": "I'm a var!"
}
}
`
var expected interface{}
if err := json.Unmarshal([]byte(expectedText), &expected); err != nil {
t.Fatalf("error parsing *expected* value: %v", err)
}

cwd, err := os.Getwd()
if err != nil {
t.Fatalf("failed to get current working directory: %v", err)
}
if err := os.Chdir("../testdata/extvars/feed"); err != nil {
t.Fatalf("failed to change to target directory: %v", err)
}
defer os.Chdir(cwd)

output := cmdOutput(t, []string{"show",
"top.jsonnet",
"-o", "json",
"--tla-code-file", "input=input.jsonnet",
"--tla-code-file", "sink=sink.jsonnet",
"--ext-str-file", "filevar=var.txt",
})
defer resetFlagsOf(RootCmd)

t.Log("output is", output)
var actual interface{}
err = json.Unmarshal([]byte(output), &actual)
if err != nil {
t.Errorf("error parsing output: %v", err)
} else if !reflect.DeepEqual(expected, actual) {
t.Errorf("expected != actual: %s != %s", expected, actual)
}
}
4 changes: 3 additions & 1 deletion go.mod
@@ -29,7 +29,7 @@ require (
github.com/sergi/go-diff v0.0.0-20170409071739-feef008d51ad
github.com/sirupsen/logrus v1.0.6
github.com/spf13/cobra v0.0.0-20180319062004-c439c4fa0937
github.com/spf13/pflag v0.0.0-20180220143236-ee5fd03fd6ac // indirect
github.com/spf13/pflag v1.0.5
github.com/stretchr/testify v1.3.0
go.opencensus.io v0.19.0 // indirect
golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b
@@ -44,3 +44,5 @@ require (
k8s.io/kubernetes v1.13.4
sigs.k8s.io/yaml v1.1.0 // indirect
)

go 1.13
4 changes: 2 additions & 2 deletions go.sum
@@ -168,8 +168,8 @@ github.com/sirupsen/logrus v1.0.6 h1:hcP1GmhGigz/O7h1WVUM5KklBp1JoNS9FggWKdj/j3s
github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
github.com/spf13/cobra v0.0.0-20180319062004-c439c4fa0937 h1:+ryWjMVzFAkEz5zT+Ms49aROZwxlJce3x3zLTFpkz3Y=
github.com/spf13/cobra v0.0.0-20180319062004-c439c4fa0937/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/pflag v0.0.0-20180220143236-ee5fd03fd6ac h1:+uzyQ0TQ3aKorQxsOjcDDgE7CuUXwpkKnK19LULQALQ=
github.com/spf13/pflag v0.0.0-20180220143236-ee5fd03fd6ac/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
5 changes: 5 additions & 0 deletions testdata/extvars/feed/input.jsonnet
@@ -0,0 +1,5 @@
local helper = import '../helper.jsonnet';

{
greeting: 'Hello!',
} + helper
13 changes: 13 additions & 0 deletions testdata/extvars/feed/sink.jsonnet
@@ -0,0 +1,13 @@
function(input)
assert input.top;
{
apiVersion: 'v1',
kind: 'ConfigMap',
metadata: {
name: 'sink',
},
data: {
input: input,
var: std.extVar('filevar'),
},
}
4 changes: 4 additions & 0 deletions testdata/extvars/feed/top.jsonnet
@@ -0,0 +1,4 @@
function(input, sink)
sink(input {
top: true,
})
1 change: 1 addition & 0 deletions testdata/extvars/feed/var.txt
@@ -0,0 +1 @@
I'm a var!
3 changes: 3 additions & 0 deletions testdata/extvars/helper.jsonnet
@@ -0,0 +1,3 @@
{
helper: true,
}
40 changes: 21 additions & 19 deletions utils/importer.go
@@ -8,6 +8,7 @@ import (
"net/http"
"net/url"
"os"
"regexp"
"strings"
"time"

@@ -18,6 +19,8 @@ import (

var errNotFound = errors.New("Not found")

var extVarKindRE = regexp.MustCompile("^<(?:extvar|top-level-arg):.+>$")

//go:generate go-bindata -nometadata -ignore .*_test\.|~$DOLLAR -pkg $GOPACKAGE -o bindata.go -prefix ../ ../lib/...
func newInternalFS(prefix string) http.FileSystem {
// Asset/AssetDir returns `fmt.Errorf("Asset %s not found")`,
@@ -43,23 +46,23 @@ func newInternalFS(prefix string) http.FileSystem {
}

/*
MakeUniversalImporter creates an importer that handles resolving imports from the filesystem and http/s.
MakeUniversalImporter creates an importer that handles resolving imports from the filesystem and HTTP/S.

In addition to the standard importer, supports:
- URLs in import statements
- URLs in library search paths

A real-world example:
- you have https://raw.githubusercontent.com/ksonnet/ksonnet-lib/master in your search URLs
- you evaluate a local file which calls `import "ksonnet.beta.2/k.libsonnet"`
- if the `ksonnet.beta.2/k.libsonnet`` is not located in the current workdir, an attempt
- You have https://raw.githubusercontent.com/ksonnet/ksonnet-lib/master in your search URLs.
- You evaluate a local file which calls `import "ksonnet.beta.2/k.libsonnet"`.
- If the `ksonnet.beta.2/k.libsonnet` is not located in the current working directory, an attempt
will be made to follow the search path, i.e. to download
https://raw.githubusercontent.com/ksonnet/ksonnet-lib/master/ksonnet.beta.2/k.libsonnet
- since the downloaded `k.libsonnet`` file turn in contains `import "k8s.libsonnet"`, the import
https://raw.githubusercontent.com/ksonnet/ksonnet-lib/master/ksonnet.beta.2/k.libsonnet.
- Since the downloaded `k.libsonnet` file in turn contains `import "k8s.libsonnet"`, the import
will be resolved as https://raw.githubusercontent.com/ksonnet/ksonnet-lib/master/ksonnet.beta.2/k8s.libsonnet
and downloaded from that location
and downloaded from that location.
*/
func MakeUniversalImporter(searchUrls []*url.URL) jsonnet.Importer {
func MakeUniversalImporter(searchURLs []*url.URL) jsonnet.Importer {
// Reconstructed copy of http.DefaultTransport (to avoid
// modifying the default)
t := &http.Transport{
@@ -79,7 +82,7 @@ func MakeUniversalImporter(searchUrls []*url.URL) jsonnet.Importer {
t.RegisterProtocol("internal", http.NewFileTransport(newInternalFS("lib")))

return &universalImporter{
BaseSearchURLs: searchUrls,
BaseSearchURLs: searchURLs,
HTTPClient: &http.Client{Transport: t},
cache: map[string]jsonnet.Contents{},
}
@@ -91,12 +94,12 @@ type universalImporter struct {
cache map[string]jsonnet.Contents
}

func (importer *universalImporter) Import(dir, importedPath string) (jsonnet.Contents, string, error) {
log.Debugf("Importing %q from %q", importedPath, dir)
func (importer *universalImporter) Import(importedFrom, importedPath string) (jsonnet.Contents, string, error) {
log.Debugf("Importing %q from %q", importedPath, importedFrom)

candidateURLs, err := importer.expandImportToCandidateURLs(dir, importedPath)
candidateURLs, err := importer.expandImportToCandidateURLs(importedFrom, importedPath)
if err != nil {
return jsonnet.Contents{}, "", fmt.Errorf("Could not get candidate URLs for when importing %s (import dir is %s)", importedPath, dir)
return jsonnet.Contents{}, "", fmt.Errorf("Could not get candidate URLs when importing %s (imported from %s): %v", importedPath, importedFrom, err)
}

var tried []string
@@ -142,7 +145,7 @@ func (importer *universalImporter) tryImport(url string) (jsonnet.Contents, error) {
return jsonnet.MakeContents(string(bodyBytes)), nil
}

func (importer *universalImporter) expandImportToCandidateURLs(dir, importedPath string) ([]*url.URL, error) {
func (importer *universalImporter) expandImportToCandidateURLs(importedFrom, importedPath string) ([]*url.URL, error) {
importedPathURL, err := url.Parse(importedPath)
if err != nil {
return nil, fmt.Errorf("Import path %q is not valid", importedPath)
@@ -151,14 +154,13 @@ func (importer *universalImporter) expandImportToCandidateURLs(dir, importedPath string) ([]*url.URL, error) {
return []*url.URL{importedPathURL}, nil
}

importDirURL, err := url.Parse(dir)
importDirURL, err := url.Parse(importedFrom)
if err != nil {
return nil, fmt.Errorf("Invalid import dir %q", dir)
return nil, fmt.Errorf("Invalid import dir %q: %v", importedFrom, err)
}

candidateURLs := make([]*url.URL, 0, len(importer.BaseSearchURLs)+1)

candidateURLs = append(candidateURLs, importDirURL.ResolveReference(importedPathURL))
candidateURLs := make([]*url.URL, 1, len(importer.BaseSearchURLs)+1)
candidateURLs[0] = importDirURL.ResolveReference(importedPathURL)

for _, u := range importer.BaseSearchURLs {
candidateURLs = append(candidateURLs, u.ResolveReference(importedPathURL))
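
To illustrate the search-path behavior described in the MakeUniversalImporter doc comment, here is a short standalone Go sketch (not part of the diff) of how url.ResolveReference yields the candidate URLs, mirroring expandImportToCandidateURLs above. The base URLs are illustrative; note that the trailing slash on the search URL matters to ResolveReference.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// The document doing the importing and one library search URL.
	importedFrom, _ := url.Parse("file:///work/deploy.jsonnet")
	searchURL, _ := url.Parse("https://raw.githubusercontent.com/ksonnet/ksonnet-lib/master/")

	// The relative path appearing in an import statement.
	imported, _ := url.Parse("ksonnet.beta.2/k.libsonnet")

	// First candidate: resolved against the importing document.
	fmt.Println(importedFrom.ResolveReference(imported))
	// file:///work/ksonnet.beta.2/k.libsonnet

	// Further candidates: resolved against each search URL in turn, tried
	// only if the earlier candidates are not found.
	fmt.Println(searchURL.ResolveReference(imported))
	// https://raw.githubusercontent.com/ksonnet/ksonnet-lib/master/ksonnet.beta.2/k.libsonnet
}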