From a70c21e827bd930680b5a91b37b038d0070db83f Mon Sep 17 00:00:00 2001 From: traut Date: Wed, 20 Dec 2023 15:06:24 +0100 Subject: [PATCH] Initial code commit --- .gitignore | 1 + build_and_run.sh | 8 + cmd/plugins/main.go | 25 +++ decodeHcl.go | 152 ++++++++++++++++ evaluateBlocks.go | 291 ++++++++++++++++++++++++++++++ go.mod | 36 ++++ go.sum | 85 +++++++++ main.go | 79 ++++++++ pkg/diagnostics/diagnostics.go | 51 ++++++ pkg/jsontools/jsontools.go | 68 +++++++ pkg/parexec/parexec.go | 204 +++++++++++++++++++++ plugins.go | 136 ++++++++++++++ plugins/cfg.go | 21 +++ plugins/content/content_plugin.go | 107 +++++++++++ plugins/content/table/impl.go | 60 ++++++ plugins/content/text/impl.go | 50 +++++ plugins/data/data_plugin.go | 72 ++++++++ plugins/data/plugin_a/impl.go | 25 +++ plugins/data/plugin_b/impl.go | 21 +++ read_hcl.go | 86 +++++++++ schema.go | 96 ++++++++++ schema_impl.go | 191 ++++++++++++++++++++ schema_traversal.go | 285 +++++++++++++++++++++++++++++ string_utils.go | 68 +++++++ templates/test.hcl | 40 ++++ util.go | 25 +++ 26 files changed, 2283 insertions(+) create mode 100755 build_and_run.sh create mode 100644 cmd/plugins/main.go create mode 100644 decodeHcl.go create mode 100644 evaluateBlocks.go create mode 100644 go.mod create mode 100644 go.sum create mode 100644 main.go create mode 100644 pkg/diagnostics/diagnostics.go create mode 100644 pkg/jsontools/jsontools.go create mode 100644 pkg/parexec/parexec.go create mode 100644 plugins.go create mode 100644 plugins/cfg.go create mode 100644 plugins/content/content_plugin.go create mode 100644 plugins/content/table/impl.go create mode 100644 plugins/content/text/impl.go create mode 100644 plugins/data/data_plugin.go create mode 100644 plugins/data/plugin_a/impl.go create mode 100644 plugins/data/plugin_b/impl.go create mode 100644 read_hcl.go create mode 100644 schema.go create mode 100644 schema_impl.go create mode 100644 schema_traversal.go create mode 100644 string_utils.go create mode 100644 
templates/test.hcl create mode 100644 util.go diff --git a/.gitignore b/.gitignore index 3b735ec4..caebbcfc 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ # https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore # # Binaries for programs and plugins +bin/* *.exe *.exe~ *.dll diff --git a/build_and_run.sh b/build_and_run.sh new file mode 100755 index 00000000..0e05cae7 --- /dev/null +++ b/build_and_run.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -e + +cd "$( dirname "${BASH_SOURCE[0]}" )" +rm ./bin/* >/dev/null 2>&1 || true +go build -o ./bin/plugins ./cmd/plugins +go build -o ./bin/ . +./bin/weave-cli -path ./templates/ -plugins ./bin/plugins -document "test-document" diff --git a/cmd/plugins/main.go b/cmd/plugins/main.go new file mode 100644 index 00000000..1dfdfbd3 --- /dev/null +++ b/cmd/plugins/main.go @@ -0,0 +1,25 @@ +package main + +import ( + "weave-cli/plugins" + "weave-cli/plugins/content" + "weave-cli/plugins/content/table" + "weave-cli/plugins/content/text" + "weave-cli/plugins/data" + "weave-cli/plugins/data/plugin_a" + "weave-cli/plugins/data/plugin_b" + + "github.com/hashicorp/go-plugin" +) + +func main() { + plugin.Serve(&plugin.ServeConfig{ + HandshakeConfig: plugins.Handshake, + Plugins: plugin.PluginSet{ + "data.plugin_a": &data.GoPlugin{Impl: &plugin_a.Impl{}}, + "data.plugin_b": &data.GoPlugin{Impl: &plugin_b.Impl{}}, + "content.table": &content.GoPlugin{Impl: &table.Impl{}}, + "content.text": &content.GoPlugin{Impl: &text.Impl{}}, + }, + }) +} diff --git a/decodeHcl.go b/decodeHcl.go new file mode 100644 index 00000000..40662260 --- /dev/null +++ b/decodeHcl.go @@ -0,0 +1,152 @@ +package main + +import ( + "fmt" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/gohcl" + "github.com/hashicorp/hcl/v2/hclsyntax" +) + +func (d *Decoder) Decode() (diag hcl.Diagnostics) { + for i := range d.root.ContentBlocks { + diag = diag.Extend(d.DecodeBlock(&d.root.ContentBlocks[i])) + } + for i := range 
d.root.DataBlocks { + diag = diag.Extend(d.DecodeBlock(&d.root.DataBlocks[i])) + } + for i := range d.root.Documents { + diag = diag.Extend(d.DecodeDocumnet(&d.root.Documents[i])) + } + return +} + +func (d *Decoder) DecodeDocumnet(doc *Document) (diag hcl.Diagnostics) { + for i := range doc.ContentBlocks { + diag = diag.Extend(d.DecodeBlock(&doc.ContentBlocks[i])) + } + for i := range doc.DataBlocks { + diag = diag.Extend(d.DecodeBlock(&doc.DataBlocks[i])) + } + return +} + +func (d *Decoder) DecodeBlock(block Block) (diag hcl.Diagnostics) { + extra := block.NewBlockExtra() + + diag = gohcl.DecodeBody(block.GetUnparsed(), nil, extra) + + if diag.HasErrors() { + return + } + + // deferring errors in attrs, they do not prevent us from parsing + deferredDiags := block.DecodeNestedBlocks(d, extra) + defer func() { + diag = deferredDiags.Extend(diag) + }() + + leftover := extra.GetUnparsed() + attrs, attrDiags := leftover.JustAttributes() + if attrDiags.HasErrors() { + // TODO: messy hcl bug workaround, in some cases might silently ignore user's error + attrDiags = nil + body := leftover.(*hclsyntax.Body) + for _, b := range body.Blocks { + switch b.Type { + case "meta", "content": + continue + default: + attrDiags = attrDiags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: fmt.Sprintf("Unexpected %q block", b.Type), + Detail: "Blocks are not allowed here.", + Subject: &b.TypeRange, + }) + } + } + } + + deferredDiags = deferredDiags.Extend(attrDiags) + *block.GetAttrs() = attrs + + trav, refDiag := traversalForExpr(extra.GetRef()) + if *block.GetType() != "ref" { + if len(trav) != 0 || len(refDiag) != 0 { + diag = diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagWarning, + Summary: "Non-empty ref attribute", + Detail: fmt.Sprintf( + "Non-empty ref attribute found in block of type '%s'. It will be ignored. 
Block must have type 'ref' in order to use references", + *block.GetType(), + ), + Subject: extra.GetRef().Range().Ptr(), + Expression: extra.GetRef(), + }) + } + // validate block type + plugins := d.plugins.ByKind(block.GetBlockKind()) + if _, found := plugins.plugins[*block.GetType()]; !found { + return diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagWarning, + Summary: fmt.Sprintf("Unknown %s block type", block.GetBlockKind()), + Detail: fmt.Sprintf( + "Unknown content block type '%s', valid block types: %s. Referencing or evaluating this block would cause an error", + *block.GetType(), + plugins.Names(), + ), + // TODO: storing type as string doensn't allow good error here. Switch to Expression? + Subject: block.GetUnparsed().MissingItemRange().Ptr(), + }) + } + *block.GetDecoded() = true + return + } + // handling ref + diag = diag.Extend(refDiag) + if diag.HasErrors() { + return + } + if len(trav) == 0 { + missingRef := &hcl.Diagnostic{ + Severity: hcl.DiagError, + } + if extra.GetRef().Range().Empty() { + missingRef.Summary = "Missing ref" + missingRef.Detail = fmt.Sprintf("Block '%s %s' is of type 'ref', but the ref field is missing", block.GetBlockKind(), block.GetName()) + missingRef.Subject = block.GetUnparsed().MissingItemRange().Ptr() + } else { + missingRef.Summary = "Empty ref" + missingRef.Detail = fmt.Sprintf("Block '%s %s' is of type 'ref', but the ref field is empty", block.GetBlockKind(), block.GetName()) + missingRef.Subject = extra.GetRef().Range().Ptr() + missingRef.Expression = extra.GetRef() + } + return diag.Append(missingRef) + } + + refTgt, travDiag := d.Traverse(trav) + // annotate traverse diags + for _, d := range travDiag { + if d.Subject == nil { + d.Subject = extra.GetRef().Range().Ptr() + } else if d.Context == nil { + d.Context = extra.GetRef().Range().Ptr() + } + if d.Expression == nil { + d.Expression = extra.GetRef() + } + } + diag = diag.Extend(travDiag) + if diag.HasErrors() { + return + } + diag = diag.Extend( + 
block.UpdateFromRef(refTgt, extra.GetRef()), + ) + if diag.HasErrors() { + return + } + + *block.GetDecoded() = true + return +} diff --git a/evaluateBlocks.go b/evaluateBlocks.go new file mode 100644 index 00000000..681239d4 --- /dev/null +++ b/evaluateBlocks.go @@ -0,0 +1,291 @@ +package main + +import ( + "fmt" + "slices" + "strings" + "weave-cli/pkg/diagnostics" + "weave-cli/pkg/jsontools" + "weave-cli/pkg/parexec" + "weave-cli/plugins/content" + "weave-cli/plugins/data" + + "github.com/hashicorp/hcl/v2" + "github.com/itchyny/gojq" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/json" + "golang.org/x/exp/maps" +) + +// data block evaluation + +type dataBlocksEvaluator struct { + dataPlugins map[string]any +} + +type dataEvalResult struct { + diagnostics.Diagnostics + Type string + Name string + Res any +} + +func (eb *dataBlocksEvaluator) evalBlock(db *DataBlock) (res dataEvalResult) { + if !db.Decoded { + res.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Undecoded block", + Detail: fmt.Sprintf(`%s block '%s %s "%s"' wasn't decoded`, BK_DATA, BK_DATA, db.Type, db.Name), + }) + return + } + rawPlugin, found := eb.dataPlugins[db.Type] + if !found { + res.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Plugin not found", + Detail: fmt.Sprintf("plugin %s.%s not found", BK_DATA, db.Type), + }) + return + } + + attrs, diags := AttrsToJSON(db.Attrs) + if res.ExtendHcl(diags) { + return + } + + var err error + res.Res, err = rawPlugin.(data.Plugin).Execute(attrs) + if res.FromErr(err, "Data plugin error") { + return + } + + res.Type = db.Type + res.Name = db.Name + return +} + +func EvaluateDataBlocks(dataPlugins map[string]any, dataBlocks []DataBlock) (dict map[string]any, diags diagnostics.Diagnostics) { + ev := dataBlocksEvaluator{ + dataPlugins: dataPlugins, + } + // access through pe lock + dataDict := map[string]any{} + pe := parexec.New( + parexec.NewLimiter(5), + func(res dataEvalResult, _ int) (cmd 
parexec.Command) { + if diags.Extend(res.Diagnostics) { + return parexec.STOP + } + var err error + dataDict, err = jsontools.MapSet(dataDict, []string{res.Type, res.Name}, res.Res) + diags.FromErr(err, "Data dict set key error") + return + }, + ) + parexec.MapRef(pe, dataBlocks, ev.evalBlock) + pe.WaitDoneAndLock() + if diags.HasErrors() { + return + } + dict = map[string]any{ + BK_DATA: dataDict, + } + return +} + +// content block queries +type queryEvaluator struct { + pe parexec.Executor[diagnostics.Diagnostics] + dict map[string]any + goEvaluateQuery func(*ContentBlock) +} + +func EvaluateQueries(dict map[string]any, cbs []ContentBlock) (diags diagnostics.Diagnostics) { + ev := queryEvaluator{ + pe: *parexec.New( + parexec.NewLimiter(5), + func(res diagnostics.Diagnostics, idx int) (cmd parexec.Command) { + if diags.Extend(res) { + return parexec.STOP + } + return + }, + ), + dict: dict, + } + ev.goEvaluateQuery = parexec.GoWithArg(&ev.pe, ev.evaluateQuery) + ev.evaluateQueries(cbs) + ev.pe.WaitDoneAndLock() + return +} + +func (ev *queryEvaluator) evaluateQueries(cbs []ContentBlock) { + for i := range cbs { + cb := &cbs[i] + if cb.Query != nil { + ev.goEvaluateQuery(cb) + } else { + // no query -> no modifications -> no need to clone the dict + cb.localDict = ev.dict + } + ev.evaluateQueries(cb.NestedContentBlocks) + } +} + +func (ev *queryEvaluator) evaluateQuery(cb *ContentBlock) (diags diagnostics.Diagnostics) { + query, err := gojq.Parse(*cb.Query) + if err != nil { + diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Can't parse jq query", + Detail: fmt.Sprintf("Error: %s Query: %s", err, *cb.Query), + }) + // must bail out: query is nil after a parse error, and query.Run(nil receiver) would panic + return + } + + iter := query.Run(ev.dict) + + qRes, ok := iter.Next() + if !ok { + diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagWarning, + Summary: "Jq query returned nothing", + Detail: fmt.Sprintf("Query: %s", *cb.Query), + }) + return + } + // decouple from the ev.dict + cb.localDict = maps.Clone(ev.dict) + 
cb.localDict["query_result"] = qRes + return +} + +// content block evaluation +type contentBlocksEvaluator struct { + pe parexec.Executor[contentEvalResult] + contentPlugins map[string]any + goEvaluateContentBlock func(*ContentBlock) +} + +type contentEvalResult struct { + diagnostics.Diagnostics + res string +} + +func EvaluateContentBlocks(contentPlugins map[string]any, cbs []ContentBlock) (output string, diags diagnostics.Diagnostics) { + var orderedResult []string + ev := contentBlocksEvaluator{ + pe: *parexec.New( + parexec.NewLimiter(5), + func(res contentEvalResult, idx int) (cmd parexec.Command) { + if diags.Extend(res.Diagnostics) { + return parexec.STOP + } + orderedResult = parexec.SetAt(orderedResult, idx, res.res) + return + }, + ), + contentPlugins: contentPlugins, + } + ev.goEvaluateContentBlock = parexec.GoWithArg(&ev.pe, ev.evaluateContentBlock) + ev.evaluateContentBlocks(cbs) + ev.pe.WaitDoneAndLock() + if diags.HasErrors() { + return + } + output = strings.Join(orderedResult, "\n") + return +} + +func (ev *contentBlocksEvaluator) evaluateContentBlock(cb *ContentBlock) (res contentEvalResult) { + if !cb.Decoded { + res.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Undecoded block", + Detail: fmt.Sprintf(`%s block '%s %s "%s"' wasn't decoded`, BK_CONTENT, BK_CONTENT, cb.Type, cb.Name), + }) + return + } + rawPlugin, found := ev.contentPlugins[cb.Type] + if !found { + res.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Plugin not found", + Detail: fmt.Sprintf("plugin %s.%s not found", BK_CONTENT, cb.Type), + }) + return + } + attrs, diags := AttrsToJSON(cb.Attrs) + if res.ExtendHcl(diags) { + return + } + + pluginRes, err := rawPlugin.(content.Plugin).Execute(attrs, cb.localDict) + if res.FromErr(err, "Content plugin error") { + return + } + res.res = pluginRes + return +} + +func (ev *contentBlocksEvaluator) evaluateContentBlocks(cbs []ContentBlock) { + for i := range cbs { + cb := &cbs[i] + if cb.Type != "generic" { + 
ev.goEvaluateContentBlock(cb) + } + ev.evaluateContentBlocks(cb.NestedContentBlocks) + } +} + +func (d *Decoder) FindDoc(name string) (doc *Document, diags diagnostics.Diagnostics) { + n := slices.IndexFunc(d.root.Documents, func(d Document) bool { + return d.Name == name + }) + if n == -1 { + diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Documnent not found", + Detail: fmt.Sprintf("Can't find a document named '%s'", name), + }) + return + } + return &d.root.Documents[n], nil +} + +func (d *Decoder) Evaluate(name string) (output string, diags diagnostics.Diagnostics) { + doc, diag := d.FindDoc(name) + if diags.Extend(diag) { + return + } + + dict, diag := EvaluateDataBlocks(d.plugins.data.plugins, doc.DataBlocks) + if diags.Extend(diag) { + return + } + diag = EvaluateQueries(dict, doc.ContentBlocks) + if diags.Extend(diag) { + return + } + + output, diag = EvaluateContentBlocks(d.plugins.content.plugins, doc.ContentBlocks) + diags.Extend(diag) + return +} + +func AttrsToJSON(attrs hcl.Attributes) (res json.SimpleJSONValue, diag hcl.Diagnostics) { + attrsMap := make(map[string]cty.Value, len(attrs)) + for key, attr := range attrs { + val, dgs := attr.Expr.Value(nil) + if len(dgs) > 0 { + for _, di := range dgs { + di.Severity = hcl.DiagWarning + di.Detail = fmt.Sprintf("Evaluation failed for value at key '%s': %s", key, di.Detail) + } + diag = diag.Extend(dgs) + continue + } + attrsMap[key] = val + } + return json.SimpleJSONValue{Value: cty.ObjectVal(attrsMap)}, nil +} diff --git a/go.mod b/go.mod new file mode 100644 index 00000000..0d09b262 --- /dev/null +++ b/go.mod @@ -0,0 +1,36 @@ +module weave-cli + +go 1.21.5 + +require ( + github.com/hashicorp/go-hclog v0.14.1 + github.com/hashicorp/go-plugin v1.6.0 + github.com/hashicorp/hcl/v2 v2.19.1 + github.com/itchyny/gojq v0.12.14 + github.com/zclconf/go-cty v1.13.0 + golang.org/x/exp v0.0.0-20231214170342-aacd6d4b4611 + golang.org/x/term v0.15.0 +) + +require ( + github.com/agext/levenshtein 
v1.2.1 // indirect + github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect + github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect + github.com/fatih/color v1.7.0 // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/google/go-cmp v0.5.9 // indirect + github.com/hashicorp/yamux v0.1.1 // indirect + github.com/itchyny/timefmt-go v0.1.5 // indirect + github.com/mattn/go-colorable v0.1.4 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77 // indirect + github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect + github.com/oklog/run v1.0.0 // indirect + github.com/stretchr/testify v1.8.4 // indirect + golang.org/x/net v0.17.0 // indirect + golang.org/x/sys v0.15.0 // indirect + golang.org/x/text v0.13.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d // indirect + google.golang.org/grpc v1.59.0 // indirect + google.golang.org/protobuf v1.31.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 00000000..67cef455 --- /dev/null +++ b/go.sum @@ -0,0 +1,85 @@ +github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= +github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= +github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= +github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= +github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= +github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8= +github.com/davecgh/go-spew v1.1.1 
h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= +github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/hashicorp/go-hclog v0.14.1 h1:nQcJDQwIAGnmoUWp8ubocEX40cCml/17YkF6csQLReU= +github.com/hashicorp/go-hclog v0.14.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-plugin v1.6.0 h1:wgd4KxHJTVGGqWBq4QPB1i5BZNEx9BR8+OFmHDmTk8A= +github.com/hashicorp/go-plugin v1.6.0/go.mod h1:lBS5MtSSBZk0SHc66KACcjjlU6WzEVP/8pwz68aMkCI= +github.com/hashicorp/hcl/v2 v2.19.1 h1://i05Jqznmb2EXqa39Nsvyan2o5XyMowW5fnCKW5RPI= +github.com/hashicorp/hcl/v2 v2.19.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE= +github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE= +github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= +github.com/itchyny/gojq v0.12.14 h1:6k8vVtsrhQSYgSGg827AD+PVVaB1NLXEdX+dda2oZCc= +github.com/itchyny/gojq v0.12.14/go.mod h1:y1G7oO7XkcR1LPZO59KyoCRy08T3j9vDYRV0GgYSS+s= +github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE= +github.com/itchyny/timefmt-go 
v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8= +github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= +github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= +github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77 h1:7GoSOOW2jpsfkntVKaS2rAr1TJqfcxotyaUcuxoZSzg= +github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= 
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= +github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/zclconf/go-cty v1.13.0 h1:It5dfKTTZHe9aeppbNOda3mN7Ag7sg6QkBNm6TkyFa0= +github.com/zclconf/go-cty v1.13.0/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= +golang.org/x/exp v0.0.0-20231214170342-aacd6d4b4611 h1:qCEDpW1G+vcj3Y7Fy52pEM1AWm3abj8WimGYejI3SC4= +golang.org/x/exp v0.0.0-20231214170342-aacd6d4b4611/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI= +golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= +golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d h1:uvYuEyMHKNt+lT4K3bN6fGswmK8qSvcreM3BwjDh+y4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= +google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= +google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/main.go b/main.go new file mode 100644 index 00000000..10be0afe --- /dev/null +++ b/main.go @@ -0,0 +1,79 @@ +package main + +import ( + "flag" + "fmt" + + "os" + + "github.com/hashicorp/hcl/v2/gohcl" +) + +var path, pluginPath, docName string + +type Decoder struct { + root *Templates + plugins *Plugins +} + +func argParse() error { + flag.StringVar(&path, "path", "", "a path to a directory with *.hcl files") + flag.StringVar(&pluginPath, "plugins", "", "a path to a __plugin file__") + flag.StringVar(&docName, "document", "", "the name of the document to process") + flag.Parse() + if path == "" { + return fmt.Errorf("path required") + } + if docName == "" { + return fmt.Errorf("document name required") + } + if pluginPath == "" { + return fmt.Errorf("plugins required") + } + return nil +} + +func run() error { + err := argParse() + if err != nil { + return err + } + body, fileMap, diags := fromDisk() + defer func() { PrintDiags(diags, fileMap) }() + if diags.HasErrors() { + return 
diags + } + + plugins, pluginDiag := NewPlugins(pluginPath) + + if diags.Extend(pluginDiag) { + return diags + } + + defer plugins.Kill() + d := Decoder{ + root: &Templates{}, + plugins: plugins, + } + if diags.ExtendHcl(gohcl.DecodeBody(body, nil, d.root)) { + return diags + } + + if diags.ExtendHcl(d.Decode()) { + return diags + } + + output, diag := d.Evaluate(docName) + if diag.HasErrors() { + return diags + } + fmt.Println(output) + return nil +} + +func main() { + if err := run(); err != nil { + fmt.Fprintf(os.Stderr, "error: %s\n", err) + os.Exit(1) + } +} diff --git a/pkg/diagnostics/diagnostics.go b/pkg/diagnostics/diagnostics.go new file mode 100644 index 00000000..17fc0b64 --- /dev/null +++ b/pkg/diagnostics/diagnostics.go @@ -0,0 +1,51 @@ +package diagnostics + +import "github.com/hashicorp/hcl/v2" + +type Diagnostics hcl.Diagnostics + +func (d Diagnostics) Error() string { + return (hcl.Diagnostics)(d).Error() +} + +// Appends diag to diagnostics, returns true if the just-appended diagnostic is an error +func (d *Diagnostics) Append(diag *hcl.Diagnostic) (addedErrors bool) { + *d = append(*d, diag) + return diag.Severity == hcl.DiagError +} + +// Appends all diags to diagnostics, returns true if the just-appended diagnostics contain an error +func (d *Diagnostics) Extend(diags Diagnostics) (addedErrors bool) { + *d = append(*d, diags...) + return diags.HasErrors() +} + +func (d *Diagnostics) ExtendHcl(diags hcl.Diagnostics) (addedErrors bool) { + *d = append(*d, diags...) + return diags.HasErrors() +} + +// HasErrors returns true if the receiver contains any diagnostics of +// severity DiagError. 
+func (d *Diagnostics) HasErrors() bool { + return (*hcl.Diagnostics)(d).HasErrors() +} + +// Create diagnostic and append it if err !=nil +func (d *Diagnostics) FromErr(err error, summary string) (addedErrors bool) { + if err == nil { + return false + } + // for FromErr to be inlined more often + *d = append(*d, FromErr(err, summary)) + return true +} + +func FromErr(err error, summary string) *hcl.Diagnostic { + return &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: summary, + Detail: err.Error(), + Extra: err, + } +} diff --git a/pkg/jsontools/jsontools.go b/pkg/jsontools/jsontools.go new file mode 100644 index 00000000..a6630b48 --- /dev/null +++ b/pkg/jsontools/jsontools.go @@ -0,0 +1,68 @@ +package jsontools + +import ( + "encoding/json" + "fmt" +) + +func MapSet(m map[string]any, keys []string, val any) (map[string]any, error) { + curMap := m + if len(keys) == 0 { + return m, fmt.Errorf("MapSet: keys empty") + } + if len(keys) > 1 { + for _, k := range keys[:len(keys)-1] { + v, found := curMap[k] + if found { + var ok bool + curMap, ok = v.(map[string]any) + if !ok { + return m, fmt.Errorf("MapSet: failed to cast to map[string]any") + } + } else { + nextMap := map[string]any{} + curMap[k] = nextMap + curMap = nextMap + } + } + } + curMap[keys[len(keys)-1]] = val + return m, nil +} + +func MapGet(m any, keys []string) (val any, err error) { + if len(keys) == 0 { + err = fmt.Errorf("MapGet: keys empty") + return + } + val = m + for _, k := range keys { + asMap, ok := val.(map[string]any) + if !ok { + err = fmt.Errorf("MapGet: failed to cast to map[string]any") + return + } + val, ok = asMap[k] + if !ok { + err = fmt.Errorf("can't find a key") + return + } + } + return +} + +func Dump(obj any) string { + objBytes, err := json.Marshal(obj) + if err != nil { + objBytes = []byte(fmt.Sprintf("Failed to dump the object as json: %s", err)) + } + return string(objBytes) +} + +func UnmarshalBytes(bytes, value any) error { + data, ok := bytes.([]byte) + if !ok { 
+ return fmt.Errorf("expected array of bytes") + } + return json.Unmarshal(data, value) +} diff --git a/pkg/parexec/parexec.go b/pkg/parexec/parexec.go new file mode 100644 index 00000000..735f3d7e --- /dev/null +++ b/pkg/parexec/parexec.go @@ -0,0 +1,204 @@ +package parexec + +import ( + "sync" + "sync/atomic" +) + +// Controls the scheduler. Values: PROCEED (zero-value) and STOP +type Command uint8 + +const ( + // Continue async execution (zero-value) + PROCEED = Command(iota) + // Try to stop as soon as possible (start canceling new tasks and stop scheduling alreday submitted) + STOP +) + +// Sets no limit to the degree of parallel execution +var NO_LIMIT = (*Limiter)(nil) + +type Limiter struct { + cond sync.Cond + availible int + total int +} + +// Limits parallel executions to at most limit simultaneously. +// +// Can be shared between multiple [ParExecutor]s +func NewLimiter(limit int) *Limiter { + l := &Limiter{ + availible: limit, + total: limit, + } + l.cond = *sync.NewCond(&sync.Mutex{}) + return l +} + +// Takes a limiter token. Must [Return] it after +func (l *Limiter) Take() { + l.cond.L.Lock() + for l.availible <= 0 { + l.cond.Wait() + } + l.availible-- + l.cond.L.Unlock() +} + +// Returns a token taken with Take +func (l *Limiter) Return() { + l.cond.L.Lock() + if l.availible == 0 { + l.cond.Signal() + } + l.availible++ + l.cond.L.Unlock() +} + +// Parallel executor combined with a [sync.Locker] for results +type Executor[T any] struct { + idx atomic.Int64 + tasks atomic.Int64 + stop atomic.Bool + limiter *Limiter + + cond sync.Cond + processor func(res T, idx int) Command +} + +// Create a new parallel executor +// +// 'processor' func is called syncronously (under lock) with result of execution and idx – +// a monotonically increasing from 0 number, reflecting the order in which the tasks were scheduled +// +// ParExecutor is also a mutex around data captured by the "processor" closure as soon as it's created. 
+// To be safe, use WaitDoneAndLock() to access this data. +func New[T any](limiter *Limiter, processor func(res T, idx int) Command) *Executor[T] { + pe := &Executor[T]{ + processor: processor, + limiter: limiter, + } + pe.cond.L = &sync.Mutex{} + return pe +} + +func (pe *Executor[T]) Lock() { + pe.cond.L.Lock() +} + +func (pe *Executor[T]) Unlock() { + pe.cond.L.Unlock() +} + +// Wait unill all scheduled parallel tasks are done (or cancelled) +// After this function returns and before any other tasks are scheduled nothing +// will execute processor function and you can access its data + +// Allows scheduled tasks to modify the "processor" data, wait untill all tasks are done and return +func (pe *Executor[T]) WaitDoneAndLock() { + pe.cond.L.Lock() + for pe.tasks.Load() != 0 { + pe.cond.Wait() + } +} + +// n >= 1! +func (pe *Executor[T]) taskAdd(n int) (idx int) { + if n < 1 { + panic("n must be strictly positive") + } + n64 := int64(n) + pe.tasks.Add(n64) + return int(pe.idx.Add(n64) - n64) +} + +func (pe *Executor[T]) taskDone() { + taskCount := pe.tasks.Add(-1) + if taskCount == 0 { + pe.cond.Broadcast() + } +} + +func (pe *Executor[T]) goroutineBody(idx int, f func() T) { + defer pe.taskDone() + limiterActive := pe.limiter != nil + if limiterActive { + pe.limiter.Take() + defer func() { + if limiterActive { + pe.limiter.Return() + } + }() + } + if pe.stop.Load() { + return + } + res := f() + if limiterActive { + limiterActive = false + pe.limiter.Return() + } + pe.Lock() + defer pe.Unlock() + cmd := pe.processor(res, idx) + if cmd == STOP { + pe.stop.Store(true) + } +} + +// Execute function f in parallel executor, returns the result into executor's "processor" function +func (pe *Executor[T]) Go(f func() T) { + if pe.stop.Load() { + return + } + idx := pe.taskAdd(1) + go pe.goroutineBody(idx, f) +} + +func GoWithArg[I, T any](pe *Executor[T], f func(I) T) func(I) { + return func(input I) { + pe.Go(func() T { + return f(input) + }) + } +} + +func MapRef[I, T 
any](pe *Executor[T], input []I, f func(*I) T) { + if len(input) == 0 || pe.stop.Load() { + return + } + idxStart := pe.taskAdd(len(input)) + for i := range input { + go func(idx int, input *I) { + pe.goroutineBody(idx, func() T { return f(input) }) + }(idxStart+i, &input[i]) + } +} + +func Map[I, T any](pe *Executor[T], input []I, f func(I) T) { + if len(input) == 0 || pe.stop.Load() { + return + } + idxStart := pe.taskAdd(len(input)) + for i, input := range input { + go func(idx int, input I) { + pe.goroutineBody(idx, func() T { return f(input) }) + }(idxStart+i, input) + } +} + +// Sets s[idx] = val, growing s if needed, and returns updated slice +func SetAt[T any](s []T, idx int, val T) []T { + needToAlloc := idx - len(s) + switch { + case needToAlloc > 0: + s = append(s, make([]T, needToAlloc)...) + fallthrough + case needToAlloc == 0: + s = append(s, val) + default: + s[idx] = val + } + return s +} diff --git a/plugins.go b/plugins.go new file mode 100644 index 00000000..788fbfa2 --- /dev/null +++ b/plugins.go @@ -0,0 +1,136 @@ +package main + +import ( + "fmt" + "io" + "os/exec" + "slices" + "strings" + "sync" + "weave-cli/pkg/diagnostics" + "weave-cli/plugins" + plugContent "weave-cli/plugins/content" + + "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/hcl/v2" + "golang.org/x/exp/maps" +) + +type Plugins struct { + content PluginType + data PluginType + client *plugin.Client +} + +type PluginType struct { + plugins map[string]any + Names func() string +} + +func NewPluginType(plugins map[string]any) PluginType { + return PluginType{ + plugins: plugins, + Names: memoizedKeys(&plugins), + } +} + +func (p *Plugins) ByKind(kind string) *PluginType { + switch kind { + case BK_CONTENT: + return &p.content + case BK_DATA: + return &p.data + default: + panic(fmt.Errorf("unknown plugin kind '%s'", kind)) + } +} + +func memoizedKeys[M ~map[string]V, V any](m *M) func() string { + return sync.OnceValue(func() string { + keys 
:= maps.Keys(*m) + slices.Sort(keys) + return JoinSurround(", ", "'", keys...) + }) +} + +type genericPlugin struct{} + +// Execute implements content.Plugin. +func (*genericPlugin) Execute(attrs, content any) (string, error) { + return "", nil +} + +var _ plugContent.Plugin = (*genericPlugin)(nil) + +func NewPlugins(pluginPath string) (p *Plugins, diag diagnostics.Diagnostics) { + // TODO: setup pluggin logging? + hclog.DefaultOutput = io.Discard + client := plugin.NewClient(&plugin.ClientConfig{ + HandshakeConfig: plugins.Handshake, + Plugins: plugins.PluginMap, + Cmd: exec.Command(pluginPath), + // Logger: hclog.de, + }) + defer func() { + if diag.HasErrors() { + client.Kill() + } + }() + + // Connect via RPC + rpcClient, err := client.Client() + if diag.FromErr(err, "Plugin connection error") { + return + } + + content := map[string]any{ + "generic": &genericPlugin{}, + } + data := map[string]any{} + + for pluginName := range plugins.PluginMap { + split := strings.SplitN(pluginName, ".", 2) + if len(split) != 2 { + diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid plugin name", + Detail: fmt.Sprintf("invalid name format for plugin '%s': missing dot", pluginName), + }) + return + } + var tgtMap map[string]any + switch split[0] { + case BK_CONTENT: + tgtMap = content + case BK_DATA: + tgtMap = data + default: + diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid plugin name", + Detail: fmt.Sprintf("invalid name for plugin '%s': '%s' is an invalid plugin kind", pluginName, split[0]), + }) + return + } + // Request the plugin + var rawPlugin any + rawPlugin, err = rpcClient.Dispense(pluginName) + if diag.FromErr(err, "Plugin RPC error") { + return + } + tgtMap[split[1]] = rawPlugin + } + + p = &Plugins{ + content: NewPluginType(content), + data: NewPluginType(data), + } + return +} + +func (p *Plugins) Kill() { + if p != nil && p.client != nil { + p.client.Kill() + } +} diff --git a/plugins/cfg.go b/plugins/cfg.go 
new file mode 100644 index 00000000..81e30c03 --- /dev/null +++ b/plugins/cfg.go @@ -0,0 +1,21 @@ +package plugins + +import ( + "weave-cli/plugins/content" + "weave-cli/plugins/data" + + "github.com/hashicorp/go-plugin" +) + +var Handshake = plugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "PLUGINS_FOR", + MagicCookieValue: "weave-cli", +} + +var PluginMap = plugin.PluginSet{ + "data.plugin_a": &data.GoPlugin{}, + "data.plugin_b": &data.GoPlugin{}, + "content.table": &content.GoPlugin{}, + "content.text": &content.GoPlugin{}, +} diff --git a/plugins/content/content_plugin.go b/plugins/content/content_plugin.go new file mode 100644 index 00000000..e0597924 --- /dev/null +++ b/plugins/content/content_plugin.go @@ -0,0 +1,107 @@ +package content + +import ( + "encoding/json" + "fmt" + "net/rpc" + + "github.com/hashicorp/go-plugin" +) + +// Interface of the plugin + +type Plugin interface { + Execute(attrs, dict any) (string, error) +} + +type RPCServer struct { + Impl Plugin +} + +type Args struct { + Attrs []byte + Content []byte +} + +func (s *RPCServer) Execute(input Args, resp *[]byte) (err error) { + result, err := s.Impl.Execute(input.Attrs, input.Content) + if err != nil { + return err + } + *resp, err = json.Marshal( + TextStruct{ + Text: string(result), + }, + ) + return +} + +// Adapter between plugin interface and net/rpc + +type RPCClient struct { + client *rpc.Client +} + +var _ Plugin = (*RPCClient)(nil) + +func (c *RPCClient) Execute(attrs, dict any) (res string, err error) { + attrsBytes, err := json.Marshal(attrs) + if err != nil { + return + } + contentBytes, err := json.Marshal(dict) + if err != nil { + return + } + var response []byte + err = c.client.Call( + "Plugin.Execute", + Args{ + Attrs: attrsBytes, + Content: contentBytes, + }, + &response) + if err != nil { + return + } + var text TextStruct + err = json.Unmarshal(response, &text) + if err != nil { + return + } + return text.Text, nil +} + +// The go-plugin plugin, combines 
all above into one interface + +type GoPlugin struct { + Impl Plugin +} + +var _ plugin.Plugin = (*GoPlugin)(nil) + +func (p *GoPlugin) Server(_ *plugin.MuxBroker) (any, error) { + return &RPCServer{Impl: p.Impl}, nil +} + +func (p *GoPlugin) Client(_ *plugin.MuxBroker, c *rpc.Client) (any, error) { + return &RPCClient{client: c}, nil +} + +type TextStruct struct { + Text string `json:"text"` +} + +func GetText(attrs any) (text string, err error) { + m, ok := attrs.(map[string]any) + if !ok { + err = fmt.Errorf("failed to parse") + return + } + text, ok = m["text"].(string) + if !ok { + err = fmt.Errorf("failed to parse") + return + } + return +} diff --git a/plugins/content/table/impl.go b/plugins/content/table/impl.go new file mode 100644 index 00000000..b658b00e --- /dev/null +++ b/plugins/content/table/impl.go @@ -0,0 +1,60 @@ +package table + +import ( + "bytes" + "fmt" + "text/template" + "weave-cli/pkg/jsontools" + "weave-cli/plugins/content" +) + +// Actual implementation of the plugin + +type Impl struct{} + +var _ content.Plugin = (*Impl)(nil) + +const PLUGIN_NAME = "content.table" + +func (Impl) Execute(attrsRaw, dictRaw any) (resp string, err error) { + var attrs struct { + Text string `json:"text"` + Columns []string `json:"columns"` + } + var dict any + err = jsontools.UnmarshalBytes(attrsRaw, &attrs) + if err != nil { + return + } + err = jsontools.UnmarshalBytes(dictRaw, &dict) + if err != nil { + return + } + + tmpl, err := template.New(PLUGIN_NAME).Parse(attrs.Text) + if err != nil { + err = fmt.Errorf("failed to parse the template: %w; template: `%s`", err, attrs.Text) + return + } + + var buf bytes.Buffer + buf.WriteString(PLUGIN_NAME) + buf.WriteByte(':') + + err = tmpl.Execute(&buf, dict) + if err != nil { + err = fmt.Errorf("failed to execute the template: %w; template: `%s`; dict: `%s`", err, attrs.Text, jsontools.Dump(dict)) + return + } + buf.WriteByte('.') + + if len(attrs.Columns) == 0 { + return buf.String(), nil + } + 
buf.WriteString(attrs.Columns[0]) + for _, col := range attrs.Columns[1:] { + buf.WriteByte(',') + buf.WriteString(col) + } + return buf.String(), nil +} diff --git a/plugins/content/text/impl.go b/plugins/content/text/impl.go new file mode 100644 index 00000000..b287ccae --- /dev/null +++ b/plugins/content/text/impl.go @@ -0,0 +1,50 @@ +package text + +import ( + "bytes" + "fmt" + "text/template" + "weave-cli/pkg/jsontools" + "weave-cli/plugins/content" +) + +// Actual implementation of the plugin + +type Impl struct{} + +var _ content.Plugin = (*Impl)(nil) + +const PLUGIN_NAME = "content.text" + +func (Impl) Execute(attrsRaw, dictRaw any) (resp string, err error) { + var attrs struct { + Text string `json:"text"` + } + var dict any + err = jsontools.UnmarshalBytes(attrsRaw, &attrs) + if err != nil { + return + } + err = jsontools.UnmarshalBytes(dictRaw, &dict) + if err != nil { + return + } + + tmpl, err := template.New(PLUGIN_NAME).Parse(attrs.Text) + if err != nil { + err = fmt.Errorf("failed to parse the template: %w; template: `%s`", err, attrs.Text) + return + } + + var buf bytes.Buffer + buf.WriteString(PLUGIN_NAME) + buf.WriteByte(':') + + err = tmpl.Execute(&buf, dict) + if err != nil { + err = fmt.Errorf("failed to execute the template: %w; template: `%s`; dict: `%s`", err, attrs.Text, jsontools.Dump(dict)) + return + } + resp = buf.String() + return +} diff --git a/plugins/data/data_plugin.go b/plugins/data/data_plugin.go new file mode 100644 index 00000000..08988270 --- /dev/null +++ b/plugins/data/data_plugin.go @@ -0,0 +1,72 @@ +package data + +import ( + "encoding/json" + "net/rpc" + + "github.com/hashicorp/go-plugin" +) + +// Interface of the plugin + +type Plugin interface { + Execute(input any) (result any, err error) +} + +type RPCServer struct { + Impl Plugin +} + +func (s *RPCServer) Execute(input []byte, resp *[]byte) (err error) { + result, err := s.Impl.Execute(input) + if err != nil { + return err + } + *resp, err = json.Marshal(Result{ + 
Result: result, + }) + return +} + +// Adapter between plugin interface and net/rpc + +type RPCClient struct { + client *rpc.Client +} + +var _ Plugin = (*RPCClient)(nil) + +func (c *RPCClient) Execute(attrs any) (res any, err error) { + attrsBytes, err := json.Marshal(attrs) + if err != nil { + return + } + var response []byte + err = c.client.Call("Plugin.Execute", attrsBytes, &response) + if err != nil { + return + } + + err = json.Unmarshal(response, &res) + return +} + +// The go-plugin plugin, combines all above into one interface + +type GoPlugin struct { + Impl Plugin +} + +var _ plugin.Plugin = (*GoPlugin)(nil) + +func (p *GoPlugin) Server(_ *plugin.MuxBroker) (any, error) { + return &RPCServer{Impl: p.Impl}, nil +} + +func (p *GoPlugin) Client(_ *plugin.MuxBroker, c *rpc.Client) (any, error) { + return &RPCClient{client: c}, nil +} + +type Result struct { + Result any `json:"result"` +} diff --git a/plugins/data/plugin_a/impl.go b/plugins/data/plugin_a/impl.go new file mode 100644 index 00000000..82c1b9dd --- /dev/null +++ b/plugins/data/plugin_a/impl.go @@ -0,0 +1,25 @@ +package plugin_a + +import ( + "weave-cli/pkg/jsontools" + "weave-cli/plugins/data" +) + +// Actual implementation of the plugin + +type Impl struct{} + +var _ data.Plugin = (*Impl)(nil) + +func (Impl) Execute(input any) (result any, err error) { + var inputParsed struct { + ParameterX int64 `json:"parameter_x"` + ParameterY int64 `json:"parameter_y"` + } + err = jsontools.UnmarshalBytes(input, &inputParsed) + if err != nil { + return + } + + return inputParsed.ParameterX + inputParsed.ParameterY, nil +} diff --git a/plugins/data/plugin_b/impl.go b/plugins/data/plugin_b/impl.go new file mode 100644 index 00000000..78244639 --- /dev/null +++ b/plugins/data/plugin_b/impl.go @@ -0,0 +1,21 @@ +package plugin_b + +import ( + "weave-cli/pkg/jsontools" + "weave-cli/plugins/data" +) + +type Impl struct{} + +var _ data.Plugin = (*Impl)(nil) + +func (Impl) Execute(input any) (result any, err 
error) { + var inputParsed struct { + ParameterZ any `json:"parameter_z"` + } + err = jsontools.UnmarshalBytes(input, &inputParsed) + if err != nil { + return + } + return inputParsed.ParameterZ, nil +} diff --git a/read_hcl.go b/read_hcl.go new file mode 100644 index 00000000..d6a59f3a --- /dev/null +++ b/read_hcl.go @@ -0,0 +1,86 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + "slices" + "weave-cli/pkg/diagnostics" + "weave-cli/pkg/parexec" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclsyntax" +) + +type fileParseResult struct { + file *hcl.File + diags diagnostics.Diagnostics +} + +func readFile(path string) (bytes []byte, err error) { + bytes, err = os.ReadFile(path) + if err != nil { + err = fmt.Errorf("failed to read file '%s': %w", path, err) + } + return +} + +func parseHcl(bytes []byte, filename string) *fileParseResult { + file, diags := hclsyntax.ParseConfig(bytes, filename, hcl.InitialPos) + return &fileParseResult{ + file: file, + diags: diagnostics.Diagnostics(diags), + } +} + +func processFile(path string) *fileParseResult { + bytes, err := readFile(path) + if err != nil { + diag := diagnostics.FromErr(err, "File read error") + diag.Subject = &hcl.Range{Filename: path} + return &fileParseResult{diags: []*hcl.Diagnostic{diag}} + } + return parseHcl(bytes, path) +} + +func readAndParse(files []string) (body hcl.Body, fileMap map[string]*hcl.File, diags diagnostics.Diagnostics) { + slices.Sort(files) + bodies := make([]hcl.Body, len(files)) + fileMap = make(map[string]*hcl.File, len(files)) + + pe := parexec.New( + parexec.NewLimiter(min(len(files), 4)), + func(res *fileParseResult, idx int) (cmd parexec.Command) { + if diags.Extend(res.diags) { + return + } + bodies[idx] = res.file.Body + fileMap[files[idx]] = res.file + return + }, + ) + parexec.Map(pe, files, processFile) + pe.WaitDoneAndLock() + if diags.HasErrors() { + return nil, nil, diags + } + body = hcl.MergeBodies(bodies) + return +} + +func fromDisk() 
(body hcl.Body, fileMap map[string]*hcl.File, diags diagnostics.Diagnostics) { + // TODO: replace with filepath.WalkDir() + files, err := filepath.Glob(path + "*.hcl") + if diags.FromErr(err, "Can't find files") { + return + } + if len(files) == 0 { + diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Failed to load files", + Detail: fmt.Sprintf("no *.hcl files found at %s", path), + }) + return + } + return readAndParse(files) +} diff --git a/schema.go b/schema.go new file mode 100644 index 00000000..0c20e40b --- /dev/null +++ b/schema.go @@ -0,0 +1,96 @@ +package main + +import ( + "github.com/hashicorp/hcl/v2" +) + +const ( + BK_CONTENT = "content" + BK_DATA = "data" + BK_DOCUMENT = "document" +) + +type Templates struct { + ContentBlocks []ContentBlock `hcl:"content,block"` + DataBlocks []DataBlock `hcl:"data,block"` + Documents []Document `hcl:"document,block"` +} + +type MetaBlock struct { + Name *string `hcl:"name,optional"` + Author *string `hcl:"author,optional"` + Description *string `hcl:"description,optional"` + Tags []*string `hcl:"tags,optional"` + UpdatedAt *string `hcl:"updated_at,optional"` + + RequiredFields []*string `hcl:"required_fields,optional"` +} + +type DataBlock struct { + Type string `hcl:"type,label"` + Name string `hcl:"type,label"` + Attrs hcl.Attributes + Meta *MetaBlock `hcl:"meta,block"` + Decoded bool + Extra hcl.Body `hcl:",remain"` +} + +type DataBlockExtra struct { + Ref hcl.Expression `hcl:"ref,optional"` + Extra hcl.Body `hcl:",remain"` +} + +type ContentBlock struct { + Type string `hcl:"type,label"` + Name string `hcl:"name,label"` + Attrs hcl.Attributes + Meta *MetaBlock `hcl:"meta,block"` + Decoded bool + + Query *string `hcl:"query,optional"` + Title *string `hcl:"title,optional"` + + Unparsed hcl.Body `hcl:",remain"` + NestedContentBlocks []ContentBlock + + localDict map[string]any +} + +type ContentBlockExtra struct { + Ref hcl.Expression `hcl:"ref,optional"` + ContentBlocks []ContentBlock 
`hcl:"content,block"` + Unparsed hcl.Body `hcl:",remain"` +} + +type Document struct { + Name string `hcl:"name,label"` + + Meta *MetaBlock `hcl:"meta,block"` + Title *string `hcl:"title,optional"` + + DataBlocks []DataBlock `hcl:"data,block"` + ContentBlocks []ContentBlock `hcl:"content,block"` +} + +// Block interfaces + +type Block interface { + // Data, common to all block kinds + GetType() *string + GetName() string + GetAttrs() *hcl.Attributes + GetMeta() **MetaBlock + GetDecoded() *bool + GetUnparsed() hcl.Body + + GetBlockKind() string + // Get the structure for parsing this block's Extra fields + NewBlockExtra() BlockExtra + DecodeNestedBlocks(*Decoder, BlockExtra) hcl.Diagnostics + UpdateFromRef(any, hcl.Expression) hcl.Diagnostics +} + +type BlockExtra interface { + GetRef() hcl.Expression + GetUnparsed() hcl.Body +} diff --git a/schema_impl.go b/schema_impl.go new file mode 100644 index 00000000..bf75e1c3 --- /dev/null +++ b/schema_impl.go @@ -0,0 +1,191 @@ +package main + +import ( + "fmt" + + "github.com/hashicorp/hcl/v2" +) + +func updateCommon(b Block, ref Block) { + // Assigning data from referenced block + refAttrs := *ref.GetAttrs() + bAttrs := *b.GetAttrs() + for k, v := range refAttrs { + if _, exists := bAttrs[k]; exists { + continue + } + bAttrs[k] = v + } + *b.GetType() = *ref.GetType() + + // TODO: ref with meta-blocks: field by field or all together? 
+ if *b.GetMeta() == nil { + *b.GetMeta() = *ref.GetMeta() + } +} + +// Implementing Block for ContentBlock + +var _ BlockExtra = (*ContentBlockExtra)(nil) + +func (br *ContentBlockExtra) GetRef() hcl.Expression { + return br.Ref +} + +func (br *ContentBlockExtra) GetUnparsed() hcl.Body { + return br.Unparsed +} + +var _ Block = (*ContentBlock)(nil) + +func (b *ContentBlock) GetAttrs() *hcl.Attributes { + return &b.Attrs +} + +func (b *ContentBlock) GetDecoded() *bool { + return &b.Decoded +} + +func (b *ContentBlock) GetMeta() **MetaBlock { + return &b.Meta +} + +func (b *ContentBlock) GetName() string { + return b.Name +} + +func (b *ContentBlock) GetUnparsed() hcl.Body { + return b.Unparsed +} + +func (b *ContentBlock) GetType() *string { + return &b.Type +} + +func (b *ContentBlock) GetBlockKind() string { + return BK_CONTENT +} + +func (b *ContentBlock) NewBlockExtra() BlockExtra { + return &ContentBlockExtra{} +} + +func (b *ContentBlock) DecodeNestedBlocks(d *Decoder, br BlockExtra) (diag hcl.Diagnostics) { + extra := br.((*ContentBlockExtra)) + b.NestedContentBlocks = extra.ContentBlocks + for i := range b.NestedContentBlocks { + // errors in nested content blocks do not prevent us from parsing the current one + diag = diag.Extend(d.DecodeBlock(&b.NestedContentBlocks[i])) + } + return +} + +func (b *ContentBlock) UpdateFromRef(refTgt any, ref hcl.Expression) (diag hcl.Diagnostics) { + tgt, ok := refTgt.(*ContentBlock) + if !ok || tgt == nil { + return diag.Append(invalidRefDiag(refTgt, ref, b.GetBlockKind())) + } + + updateCommon(b, tgt) + + if b.Query == nil { + b.Query = tgt.Query + } + if b.Title == nil { + b.Title = tgt.Title + } + + // TODO: do nested content blocks in the "ref" come over too, or just the attrs? 
+ return +} + +// Implementing Block for DataBlock + +var _ BlockExtra = (*DataBlockExtra)(nil) + +func (br *DataBlockExtra) GetRef() hcl.Expression { + return br.Ref +} + +func (br *DataBlockExtra) GetUnparsed() hcl.Body { + return br.Extra +} + +var _ Block = (*DataBlock)(nil) + +func (b *DataBlock) GetAttrs() *hcl.Attributes { + return &b.Attrs +} + +func (b *DataBlock) GetDecoded() *bool { + return &b.Decoded +} + +func (b *DataBlock) GetMeta() **MetaBlock { + return &b.Meta +} + +func (b *DataBlock) GetName() string { + return b.Name +} + +func (b *DataBlock) GetUnparsed() hcl.Body { + return b.Extra +} + +func (b *DataBlock) GetType() *string { + return &b.Type +} + +func (b *DataBlock) GetBlockKind() string { + return BK_DATA +} + +func (b *DataBlock) NewBlockExtra() BlockExtra { + return &DataBlockExtra{} +} + +func (b *DataBlock) DecodeNestedBlocks(d *Decoder, br BlockExtra) (diag hcl.Diagnostics) { + // DataBlock doesn't have nested blocks + return +} + +func (b *DataBlock) UpdateFromRef(refTgt any, ref hcl.Expression) (diag hcl.Diagnostics) { + tgt, ok := refTgt.(*DataBlock) + if !ok || tgt == nil { + return diag.Append(invalidRefDiag(refTgt, ref, b.GetBlockKind())) + } + if !tgt.Decoded { + return diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Reference to unparsed data", + Detail: ("Reference points to contents of the block that hasn't been parsed. 
" + + "Make sure that the reference is located after the block is defined and that the block has no errors"), + }) + } + + updateCommon(b, tgt) + return +} + +func invalidRefDiag(tgt any, ref hcl.Expression, curBlock string) (diag *hcl.Diagnostic) { + diag = &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid reference destination", + Subject: ref.Range().Ptr(), + Expression: ref, + } + switch tgt.(type) { + case (*ContentBlock): + diag.Detail = fmt.Sprintf("%s block can not reference content blocks", CapitalizeFirstLetter(curBlock)) + case (*DataBlock): + diag.Detail = fmt.Sprintf("%s block can not reference data blocks", CapitalizeFirstLetter(curBlock)) + case (*Document): + diag.Detail = fmt.Sprintf("%s block can not reference documents", CapitalizeFirstLetter(curBlock)) + case nil: + diag.Detail = "Unknown error while traversing a reference" + default: + diag.Detail = fmt.Sprintf("Reference in %s block points to an unsupported block type", curBlock) + } + return +} diff --git a/schema_traversal.go b/schema_traversal.go new file mode 100644 index 00000000..6451d82d --- /dev/null +++ b/schema_traversal.go @@ -0,0 +1,285 @@ +package main + +// + +import ( + "fmt" + "slices" + "sync" + + "github.com/hashicorp/hcl/v2" +) + +func traversalForExpr(expr hcl.Expression) (trav hcl.Traversal, diag hcl.Diagnostics) { + // ignore diags, just checking if the val is null + val, _ := expr.Value(nil) + if val.IsNull() { + // empty ref + return + } + trav, diag = hcl.AbsTraversalForExpr(expr) + if diag.HasErrors() { + trav = nil + } + return +} + +func (d *Decoder) Traverse(trav hcl.Traversal) (tgt any, diag hcl.Diagnostics) { + travPos := 0 + blockKind, bkTrav, bkDiag := decodeHclTraverser(trav, travPos, "block kind") + travPos++ + diag = diag.Extend(bkDiag) + if diag.HasErrors() { + return + } + + switch blockKind { + case BK_CONTENT: + return d.TraverseContentBlocks(d.root.ContentBlocks, trav, travPos) + case BK_DATA: + return 
d.TraverseDataBlocks(d.root.DataBlocks, trav, travPos) + case BK_DOCUMENT: + return d.TraverseDocuments(d.root.Documents, trav, travPos) + default: + return nil, diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unknown block kind", + Detail: fmt.Sprintf("Unknown block kind '%s', valid kinds: %s", blockKind, validBlockKinds()), + Subject: bkTrav.SourceRange().Ptr(), + }) + } +} + +func (d *Decoder) TraverseContentBlocks(cb []ContentBlock, trav hcl.Traversal, travPos int) (tgt any, diag hcl.Diagnostics) { + blockType, blockTypeTrav, btDiag := decodeHclTraverser(trav, travPos, "content block type") + travPos++ + diag = diag.Extend(btDiag) + blockName, blockNameTrav, bnDiag := decodeHclTraverser(trav, travPos, "content block name") + travPos++ + diag = diag.Extend(bnDiag) + if diag.HasErrors() { + return + } + // find referenced block + n := slices.IndexFunc(cb, func(cb ContentBlock) bool { + return cb.Type == blockType && cb.Name == blockName + }) + if n == -1 { + subj := blockTypeTrav.SourceRange() + subj.End = blockNameTrav.SourceRange().End + diag = diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Content block not found", + Detail: fmt.Sprintf( + "Content block with type '%s' and name '%s' was not found", + blockType, + blockName, + ), + Subject: &subj, + }) + return + } + return d.TraverseContentBlock(&cb[n], trav, travPos) +} + +func (d *Decoder) TraverseContentBlock(cb *ContentBlock, trav hcl.Traversal, travPos int) (tgt any, diag hcl.Diagnostics) { + if travPos == len(trav) { + // we've traversed to the destination block! 
+ return cb, nil + } + blockKind, bkTrav, bkDiag := decodeHclTraverser(trav, travPos, "block kind") + travPos++ + diag = diag.Extend(bkDiag) + if diag.HasErrors() { + return + } + + switch blockKind { + case BK_CONTENT: + if !cb.Decoded { + subj := trav[0].SourceRange() + subj.End = trav[travPos-1].SourceRange().End + return nil, diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Reference to unparsed data", + Detail: ("Reference passed through the contents of the block that hasn't been parsed. " + + "Make sure that the reference is located after the block is defined and that the block is correct"), + Subject: &subj, + }) + } + return d.TraverseContentBlocks(cb.NestedContentBlocks, trav, travPos) + case BK_DATA, BK_DOCUMENT: + return nil, diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid block kind", + Detail: fmt.Sprintf("Content blocks can contain only 'content' subblocks, '%s' is invalid", blockKind), + Subject: bkTrav.SourceRange().Ptr(), + }) + default: + return nil, diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unknown block kind", + Detail: fmt.Sprintf("Unknown kind '%s', content blocks can contain only 'content' subblocks", blockKind), + Subject: bkTrav.SourceRange().Ptr(), + }) + } +} + +func (d *Decoder) TraverseDataBlocks(db []DataBlock, trav hcl.Traversal, travPos int) (tgt any, diag hcl.Diagnostics) { + blockType, blockTypeTrav, btDiag := decodeHclTraverser(trav, travPos, "data block type") + travPos++ + diag = diag.Extend(btDiag) + blockName, blockNameTrav, bnDiag := decodeHclTraverser(trav, travPos, "data block name") + travPos++ + diag = diag.Extend(bnDiag) + if diag.HasErrors() { + return + } + // find referenced block + n := slices.IndexFunc(db, func(db DataBlock) bool { + return db.Type == blockType && db.Name == blockName + }) + if n == -1 { + subj := blockTypeTrav.SourceRange() + subj.End = blockNameTrav.SourceRange().End + diag = diag.Append(&hcl.Diagnostic{ + Severity: 
hcl.DiagError,
+			// BUG FIX: the message said "Content block" — copy-paste from
+			// TraverseContentBlocks; this path reports a missing *data* block.
+			Summary: "Data block not found",
+			Detail: fmt.Sprintf(
+				"Data block with type '%s' and name '%s' was not found",
+				blockType,
+				blockName,
+			),
+			Subject: &subj,
+		})
+		return
+	}
+	return d.TraverseDataBlock(&db[n], trav, travPos)
+}
+
+// TraverseDataBlock resolves the remainder of a traversal that has landed on a
+// data block. Data blocks have no subblocks, so any further path element is an
+// error; an exhausted traversal returns the block itself.
+func (d *Decoder) TraverseDataBlock(db *DataBlock, trav hcl.Traversal, travPos int) (tgt any, diag hcl.Diagnostics) {
+	if travPos == len(trav) {
+		// we've traversed to the destination block!
+		return db, nil
+	}
+	blockKind, bkTrav, bkDiag := decodeHclTraverser(trav, travPos, "block kind")
+	travPos++
+	// Keep decode diagnostics on every path (they were dropped in the
+	// default branch before).
+	diag = diag.Extend(bkDiag)
+
+	switch blockKind {
+	case BK_CONTENT, BK_DATA, BK_DOCUMENT:
+		subj := trav[0].SourceRange()
+		subj.End = trav[travPos-1].SourceRange().End
+		diag = diag.Append(&hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Invalid block nesting",
+			// BUG FIX: message read "can not contain contain".
+			Detail:  "Data blocks can not contain any subblocks",
+			Subject: &subj,
+		})
+	default:
+		diag = diag.Append(&hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Unknown block kind",
+			Detail:   fmt.Sprintf("Unknown block kind '%s'", blockKind),
+			Subject:  bkTrav.SourceRange().Ptr(),
+		})
+	}
+	return nil, diag
+}
+
+// TraverseDocuments finds the document named by the next traversal element and
+// descends into it.
+func (d *Decoder) TraverseDocuments(docs []Document, trav hcl.Traversal, travPos int) (tgt any, diag hcl.Diagnostics) {
+	docName, docNameTrav, btDiag := decodeHclTraverser(trav, travPos, "document name")
+	travPos++
+	diag = diag.Extend(btDiag)
+	if diag.HasErrors() {
+		return
+	}
+	// find referenced block
+	n := slices.IndexFunc(docs, func(doc Document) bool {
+		return doc.Name == docName
+	})
+
+	if n == -1 {
+		diag = diag.Append(&hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Document not found",
+			Detail: fmt.Sprintf(
+				"Document with name '%s' was not found",
+				docName,
+			),
+			Subject: docNameTrav.SourceRange().Ptr(),
+		})
+		return
+	}
+	return d.TraverseDocument(&docs[n], trav, travPos)
+}
+
+func (d *Decoder) TraverseDocument(doc *Document, trav hcl.Traversal, travPos int) (tgt any, diag
hcl.Diagnostics) { + if travPos == len(trav) { + // we've traversed to the destination document! + // (currently it's an invalid target for a ref, but this validation is on caller to do) + return doc, nil + } + blockKind, bkTrav, bkDiag := decodeHclTraverser(trav, travPos, "block kind") + travPos++ + diag = diag.Extend(bkDiag) + if diag.HasErrors() { + return + } + + switch blockKind { + case BK_CONTENT: + return d.TraverseContentBlocks(doc.ContentBlocks, trav, travPos) + case BK_DATA: + return d.TraverseDataBlocks(doc.DataBlocks, trav, travPos) + case BK_DOCUMENT: + return nil, diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid block kind", + Detail: fmt.Sprintf("Documents can contain only 'content' and 'data' subblocks, not '%s'", blockKind), + Subject: bkTrav.SourceRange().Ptr(), + }) + default: + return nil, diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unknown block kind", + Detail: fmt.Sprintf("Unknown block kind '%s', documents can contain only 'content' and 'data' subblocks", blockKind), + Subject: bkTrav.SourceRange().Ptr(), + }) + } +} + +// Utils + +var validBlockKinds = sync.OnceValue(func() string { + return JoinSurround(", ", "'", BK_CONTENT, BK_DATA, BK_DOCUMENT) +}) + +func decodeHclTraverser(trav hcl.Traversal, travPos int, what string) (name string, traverser hcl.Traverser, diag hcl.Diagnostics) { + if travPos >= len(trav) { + diag = diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: fmt.Sprintf("Missing %s", what), + Detail: fmt.Sprintf("Required %s path element wasn't specified", what), + Subject: trav[len(trav)-1].SourceRange().Ptr(), + }) + return + } + traverser = trav[travPos] + + switch typedTraverser := traverser.(type) { + case hcl.TraverseRoot: + name = typedTraverser.Name + case hcl.TraverseAttr: + name = typedTraverser.Name + default: + diag = diag.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid reference", + Detail: "The ref attribute can not contain 
this operation", + Subject: typedTraverser.SourceRange().Ptr(), + }) + } + return +} diff --git a/string_utils.go b/string_utils.go new file mode 100644 index 00000000..9382914a --- /dev/null +++ b/string_utils.go @@ -0,0 +1,68 @@ +package main + +import ( + "fmt" + "strings" + "unicode" + "unicode/utf8" +) + +func Surround(surround string, elems ...string) []string { + res := make([]string, len(elems)) + for i := range res { + res[i] = fmt.Sprintf("%s%s%s", surround, elems[i], surround) + } + return res +} + +func Join(sep string, elems ...string) string { + return strings.Join(elems, sep) +} + +func JoinSurround(sep, surround string, elems ...string) string { + if len(elems) == 0 { + return "" + } + var b strings.Builder + resLen := len(sep) * (len(elems) - 1) + resLen += len(surround) * 2 * len(elems) + for _, e := range elems { + resLen += len(e) + } + b.Grow(resLen) + + b.WriteString(surround) + b.WriteString(elems[0]) + b.WriteString(surround) + for _, e := range elems[1:] { + b.WriteString(sep) + b.WriteString(surround) + b.WriteString(e) + b.WriteString(surround) + } + return b.String() +} + +// Proper unicode aware captialization function. 
If something is wrong – just returns string as is +func CapitalizeFirstLetter(s string) string { + r, offset := utf8.DecodeRuneInString(s) + if r == utf8.RuneError { + // do nothing, since this is just a cosmetic function + return s + } + upperR := unicode.ToUpper(r) + if r == upperR { + // upper and lower case letters are identical, do not realloc + return s + } + capitalRuneLen := utf8.RuneLen(upperR) + if capitalRuneLen == -1 { + return s + } + + var b strings.Builder + b.Grow(capitalRuneLen + len(s) - offset) + b.WriteRune(upperR) + b.WriteString(s[offset:]) + return b.String() +} diff --git a/templates/test.hcl b/templates/test.hcl new file mode 100644 index 00000000..19289162 --- /dev/null +++ b/templates/test.hcl @@ -0,0 +1,40 @@ +data plugin_b "data_plugin_b" { + parameter_z = ["a", "b", "c", "d"] +} + +content text "external_block" { + text = "External block body" +} + +document "test-document" { + + data plugin_a "data_plugin_a" { + parameter_x = 1 + parameter_y = 2 + } + + data ref "data_plugin_b" { + // This should be automatically resolved to the referenced block by HCL parser + ref = data.plugin_b.data_plugin_b + } + + content text _ { + query = ".data.plugin_a.data_plugin_a" + text = "The value is {{ .data.plugin_a.data_plugin_a.result }}" + } + + content generic _ { + + content ref _ { + // This should be automatically resolved to the referenced block by HCL parser + ref = content.text.external_block + } + + content table _ { + // JQ query + query = ".data.plugin_b.data_plugin_b.result | length" + text = "The length of the list is {{ .query_result }}" + columns = ["ColumnA", "ColumnB", "ColumnC"] + } + } +} \ No newline at end of file diff --git a/util.go b/util.go new file mode 100644 index 00000000..9b1bc9d2 --- /dev/null +++ b/util.go @@ -0,0 +1,25 @@ +package main + +import ( + "bufio" + "os" + "weave-cli/pkg/diagnostics" + + "github.com/hashicorp/hcl/v2" + "golang.org/x/term" +) + +func PrintDiags(diags diagnostics.Diagnostics, fileMap 
map[string]*hcl.File) {
+	if len(diags) == 0 {
+		return
+	}
+
+	// BUG FIX: diagnostics are written to stderr, so terminal detection and
+	// width probing must use stderr's descriptor, not fd 0 (stdin). With
+	// stdin redirected the old code wrongly disabled color / mis-sized output.
+	fd := int(os.Stderr.Fd())
+	colorize := term.IsTerminal(fd)
+	width, _, err := term.GetSize(fd)
+	if err != nil || width <= 0 {
+		// not a terminal (or probe failed): fall back to a conventional width
+		width = 80
+	}
+	wr := bufio.NewWriter(os.Stderr)
+	// write error deliberately ignored: diagnostic printing is best-effort
+	_ = hcl.NewDiagnosticTextWriter(wr, fileMap, uint(width), colorize).WriteDiagnostics(hcl.Diagnostics(diags))
+	wr.Flush()
+}