diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..e324eac91 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +/generated diff --git a/cue.mod/module.cue b/cue.mod/module.cue new file mode 100644 index 000000000..37d7d2c35 --- /dev/null +++ b/cue.mod/module.cue @@ -0,0 +1 @@ +module: "github.com/grafana/cog" diff --git a/go.mod b/go.mod new file mode 100644 index 000000000..9e90cd823 --- /dev/null +++ b/go.mod @@ -0,0 +1,52 @@ +module github.com/grafana/cog + +go 1.21 + +require ( + cuelang.org/go v0.5.0 + github.com/grafana/codejen v0.0.4-0.20221122220907-a5e7cc5407b3 + github.com/grafana/kindsys v0.0.0-20230615185749-1424263c17c7 + github.com/grafana/thema v0.0.0-20230628103417-8f63313207a5 + github.com/santhosh-tekuri/jsonschema v1.2.4 + github.com/yalue/merged_fs v1.2.2 + golang.org/x/text v0.10.0 + golang.org/x/tools v0.10.0 +) + +require ( + github.com/cockroachdb/apd/v2 v2.0.2 // indirect + github.com/cockroachdb/errors v1.10.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect + github.com/cockroachdb/redact v1.1.5 // indirect + github.com/emicklei/proto v1.11.2 // indirect + github.com/getsentry/sentry-go v0.22.0 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/google/go-cmp v0.5.9 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/lib/pq v1.10.6 // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect + github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/protocolbuffers/txtpbfmt v0.0.0-20230412060525-fa9f017c0ded // indirect + github.com/rogpeppe/go-internal v1.10.0 // indirect + golang.org/x/mod v0.11.0 // indirect + golang.org/x/net v0.11.0 // indirect + golang.org/x/sync v0.3.0 // indirect + golang.org/x/sys v0.9.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) + +// replace github.com/grafana/grafana => ../grafana + +// replace github.com/grafana/codejen => ../codejen + +// replace github.com/grafana/thema => ../thema + +replace cuelang.org/go => github.com/sdboyer/cue v0.5.0-beta.2.0.20221218111347-341999f48bdb + +replace github.com/deepmap/oapi-codegen => github.com/spinillos/oapi-codegen v1.12.5-0.20230417081915-2945b61c0b1c diff --git a/go.sum b/go.sum new file mode 100644 index 000000000..6765a10c8 --- /dev/null +++ b/go.sum @@ -0,0 +1,113 @@ +github.com/cockroachdb/apd/v2 v2.0.2 h1:weh8u7Cneje73dDh+2tEVLUvyBc89iwepWCD8b8034E= +github.com/cockroachdb/apd/v2 v2.0.2/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw= +github.com/cockroachdb/errors v1.10.0 h1:lfxS8zZz1+OjtV4MtNWgboi/W5tyLEB6VQZBXN+0VUU= +github.com/cockroachdb/errors v1.10.0/go.mod h1:lknhIsEVQ9Ss/qKDBQS/UqFSvPQjOwNq2qyKAxtHRqE= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= +github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= +github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.1 
h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/emicklei/proto v1.11.2 h1:DiIeyTJ+gPSyJI+RIAqvuTeKb0tLUmaGXbYg6aFKsnE= +github.com/emicklei/proto v1.11.2/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= +github.com/getsentry/sentry-go v0.22.0 h1:XNX9zKbv7baSEI65l+H1GEJgSeIC1c7EN5kluWaP6dM= +github.com/getsentry/sentry-go v0.22.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/grafana/codejen v0.0.4-0.20221122220907-a5e7cc5407b3 h1:XasADId/N6Jk842t0GlEqtJkxvKzAcEaPNxa2NdusNs= +github.com/grafana/codejen v0.0.4-0.20221122220907-a5e7cc5407b3/go.mod h1:zmwwM/DRyQB7pfuBjTWII3CWtxcXh8LTwAYGfDfpR6s= +github.com/grafana/kindsys v0.0.0-20230615185749-1424263c17c7 h1:D0n6gj3isll8DqJl3oF/E+QwH656Q81cYyubt9kif8U= +github.com/grafana/kindsys v0.0.0-20230615185749-1424263c17c7/go.mod h1:BONQgHsNwL+szUkxaP31IHfZJYdFAuO1DnwJRdu9ISE= +github.com/grafana/thema v0.0.0-20230628103417-8f63313207a5 h1:RtcQqXR1LaJyHMLm+9cGAjwHqU+g3dScR/jfnrxbA9I= +github.com/grafana/thema v0.0.0-20230628103417-8f63313207a5/go.mod h1:KWAKeFXxQYiJ/kBVbijBLRVq9atxkfkeeFIvmj4clEA= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lib/pq v1.10.6 h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs= +github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= +github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de h1:D5x39vF5KCwKQaw+OC9ZPiLVHXz3UFw2+psEX+gYcto= +github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de/go.mod 
h1:kJun4WP5gFuHZgRjZUWWuH1DTxCtxbHDOIJsudS8jzY= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/protocolbuffers/txtpbfmt v0.0.0-20230412060525-fa9f017c0ded h1:XHLAvwaTYM0PxS/HO7E0PfBaY/y0jGM5NM7g05lCb0k= +github.com/protocolbuffers/txtpbfmt v0.0.0-20230412060525-fa9f017c0ded/go.mod h1:jgxiZysxFPM+iWKwQwPR+y+Jvo54ARd4EisXxKYpB5c= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/santhosh-tekuri/jsonschema v1.2.4 h1:hNhW8e7t+H1vgY+1QeEQpveR6D4+OwKPXCfD2aieJis= +github.com/santhosh-tekuri/jsonschema v1.2.4/go.mod h1:TEAUOeZSmIxTTuHatJzrvARHiuO9LYd+cIxzgEHCQI4= +github.com/sdboyer/cue v0.5.0-beta.2.0.20221218111347-341999f48bdb h1:X6XJsprVDQnlG4vT5TVb+cRlGMU78L/IKej8Q6SDFGY= +github.com/sdboyer/cue v0.5.0-beta.2.0.20221218111347-341999f48bdb/go.mod h1:okjJBHFQFer+a41sAe2SaGm1glWS8oEb6CmJvn5Zdws= +github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/yalue/merged_fs v1.2.2 h1:vXHTpJBluJryju7BBpytr3PDIkzsPMpiEknxVGPhN/I= +github.com/yalue/merged_fs v1.2.2/go.mod h1:WqqchfVYQyclV2tnR7wtRhBddzBvLVR83Cjw9BKQw0M= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU= +golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU= +golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= 
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s= +golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58= +golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.10.0 h1:tvDr/iQoUqNdohiYm0LmmKcBk+q86lb9EprIUFhHHGg= +golang.org/x/tools v0.10.0/go.mod h1:UJwyiVBsOA2uwvK/e5OY3GTpDUJriEd+/YlqAwLPmyM= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/ast/builder.go b/internal/ast/builder.go new file mode 100644 index 000000000..fdf4f5bcc --- /dev/null +++ b/internal/ast/builder.go @@ -0,0 +1,145 @@ +package ast + +type Builder struct { + Package string + For Object + Options []Option + Initializations []Assignment +} + +type Builders []Builder + +func (builders Builders) LocateByObject(pkg string, name string) (Builder, bool) { + for _, builder := range builders { + if builder.Package == pkg && builder.For.Name == name { + return builder, true + } + } + + return Builder{}, false +} + +type Option struct { + Name string + Comments []string + Args []Argument + Assignments []Assignment + Default *OptionDefault + IsConstructorArg bool +} + +type OptionDefault struct { + 
ArgsValues []any +} + +type Argument struct { + Name string + Type Type +} + +type Assignment struct { + // Where + Path string + + // What + ValueType Type // type of the value being assigned + ArgumentName string // if empty, then use `Value` + Value any + + Constraints []TypeConstraint + + // Some more context on the what + IntoOptionalField bool +} + +type BuilderGenerator struct { +} + +func (generator *BuilderGenerator) FromAST(files []*File) []Builder { + builders := make([]Builder, 0, len(files)) + + for _, file := range files { + for _, object := range file.Definitions { + // we only want builders for structs + if object.Type.Kind() != KindStruct { + continue + } + + builders = append(builders, generator.structObjectToBuilder(file, object)) + } + } + + return builders +} + +func (generator *BuilderGenerator) structObjectToBuilder(file *File, object Object) Builder { + builder := Builder{ + Package: file.Package, + For: object, + Options: nil, + } + structType := object.Type.(StructType) + + for _, field := range structType.Fields { + if generator.fieldHasStaticValue(field) { + builder.Initializations = append(builder.Initializations, generator.structFieldToStaticInitialization(field)) + continue + } + + builder.Options = append(builder.Options, generator.structFieldToOption(field)) + } + + return builder +} + +func (generator *BuilderGenerator) fieldHasStaticValue(field StructField) bool { + scalarType, ok := field.Type.(ScalarType) + + return ok && scalarType.Value != nil +} + +func (generator *BuilderGenerator) structFieldToStaticInitialization(field StructField) Assignment { + scalarType, _ := field.Type.(ScalarType) + + return Assignment{ + Path: field.Name, + Value: scalarType.Value, + ValueType: field.Type, + IntoOptionalField: !field.Required, + } +} + +func (generator *BuilderGenerator) structFieldToOption(field StructField) Option { + var constraints []TypeConstraint + if scalarType, ok := field.Type.(ScalarType); ok { + constraints = scalarType.Constraints + } + + opt := Option{ + Name: field.Name, + Comments: field.Comments, + Args: []Argument{ + { + Name: field.Name, + Type: field.Type, + }, + }, + Assignments: []Assignment{ + { + Path: field.Name, + ArgumentName: field.Name, + ValueType: field.Type, + Constraints: constraints, + IntoOptionalField: !field.Required, + }, + }, + } + + if field.Default != nil { + opt.Default = &OptionDefault{ + ArgsValues: []any{field.Default}, + } + } + + return opt +} diff --git a/internal/ast/compiler/anonymous_enum.go b/internal/ast/compiler/anonymous_enum.go new file mode 100644 index 000000000..10a5f23d1 --- /dev/null +++ b/internal/ast/compiler/anonymous_enum.go @@ -0,0 +1,119 @@ +package compiler + +import ( + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/tools" +) + +var _ Pass = (*AnonymousEnumToExplicitType)(nil) + +type AnonymousEnumToExplicitType struct { + newObjects []ast.Object +} + +func (pass *AnonymousEnumToExplicitType) Process(files []*ast.File) ([]*ast.File, error) { + newFiles := make([]*ast.File, 0, len(files)) + + for _, file := range files { + newFile, err := pass.processFile(file) + if err != nil { + return nil, err + } + + newFiles = append(newFiles, newFile) + } + + return newFiles, nil +} + +func (pass *AnonymousEnumToExplicitType) processFile(file *ast.File) (*ast.File, error) { + pass.newObjects = nil + + processedObjects := make([]ast.Object, 0, len(file.Definitions)) + for _, object := range file.Definitions { + processedObjects = append(processedObjects, 
pass.processObject(object)) + } + + return &ast.File{ + Package: file.Package, + Definitions: append(processedObjects, pass.newObjects...), + }, nil +} + +func (pass *AnonymousEnumToExplicitType) processObject(object ast.Object) ast.Object { + if object.Type.Kind() == ast.KindEnum { + return object + } + + newObject := object + newObject.Type = pass.processType(object.Name, object.Type) + + return newObject +} + +func (pass *AnonymousEnumToExplicitType) processType(parentName string, def ast.Type) ast.Type { + if def.Kind() == ast.KindArray { + return pass.processArray(parentName, def.(ast.ArrayType)) + } + + if def.Kind() == ast.KindStruct { + return pass.processStruct(def.(ast.StructType)) + } + + if def.Kind() == ast.KindEnum { + return pass.processAnonymousEnum(parentName, def.(ast.EnumType)) + } + + // TODO: do the same for disjunctions? + + return def +} + +func (pass *AnonymousEnumToExplicitType) processArray(parentName string, def ast.ArrayType) ast.ArrayType { + return ast.ArrayType{ + ValueType: pass.processType(parentName, def.ValueType), + } +} + +func (pass *AnonymousEnumToExplicitType) processStruct(def ast.StructType) ast.StructType { + newDef := def + + processedFields := make([]ast.StructField, 0, len(def.Fields)) + for _, field := range def.Fields { + processedFields = append(processedFields, ast.StructField{ + Name: field.Name, + Comments: field.Comments, + Type: pass.processType(field.Name, field.Type), + Required: field.Required, + Default: field.Default, + }) + } + + newDef.Fields = processedFields + + return newDef +} + +func (pass *AnonymousEnumToExplicitType) processAnonymousEnum(parentName string, def ast.EnumType) ast.RefType { + enumTypeName := tools.UpperCamelCase(parentName) + "Enum" + + values := make([]ast.EnumValue, 0, len(def.Values)) + for _, val := range def.Values { + values = append(values, ast.EnumValue{ + Type: val.Type, + Name: parentName + tools.UpperCamelCase(val.Name), + Value: val.Value, + }) + } + + pass.newObjects = append(pass.newObjects, ast.Object{ + Name: enumTypeName, + Type: ast.EnumType{ + Values: values, + }, + }) + + return ast.RefType{ + ReferredType: enumTypeName, + } +} diff --git a/internal/ast/compiler/compiler.go b/internal/ast/compiler/compiler.go new file mode 100644 index 000000000..f465312b8 --- /dev/null +++ b/internal/ast/compiler/compiler.go @@ -0,0 +1,9 @@ +package compiler + +import ( + "github.com/grafana/cog/internal/ast" +) + +type Pass interface { + Process(files []*ast.File) ([]*ast.File, error) +} diff --git a/internal/ast/compiler/disjunctions.go b/internal/ast/compiler/disjunctions.go new file mode 100644 index 000000000..3041a9e6d --- /dev/null +++ b/internal/ast/compiler/disjunctions.go @@ -0,0 +1,157 @@ +package compiler + +import ( + "strings" + + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/tools" +) + +var _ Pass = (*DisjunctionToType)(nil) + +type DisjunctionToType struct { + newObjects map[string]ast.Object +} + +func (pass *DisjunctionToType) Process(files []*ast.File) ([]*ast.File, error) { + newFiles := make([]*ast.File, 0, len(files)) + + for _, file := range files { + newFile, err := pass.processFile(file) + if err != nil { + return nil, err + } + + newFiles = append(newFiles, newFile) + } + + return newFiles, nil +} + +func (pass *DisjunctionToType) processFile(file *ast.File) (*ast.File, error) { + pass.newObjects = make(map[string]ast.Object) + + processedObjects := make([]ast.Object, 0, len(file.Definitions)) + for _, object := range file.Definitions { + 
processedObjects = append(processedObjects, pass.processObject(object)) + } + + newObjects := make([]ast.Object, 0, len(pass.newObjects)) + for _, obj := range pass.newObjects { + newObjects = append(newObjects, obj) + } + + return &ast.File{ + Package: file.Package, + Definitions: append(processedObjects, newObjects...), + }, nil +} + +func (pass *DisjunctionToType) processObject(object ast.Object) ast.Object { + newObject := object + newObject.Type = pass.processType(object.Type) + + return newObject +} + +func (pass *DisjunctionToType) processType(def ast.Type) ast.Type { + if def.Kind() == ast.KindArray { + return pass.processArray(def.(ast.ArrayType)) + } + + if def.Kind() == ast.KindStruct { + return pass.processStruct(def.(ast.StructType)) + } + + if def.Kind() == ast.KindDisjunction { + return pass.processDisjunction(def.(ast.DisjunctionType)) + } + + return def +} + +func (pass *DisjunctionToType) processArray(def ast.ArrayType) ast.ArrayType { + return ast.ArrayType{ + ValueType: pass.processType(def.ValueType), + } +} + +func (pass *DisjunctionToType) processStruct(def ast.StructType) ast.StructType { + newDef := def + + processedFields := make([]ast.StructField, 0, len(def.Fields)) + for _, field := range def.Fields { + processedFields = append(processedFields, ast.StructField{ + Name: field.Name, + Comments: field.Comments, + Type: pass.processType(field.Type), + Required: field.Required, + Default: field.Default, + }) + } + + newDef.Fields = processedFields + + return newDef +} + +func (pass *DisjunctionToType) processDisjunction(def ast.DisjunctionType) ast.Type { + // Ex: type | null + if len(def.Branches) == 2 && def.Branches.HasNullType() { + finalType := def.Branches.NonNullTypes()[0] + //finalType.Nullable = true + + return finalType + } + + // type | otherType | something (| null)? + // generate a type with a nullable field for every branch of the disjunction, + // add it to preprocessor.types, and use it instead. 
+ newTypeName := pass.disjunctionTypeName(def) + + if _, ok := pass.newObjects[newTypeName]; !ok { + structType := ast.StructType{ + Fields: make([]ast.StructField, 0, len(def.Branches)), + } + + for _, branch := range def.Branches { + if branch.Kind() == ast.KindNull { + continue + } + + structType.Fields = append(structType.Fields, ast.StructField{ + Name: "Val" + tools.UpperCamelCase(pass.typeName(branch)), + Type: branch, + Required: false, + }) + } + + pass.newObjects[newTypeName] = ast.Object{ + Name: newTypeName, + Type: structType, + } + } + + return ast.RefType{ + ReferredType: newTypeName, + //Nullable: def.Branches.HasNullType(), + } +} + +func (pass *DisjunctionToType) disjunctionTypeName(def ast.DisjunctionType) string { + parts := make([]string, 0, len(def.Branches)) + + for _, subType := range def.Branches { + parts = append(parts, tools.UpperCamelCase(pass.typeName(subType))) + } + + return strings.Join(parts, "Or") +} + +func (pass *DisjunctionToType) typeName(typeDef ast.Type) string { + if typeDef.Kind() == ast.KindRef { + return typeDef.(ast.RefType).ReferredType + } + + return string(typeDef.Kind()) +} diff --git a/internal/ast/types.go b/internal/ast/types.go new file mode 100644 index 000000000..6038270d3 --- /dev/null +++ b/internal/ast/types.go @@ -0,0 +1,179 @@ +package ast + +type Kind string + +const ( + KindDisjunction Kind = "disjunction" + KindRef Kind = "ref" + + KindStruct Kind = "struct" + KindEnum Kind = "enum" + KindMap Kind = "map" + + KindNull Kind = "null" + KindAny Kind = "any" + KindBytes Kind = "bytes" + KindArray Kind = "array" + KindString Kind = "string" + + KindFloat32 Kind = "float32" + KindFloat64 Kind = "float64" + + KindUint8 Kind = "uint8" + KindUint16 Kind = "uint16" + KindUint32 Kind = "uint32" + KindUint64 Kind = "uint64" + KindInt8 Kind = "int8" + KindInt16 Kind = "int16" + KindInt32 Kind = "int32" + KindInt64 Kind = "int64" + + KindBool Kind = "bool" +) + +type TypeConstraint struct { + // TODO: something more descriptive here? constant? + Op string + Args []any +} + +// interface for every type that we can represent: +// struct, enum, array, string, int, ... +type Type interface { + Kind() Kind +} + +// named declaration of a type +type Object struct { + Name string + Comments []string + Type Type +} + +type File struct { + Package string + Definitions []Object +} + +func (file *File) LocateDefinition(name string) Object { + for _, def := range file.Definitions { + if def.Name == name { + return def + } + } + + return Object{} +} + +var _ Type = (*DisjunctionType)(nil) + +type Types []Type + +func (types Types) HasNullType() bool { + for _, t := range types { + if t.Kind() == KindNull { + return true + } + } + + return false +} + +func (types Types) NonNullTypes() Types { + results := make([]Type, 0, len(types)) + + for _, t := range types { + if t.Kind() == KindNull { + continue + } + + results = append(results, t) + } + + return results +} + +type DisjunctionType struct { + Branches Types +} + +func (disjunctionType DisjunctionType) Kind() Kind { + return KindDisjunction +} + +var _ Type = (*ArrayType)(nil) + +type ArrayType struct { + ValueType Type +} + +func (arrayType ArrayType) Kind() Kind { + return KindArray +} + +var _ Type = (*EnumType)(nil) + +type EnumType struct { + Values []EnumValue // possible values. 
Value types might be different +} + +type EnumValue struct { + Type Type + Name string + Value any +} + +func (arrayType EnumType) Kind() Kind { + return KindEnum +} + +var _ Type = (*MapType)(nil) + +type MapType struct { + IndexType Type + ValueType Type +} + +func (arrayType MapType) Kind() Kind { + return KindMap +} + +var _ Type = (*StructType)(nil) + +type StructType struct { + Fields []StructField +} + +func (structType StructType) Kind() Kind { + return KindStruct +} + +type StructField struct { + Name string + Comments []string + Type Type + Required bool + Default any +} + +var _ Type = (*RefType)(nil) + +type RefType struct { + ReferredType string +} + +func (refType RefType) Kind() Kind { + return KindRef +} + +var _ Type = (*ScalarType)(nil) + +type ScalarType struct { + ScalarKind Kind // bool, bytes, string, int, float, ... + Value any // if value isn't nil, we're representing a constant scalar + Constraints []TypeConstraint +} + +func (scalarType ScalarType) Kind() Kind { + return scalarType.ScalarKind +} diff --git a/internal/jennies/all.go b/internal/jennies/all.go new file mode 100644 index 000000000..b9606e246 --- /dev/null +++ b/internal/jennies/all.go @@ -0,0 +1,33 @@ +package jennies + +import ( + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + compiler2 "github.com/grafana/cog/internal/ast/compiler" + "github.com/grafana/cog/internal/jennies/golang" + "github.com/grafana/cog/internal/jennies/typescript" +) + +type LanguageTarget struct { + Jennies *codejen.JennyList[[]*ast.File] + CompilerPasses []compiler2.Pass +} + +func All() map[string]LanguageTarget { + targets := map[string]LanguageTarget{ + // Compiler passes should not have side effects, but they do. + "go": { + Jennies: golang.Jennies(), + CompilerPasses: []compiler2.Pass{ + &compiler2.AnonymousEnumToExplicitType{}, + &compiler2.DisjunctionToType{}, + }, + }, + "typescript": { + Jennies: typescript.Jennies(), + CompilerPasses: nil, + }, + } + + return targets +} diff --git a/internal/jennies/golang/builder.go b/internal/jennies/golang/builder.go new file mode 100644 index 000000000..be09fe95c --- /dev/null +++ b/internal/jennies/golang/builder.go @@ -0,0 +1,340 @@ +package golang + +import ( + "fmt" + "strings" + + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/tools" +) + +type GoBuilder struct { +} + +func (jenny *GoBuilder) JennyName() string { + return "GoBuilder" +} + +func (jenny *GoBuilder) Generate(builders []ast.Builder) (codejen.Files, error) { + files := codejen.Files{} + + for _, builder := range builders { + output, err := jenny.generateBuilder(builders, builder) + if err != nil { + return nil, err + } + + files = append( + files, + *codejen.NewFile(builder.Package+"/"+strings.ToLower(builder.For.Name)+"/builder_gen.go", output, jenny), + ) + } + + return files, nil +} + +func (jenny *GoBuilder) generateBuilder(builders ast.Builders, builder ast.Builder) ([]byte, error) { + var buffer strings.Builder + + buffer.WriteString(fmt.Sprintf("package %s\n\n", strings.ToLower(builder.For.Name))) + + // import generated types + buffer.WriteString(fmt.Sprintf("import \"github.com/grafana/cog/generated/types/%s\"\n\n", builder.Package)) + + // Option type declaration + buffer.WriteString("type Option func(builder *Builder) error\n\n") + + // Builder type declaration + buffer.WriteString(fmt.Sprintf(`type Builder struct { + internal *types.%s +} + +`, tools.UpperCamelCase(builder.For.Name))) + + // Add a constructor for the 
builder + constructorCode := jenny.generateConstructor(builders, builder) + buffer.WriteString(constructorCode) + + // Allow builders to expose the resource they're building + // TODO: do we want to do this? + // TODO: better name, with less conflict chance + buffer.WriteString(fmt.Sprintf(` +func (builder *Builder) Internal() *types.%s { + return builder.internal +} +`, tools.UpperCamelCase(builder.For.Name))) + + // Define options + for _, option := range builder.Options { + buffer.WriteString(jenny.generateOption(builders, builder, option) + "\n") + } + + // add calls to set default values + buffer.WriteString("\n") + buffer.WriteString("func defaults() []Option {\n") + buffer.WriteString("return []Option{\n") + for _, opt := range builder.Options { + if opt.Default != nil { + buffer.WriteString(jenny.generateDefaultCall(opt) + ",\n") + } + } + buffer.WriteString("}\n") + buffer.WriteString("}\n") + + return []byte(buffer.String()), nil +} + +func (jenny *GoBuilder) generateConstructor(builders ast.Builders, builder ast.Builder) string { + var buffer strings.Builder + + typeName := tools.UpperCamelCase(builder.For.Name) + args := "" + fieldsInit := "" + var argsList []string + var fieldsInitList []string + for _, opt := range builder.Options { + if !opt.IsConstructorArg { + continue + } + + // FIXME: this is assuming that there's only one argument for that option + argsList = append(argsList, jenny.generateArgument(builders, builder, opt.Args[0])) + fieldsInitList = append( + fieldsInitList, + jenny.generateInitAssignment(builders, builder, opt.Assignments[0]), + ) + } + + for _, init := range builder.Initializations { + fieldsInitList = append( + fieldsInitList, + jenny.generateInitAssignment(builders, builder, init), + ) + } + + if len(argsList) != 0 { + args = strings.Join(argsList, ", ") + ", " + } + if len(fieldsInitList) != 0 { + fieldsInit = strings.Join(fieldsInitList, ",\n") + ",\n" + } + + buffer.WriteString(fmt.Sprintf(` +func New(%[2]soptions ...Option) (Builder, error) { + resource := &types.%[1]s{ + %[3]s + } + builder := &Builder{internal: resource} + + for _, opt := range append(defaults(), options...) 
{ + if err := opt(builder); err != nil { + return *builder, err + } + } + + return *builder, nil +} +`, typeName, args, fieldsInit)) + + return buffer.String() +} + +func (jenny *GoBuilder) formatFieldPath(fieldPath string) string { + parts := strings.Split(fieldPath, ".") + formatted := make([]string, 0, len(parts)) + + for _, part := range parts { + formatted = append(formatted, tools.UpperCamelCase(part)) + } + + return strings.Join(formatted, ".") +} + +func (jenny *GoBuilder) generateInitAssignment(builders ast.Builders, builder ast.Builder, assignment ast.Assignment) string { + fieldPath := jenny.formatFieldPath(assignment.Path) + valueType := assignment.ValueType + + if _, valueHasBuilder := jenny.typeHasBuilder(builders, builder, assignment.ValueType); valueHasBuilder { + return "constructor init assignment with type that has a builder is not supported yet" + } + + if assignment.ArgumentName == "" { + return fmt.Sprintf("%[1]s: %[2]s", fieldPath, formatScalar(assignment.Value)) + } + + argName := jenny.escapeVarName(tools.LowerCamelCase(assignment.ArgumentName)) + + asPointer := "" + // FIXME: this condition is probably wrong + if valueType.Kind() != ast.KindArray && valueType.Kind() != ast.KindStruct && assignment.IntoOptionalField { + asPointer = "&" + } + + generatedConstraints := strings.Join(jenny.constraints(argName, assignment.Constraints), "\n") + if generatedConstraints != "" { + generatedConstraints = generatedConstraints + "\n\n" + } + + return generatedConstraints + fmt.Sprintf("%[1]s: %[3]s%[2]s", fieldPath, argName, asPointer) +} + +func (jenny *GoBuilder) generateOption(builders ast.Builders, builder ast.Builder, def ast.Option) string { + var buffer strings.Builder + + for _, commentLine := range def.Comments { + buffer.WriteString(fmt.Sprintf("// %s\n", commentLine)) + } + + // Option name + optionName := tools.UpperCamelCase(def.Name) + + // Arguments list + arguments := "" + if len(def.Args) != 0 { + argsList := make([]string, 0, len(def.Args)) + for _, arg := range def.Args { + argsList = append(argsList, jenny.generateArgument(builders, builder, arg)) + } + + arguments = strings.Join(argsList, ", ") + } + + // Assignments + assignmentsList := make([]string, 0, len(def.Assignments)) + for _, assignment := range def.Assignments { + assignmentsList = append(assignmentsList, jenny.generateAssignment(builders, builder, assignment)) + } + assignments := strings.Join(assignmentsList, "\n") + + buffer.WriteString(fmt.Sprintf(`func %[1]s(%[2]s) Option { + return func(builder *Builder) error { + %[3]s + + return nil + } +} +`, optionName, arguments, assignments)) + + return buffer.String() +} + +func (jenny *GoBuilder) typeHasBuilder(builders ast.Builders, builder ast.Builder, t ast.Type) (string, bool) { + if t.Kind() != ast.KindRef { + return "", false + } + + referredTypeName := t.(ast.RefType).ReferredType + referredTypePkg := strings.ToLower(referredTypeName) + _, builderFound := builders.LocateByObject(builder.Package, referredTypeName) + + return referredTypePkg, builderFound +} + +func (jenny *GoBuilder) generateArgument(builders ast.Builders, builder ast.Builder, arg ast.Argument) string { + typeName := formatType(arg.Type, true, "types") + + if builderPkg, found := jenny.typeHasBuilder(builders, builder, arg.Type); found { + return fmt.Sprintf(`opts ...%[1]s.Option`, builderPkg) + } + + name := jenny.escapeVarName(tools.LowerCamelCase(arg.Name)) + + return fmt.Sprintf("%s %s", name, typeName) +} + +func (jenny *GoBuilder) generateAssignment(builders 
ast.Builders, builder ast.Builder, assignment ast.Assignment) string { + fieldPath := jenny.formatFieldPath(assignment.Path) + valueType := assignment.ValueType + + if builderPkg, found := jenny.typeHasBuilder(builders, builder, assignment.ValueType); found { + intoPointer := "*" + if assignment.IntoOptionalField { + intoPointer = "" + } + + return fmt.Sprintf(`resource, err := %[2]s.New(opts...) + if err != nil { + return err + } + + builder.internal.%[1]s = %[3]sresource.Internal() +`, fieldPath, builderPkg, intoPointer) + } + + if assignment.ArgumentName == "" { + return fmt.Sprintf("builder.internal.%[1]s = %[2]s", fieldPath, formatScalar(assignment.Value)) + } + + argName := jenny.escapeVarName(tools.LowerCamelCase(assignment.ArgumentName)) + + asPointer := "" + // FIXME: this condition is probably wrong + if valueType.Kind() != ast.KindArray && valueType.Kind() != ast.KindStruct && assignment.IntoOptionalField { + asPointer = "&" + } + + generatedConstraints := strings.Join(jenny.constraints(argName, assignment.Constraints), "\n") + if generatedConstraints != "" { + generatedConstraints = generatedConstraints + "\n\n" + } + + return generatedConstraints + fmt.Sprintf("builder.internal.%[1]s = %[3]s%[2]s", fieldPath, argName, asPointer) +} + +func (jenny *GoBuilder) escapeVarName(varName string) string { + if isReservedGoKeyword(varName) { + return varName + "Arg" + } + + return varName +} + +func (jenny *GoBuilder) generateDefaultCall(option ast.Option) string { + args := make([]string, 0, len(option.Default.ArgsValues)) + for _, arg := range option.Default.ArgsValues { + args = append(args, formatScalar(arg)) + } + + return fmt.Sprintf("%s(%s)", tools.UpperCamelCase(option.Name), strings.Join(args, ", ")) +} + +func (jenny *GoBuilder) constraints(argumentName string, constraints []ast.TypeConstraint) []string { + output := make([]string, 0, len(constraints)) + + for _, constraint := range constraints { + output = append(output, jenny.constraint(argumentName, constraint)) + } + + return output +} + +func (jenny *GoBuilder) constraint(argumentName string, constraint ast.TypeConstraint) string { + var buffer strings.Builder + + buffer.WriteString(fmt.Sprintf("if !(%s) {\n", jenny.constraintComparison(argumentName, constraint))) + buffer.WriteString(fmt.Sprintf("return errors.New(\"%[1]s must be %[2]s %[3]v\")\n", argumentName, constraint.Op, constraint.Args[0])) + buffer.WriteString("}\n") + + return buffer.String() +} + +func (jenny *GoBuilder) constraintComparison(argumentName string, constraint ast.TypeConstraint) string { + if constraint.Op == "minLength" { + return fmt.Sprintf("len([]rune(%[1]s)) >= %[2]v", argumentName, constraint.Args[0]) + } + if constraint.Op == "maxLength" { + return fmt.Sprintf("len([]rune(%[1]s)) <= %[2]v", argumentName, constraint.Args[0]) + } + + return fmt.Sprintf("%[1]s %[2]s %#[3]v", argumentName, constraint.Op, constraint.Args[0]) +} + +func isReservedGoKeyword(input string) bool { + // TODO + if input == "type" { + return true + } + + return false +} diff --git a/internal/jennies/golang/jennies.go b/internal/jennies/golang/jennies.go new file mode 100644 index 000000000..5a44f9b41 --- /dev/null +++ b/internal/jennies/golang/jennies.go @@ -0,0 +1,31 @@ +package golang + +import ( + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/jennies/tools" + "github.com/grafana/cog/internal/veneers" +) + +func Jennies() *codejen.JennyList[[]*ast.File] { + targets := 
codejen.JennyListWithNamer[[]*ast.File](func(files []*ast.File) string { + return "golang" + }) + targets.AppendManyToMany( + tools.Foreach[*ast.File](GoRawTypes{}), + ) + targets.AppendOneToMany( + codejen.AdaptOneToMany[[]ast.Builder, []*ast.File]( + &GoBuilder{}, + func(files []*ast.File) []ast.Builder { + generator := &ast.BuilderGenerator{} + builders := generator.FromAST(files) + + return veneers.Engine().ApplyTo(builders) + }, + ), + ) + targets.AddPostprocessors(PostProcessFile) + + return targets +} diff --git a/internal/jennies/golang/postprocessor.go b/internal/jennies/golang/postprocessor.go new file mode 100644 index 000000000..3ffdcf21d --- /dev/null +++ b/internal/jennies/golang/postprocessor.go @@ -0,0 +1,27 @@ +package golang + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/grafana/codejen" + "golang.org/x/tools/imports" +) + +func PostProcessFile(file codejen.File) (codejen.File, error) { + if !strings.HasSuffix(file.RelativePath, ".go") { + return file, nil + } + + output, err := imports.Process(filepath.Base(file.RelativePath), file.Data, nil) + if err != nil { + return codejen.File{}, fmt.Errorf("goimports processing of generated file failed: %w", err) + } + + return codejen.File{ + RelativePath: file.RelativePath, + Data: output, + From: file.From, + }, nil +} diff --git a/internal/jennies/golang/rawtypes.go b/internal/jennies/golang/rawtypes.go new file mode 100644 index 000000000..3b2577649 --- /dev/null +++ b/internal/jennies/golang/rawtypes.go @@ -0,0 +1,293 @@ +package golang + +import ( + "bytes" + "fmt" + "strings" + + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/tools" +) + +type GoRawTypes struct { +} + +func (jenny GoRawTypes) JennyName() string { + return "GoRawTypes" +} + +func (jenny GoRawTypes) Generate(file *ast.File) (codejen.Files, error) { + output, err := jenny.generateFile(file) + if err != nil { + return nil, err + } + + return codejen.Files{ + *codejen.NewFile("types/"+file.Package+"/types_gen.go", output, jenny), + }, nil +} + +func (jenny GoRawTypes) generateFile(file *ast.File) ([]byte, error) { + var buffer strings.Builder + + buffer.WriteString("package types\n\n") + + for _, object := range file.Definitions { + objectOutput, err := jenny.formatObject(object) + if err != nil { + return nil, err + } + + buffer.Write(objectOutput) + buffer.WriteString("\n") + + // Add JSON (un)marshaling shortcuts + if object.Type.Kind() != ast.KindAny { + jsonMarshal, err := jenny.veneer("json_marshal", object) + if err != nil { + return nil, err + } + buffer.WriteString(jsonMarshal) + } + } + + return []byte(buffer.String()), nil +} + +func (jenny GoRawTypes) formatObject(def ast.Object) ([]byte, error) { + defName := tools.UpperCamelCase(def.Name) + + switch def.Type.Kind() { + case ast.KindStruct: + return jenny.formatStructDef(def) + case ast.KindEnum: + return jenny.formatEnumDef(def) + case ast.KindString, + ast.KindInt8, ast.KindInt16, ast.KindInt32, ast.KindInt64, + ast.KindUint8, ast.KindUint16, ast.KindUint32, ast.KindUint64, + ast.KindFloat32, ast.KindFloat64: + scalarType, ok := def.Type.(ast.ScalarType) + if ok && scalarType.Value != nil { + return []byte(fmt.Sprintf("const %s = %s", defName, formatScalar(scalarType.Value))), nil + } + + return []byte(fmt.Sprintf("type %s %s", defName, formatType(def.Type, true, ""))), nil + case ast.KindMap, ast.KindBool: + return []byte(fmt.Sprintf("type %s %s", defName, formatType(def.Type, true, ""))), nil + case ast.KindRef: + return 
[]byte(fmt.Sprintf("type %s %s", defName, def.Type.(ast.RefType).ReferredType)), nil + case ast.KindAny: + return []byte(fmt.Sprintf("type %s any", defName)), nil + default: + return nil, fmt.Errorf("unhandled type def kind: %s", def.Type.Kind()) + } +} + +func (jenny GoRawTypes) formatEnumDef(def ast.Object) ([]byte, error) { + var buffer strings.Builder + + for _, commentLine := range def.Comments { + buffer.WriteString(fmt.Sprintf("// %s\n", commentLine)) + } + + enumName := tools.UpperCamelCase(def.Name) + enumType := def.Type.(ast.EnumType) + + buffer.WriteString(fmt.Sprintf("type %s %s\n", enumName, enumType.Values[0].Type.Kind())) + + buffer.WriteString("const (\n") + for _, val := range enumType.Values { + buffer.WriteString(fmt.Sprintf("\t%s %s = %#v\n", tools.UpperCamelCase(val.Name), enumName, val.Value)) + } + buffer.WriteString(")\n") + + return []byte(buffer.String()), nil +} + +func (jenny GoRawTypes) formatStructDef(def ast.Object) ([]byte, error) { + var buffer strings.Builder + + for _, commentLine := range def.Comments { + buffer.WriteString(fmt.Sprintf("// %s\n", commentLine)) + } + + buffer.WriteString(fmt.Sprintf("type %s ", tools.UpperCamelCase(def.Name))) + buffer.WriteString(formatStructBody(def.Type.(ast.StructType), "")) + buffer.WriteString("\n") + + return []byte(buffer.String()), nil +} + +func (jenny GoRawTypes) formatMapDef(def ast.Object) ([]byte, error) { + var buffer strings.Builder + + for _, commentLine := range def.Comments { + buffer.WriteString(fmt.Sprintf("// %s\n", commentLine)) + } + + buffer.WriteString(fmt.Sprintf("type %s ", tools.UpperCamelCase(def.Name))) + buffer.WriteString(formatMap(def.Type.(ast.MapType), "")) + buffer.WriteString("\n") + + return []byte(buffer.String()), nil +} + +func (jenny GoRawTypes) veneer(veneerType string, def ast.Object) (string, error) { + // First, see if there is a definition-specific veneer + templateFile := fmt.Sprintf("%s.types.%s.go.tmpl", strings.ToLower(def.Name), veneerType) + tmpl := templates.Lookup(templateFile) + + // If not, get the generic one + if tmpl == nil { + tmpl = templates.Lookup(fmt.Sprintf("types.%s.go.tmpl", veneerType)) + } + // If not, something went wrong. 
+ if tmpl == nil { + return "", fmt.Errorf("veneer '%s' not found", veneerType) + } + + buf := bytes.Buffer{} + if err := tmpl.Execute(&buf, map[string]any{ + "def": def, + }); err != nil { + return "", fmt.Errorf("failed executing veneer template: %w", err) + } + + return buf.String(), nil +} + +func formatStructBody(def ast.StructType, typesPkg string) string { + var buffer strings.Builder + + buffer.WriteString("struct {\n") + + for _, fieldDef := range def.Fields { + buffer.WriteString("\t" + formatField(fieldDef, typesPkg)) + } + + buffer.WriteString("}") + + return buffer.String() +} + +func formatField(def ast.StructField, typesPkg string) string { + var buffer strings.Builder + + for _, commentLine := range def.Comments { + buffer.WriteString(fmt.Sprintf("// %s\n", commentLine)) + } + + // ToDo: this doesn't follow references to other types like the builder jenny does + /* + if def.Type.Default != nil { + buffer.WriteString(fmt.Sprintf("// Default: %#v\n", def.Type.Default)) + } + */ + + jsonOmitEmpty := "" + if !def.Required { + jsonOmitEmpty = ",omitempty" + } + + buffer.WriteString(fmt.Sprintf( + "%s %s `json:\"%s%s\"`\n", + tools.UpperCamelCase(def.Name), + formatType(def.Type, def.Required, typesPkg), + def.Name, + jsonOmitEmpty, + )) + + return buffer.String() +} +func formatType(def ast.Type, fieldIsRequired bool, typesPkg string) string { + if def.Kind() == ast.KindAny { + return "any" + } + + if def.Kind() == ast.KindDisjunction { + return formatDisjunction(def.(ast.DisjunctionType), typesPkg) + } + + if def.Kind() == ast.KindArray { + return formatArray(def.(ast.ArrayType), typesPkg) + } + + if def.Kind() == ast.KindMap { + return formatMap(def.(ast.MapType), typesPkg) + } + + if def.Kind() == ast.KindRef { + typeName := def.(ast.RefType).ReferredType + + if typesPkg != "" { + typeName = typesPkg + "." 
+ typeName + } + + if !fieldIsRequired { + typeName = "*" + typeName + } + + return typeName + } + + if def.Kind() == ast.KindEnum { + return "enum here" + } + + // anonymous struct + if def.Kind() == ast.KindStruct { + return formatStructBody(def.(ast.StructType), typesPkg) + } + + // TODO: there should be an ast.KindScalar with a matching type + typeName := string(def.(ast.ScalarType).ScalarKind) + + if !fieldIsRequired { + typeName = "*" + typeName + } + /* + if def.Nullable || !fieldIsRequired { + typeName = "*" + typeName + } + */ + + return typeName +} + +func formatArray(def ast.ArrayType, typesPkg string) string { + subTypeString := formatType(def.ValueType, true, typesPkg) + + return fmt.Sprintf("[]%s", subTypeString) +} + +func formatMap(def ast.MapType, typesPkg string) string { + keyTypeString := def.IndexType.Kind() + valueTypeString := formatType(def.ValueType, true, typesPkg) + + return fmt.Sprintf("map[%s]%s", keyTypeString, valueTypeString) +} + +func formatDisjunction(def ast.DisjunctionType, typesPkg string) string { + subTypes := make([]string, 0, len(def.Branches)) + for _, subType := range def.Branches { + subTypes = append(subTypes, formatType(subType, true, typesPkg)) + } + + return fmt.Sprintf("disjunction<%s>", strings.Join(subTypes, " | ")) +} + +func formatScalar(val any) string { + if list, ok := val.([]any); ok { + items := make([]string, 0, len(list)) + + for _, item := range list { + items = append(items, formatScalar(item)) + } + + // TODO: we can't assume a list of strings + return fmt.Sprintf("[]string{%s}", strings.Join(items, ", ")) + } + + return fmt.Sprintf("%#v", val) +} diff --git a/internal/jennies/golang/tmpl.go b/internal/jennies/golang/tmpl.go new file mode 100644 index 000000000..313b02b4a --- /dev/null +++ b/internal/jennies/golang/tmpl.go @@ -0,0 +1,21 @@ +package golang + +import ( + "embed" + "html/template" + + "github.com/grafana/cog/internal/tools" +) + +var templates *template.Template + +//go:embed veneers/*.tmpl +var veneersFS embed.FS + +func init() { + base := template.New("golang") + base.Funcs(map[string]any{ + "formatIdentifier": tools.UpperCamelCase, + }) + templates = template.Must(base.ParseFS(veneersFS, "veneers/*.tmpl")) +} diff --git a/internal/jennies/golang/veneers/stringorbool.types.json_marshal.go.tmpl b/internal/jennies/golang/veneers/stringorbool.types.json_marshal.go.tmpl new file mode 100644 index 000000000..2cdd3008c --- /dev/null +++ b/internal/jennies/golang/veneers/stringorbool.types.json_marshal.go.tmpl @@ -0,0 +1,51 @@ +// MarshalJSON implements the encoding/json.Marshaler interface. +// +// This method can be used to render the resource as JSON +// which your configuration management tool of choice can then feed into +// Grafana. +func (resource {{ .def.Name|formatIdentifier }}) MarshalJSON() ([]byte, error) { + if resource.ValString != nil { + var buf bytes.Buffer + buf.WriteRune('"') + buf.WriteString(*resource.ValString) + buf.WriteRune('"') + return buf.Bytes(), nil + } + + return strconv.AppendBool([]byte{}, *resource.ValBool), nil +} + +// MarshalIndentJSON renders the resource as indented JSON +// which your configuration management tool of choice can then feed into +// Grafana. 
+func (resource {{ .def.Name|formatIdentifier }}) MarshalIndentJSON() ([]byte, error) {
+	return json.MarshalIndent(resource, "", " ")
+}
+
+func (resource *{{ .def.Name|formatIdentifier }}) UnmarshalJSON(raw []byte) error {
+	if raw == nil || bytes.Equal(raw, []byte("null")) {
+		return nil
+	}
+	var (
+		tmp string
+		err error
+	)
+	if raw[0] != '"' {
+		if bytes.Equal(raw, []byte("true")) {
+			yup := true
+			resource.ValBool = &yup
+			return nil
+		}
+		if bytes.Equal(raw, []byte("false")) {
+			nope := false
+			resource.ValBool = &nope
+			return nil
+		}
+		return errors.New("bad boolean value provided")
+	}
+	if err = json.Unmarshal(raw, &tmp); err != nil {
+		return err
+	}
+	resource.ValString = &tmp
+	return nil
+}
diff --git a/internal/jennies/golang/veneers/types.json_marshal.go.tmpl b/internal/jennies/golang/veneers/types.json_marshal.go.tmpl
new file mode 100644
index 000000000..262820290
--- /dev/null
+++ b/internal/jennies/golang/veneers/types.json_marshal.go.tmpl
@@ -0,0 +1,6 @@
+// MarshalIndentJSON renders the resource as indented JSON
+// which your configuration management tool of choice can then feed into
+// Grafana.
+func (resource {{ .def.Name|formatIdentifier }}) MarshalIndentJSON() ([]byte, error) {
+	return json.MarshalIndent(resource, "", " ")
+}
diff --git a/internal/jennies/tools/foreach.go b/internal/jennies/tools/foreach.go
new file mode 100644
index 000000000..e97787a87
--- /dev/null
+++ b/internal/jennies/tools/foreach.go
@@ -0,0 +1,37 @@
+package tools
+
+import (
+	"github.com/grafana/codejen"
+)
+
+type foreach[Input any] struct {
+	inner codejen.OneToMany[Input]
+}
+
+func (jenny foreach[Input]) JennyName() string {
+	return "ForeachFile"
+}
+
+func (jenny foreach[Input]) Generate(inputs ...[]Input) (codejen.Files, error) {
+	outputs := make([]codejen.File, 0, len(inputs))
+
+	for _, input := range inputs {
+		for _, item := range input {
+			out, err := jenny.inner.Generate(item)
+			if err != nil {
+				return nil, err
+			}
+
+			outputs = append(outputs, out...)
+ + } + } + + return outputs, nil +} + +func Foreach[InputInner any](decoratedJenny codejen.OneToMany[InputInner]) codejen.ManyToMany[[]InputInner] { + return foreach[InputInner]{ + inner: decoratedJenny, + } +} diff --git a/internal/jennies/typescript/builder.go b/internal/jennies/typescript/builder.go new file mode 100644 index 000000000..db17d88d3 --- /dev/null +++ b/internal/jennies/typescript/builder.go @@ -0,0 +1,254 @@ +package typescript + +import ( + "fmt" + "strings" + + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/tools" +) + +type TypescriptBuilder struct { + defaults []string + file *ast.File +} + +func (jenny *TypescriptBuilder) JennyName() string { + return "TypescriptBuilder" +} + +func (jenny *TypescriptBuilder) Generate(builders []ast.Builder) (codejen.Files, error) { + files := codejen.Files{} + + for _, builder := range builders { + output, err := jenny.generateBuilder(builders, builder) + if err != nil { + return nil, err + } + + files = append( + files, + *codejen.NewFile(builder.Package+"/"+strings.ToLower(builder.For.Name)+"/builder_gen.ts", output, jenny), + ) + } + + return files, nil +} + +func (jenny *TypescriptBuilder) generateBuilder(builders ast.Builders, builder ast.Builder) ([]byte, error) { + var buffer strings.Builder + + objectName := tools.UpperCamelCase(builder.For.Name) + + // imports + buffer.WriteString(fmt.Sprintf("import * as types from \"../../types/%s/types_gen\";\n", strings.ToLower(objectName))) + buffer.WriteString(fmt.Sprintf("import { OptionsBuilder } from \"../../options_builder_gen\";\n\n")) + + // Builder class declaration + buffer.WriteString(fmt.Sprintf("export class %[1]sBuilder implements OptionsBuilder {\n", objectName)) + + // internal property, representing the object being built + buffer.WriteString(fmt.Sprintf("\tinternal: types.%[1]s;\n", objectName)) + + // Add a constructor for the builder + constructorCode := jenny.generateConstructor(builders, builder) + buffer.WriteString(constructorCode) + + // Allow builders to expose the resource they're building + buffer.WriteString(fmt.Sprintf(` + build(): types.%s { + return this.internal; + } + +`, objectName)) + + // Define options + for _, option := range builder.Options { + opt, err := jenny.generateOption(builders, builder, option) + if err != nil { + return nil, err + } + buffer.WriteString(opt) + } + + // End builder class declaration + buffer.WriteString("}\n") + + return []byte(buffer.String()), nil +} + +func (jenny *TypescriptBuilder) generateConstructor(builders ast.Builders, builder ast.Builder) string { + var buffer strings.Builder + + typeName := tools.UpperCamelCase(builder.For.Name) + args := "" + fieldsInit := "" + var argsList []string + var fieldsInitList []string + for _, opt := range builder.Options { + if !opt.IsConstructorArg { + continue + } + + // FIXME: this is assuming that there's only one argument for that option + argsList = append(argsList, jenny.generateArgument(builders, builder, opt.Args[0])) + fieldsInitList = append( + fieldsInitList, + jenny.generateInitAssignment(builders, builder, opt.Assignments[0]), + ) + } + + for _, init := range builder.Initializations { + fieldsInitList = append( + fieldsInitList, + jenny.generateInitAssignment(builders, builder, init), + ) + } + + args = strings.Join(argsList, ", ") + fieldsInit = strings.Join(fieldsInitList, "\n") + + buffer.WriteString(fmt.Sprintf(` + constructor(%[2]s) { +%[3]s + } +`, typeName, args, fieldsInit)) + + return buffer.String() +} 
+ +func (jenny *TypescriptBuilder) typeHasBuilder(builders ast.Builders, builder ast.Builder, t ast.Type) (string, bool) { + if t.Kind() != ast.KindRef { + return "", false + } + + referredTypeName := t.(ast.RefType).ReferredType + referredTypePkg := strings.ToLower(referredTypeName) + + _, builderFound := builders.LocateByObject(builder.Package, referredTypeName) + + return referredTypePkg, builderFound +} + +func (jenny *TypescriptBuilder) generateInitAssignment(builders ast.Builders, builder ast.Builder, assignment ast.Assignment) string { + fieldPath := assignment.Path + + if _, valueHasBuilder := jenny.typeHasBuilder(builders, builder, assignment.ValueType); valueHasBuilder { + return "constructor init assignment with type that has a builder is not supported yet" + } + + if assignment.ArgumentName == "" { + return fmt.Sprintf("\t\tthis.internal.%[1]s = %[2]s;", fieldPath, formatScalar(assignment.Value)) + } + + argName := tools.LowerCamelCase(assignment.ArgumentName) + + generatedConstraints := strings.Join(jenny.constraints(argName, assignment.Constraints), "\n") + if generatedConstraints != "" { + generatedConstraints = generatedConstraints + "\n\n" + } + + return generatedConstraints + fmt.Sprintf("\t\tthis.internal.%[1]s = %[2]s;", fieldPath, argName) +} + +func (jenny *TypescriptBuilder) generateOption(builders ast.Builders, builder ast.Builder, def ast.Option) (string, error) { + var buffer strings.Builder + + for _, commentLine := range def.Comments { + buffer.WriteString(fmt.Sprintf("\t// %s\n", commentLine)) + } + + // Arguments list + arguments := "" + if len(def.Args) != 0 { + argsList := make([]string, 0, len(def.Args)) + for _, arg := range def.Args { + argsList = append(argsList, jenny.generateArgument(builders, builder, arg)) + } + + arguments = strings.Join(argsList, ", ") + } + + // Assignments + assignmentsList := make([]string, 0, len(def.Assignments)) + for _, assignment := range def.Assignments { + assignmentsList = append(assignmentsList, jenny.generateAssignment(builders, builder, assignment)) + } + assignments := strings.Join(assignmentsList, "\n") + + // Option body + buffer.WriteString(fmt.Sprintf(` %[1]s(%[2]s): this { +%[3]s + + return this; + } + +`, def.Name, arguments, assignments)) + + return buffer.String(), nil +} + +func (jenny *TypescriptBuilder) generateArgument(builders ast.Builders, builder ast.Builder, arg ast.Argument) string { + typeName := formatType(arg.Type, "types") + + if builderPkg, found := jenny.typeHasBuilder(builders, builder, arg.Type); found { + return fmt.Sprintf(`%[1]s: OptionsBuilder`, arg.Name, builderPkg) + } + + name := tools.LowerCamelCase(arg.Name) + + return fmt.Sprintf("%s: %s", name, typeName) +} + +func (jenny *TypescriptBuilder) generateAssignment(builders ast.Builders, builder ast.Builder, assignment ast.Assignment) string { + fieldPath := assignment.Path + + if _, found := jenny.typeHasBuilder(builders, builder, assignment.ValueType); found { + return fmt.Sprintf("\t\tthis.internal.%[1]s = %[2]s.build();", fieldPath, assignment.ArgumentName) + } + + if assignment.ArgumentName == "" { + return fmt.Sprintf("\t\tthis.internal.%[1]s = %[2]s;", fieldPath, formatScalar(assignment.Value)) + } + + argName := tools.LowerCamelCase(assignment.ArgumentName) + + generatedConstraints := strings.Join(jenny.constraints(argName, assignment.Constraints), "\n") + if generatedConstraints != "" { + generatedConstraints = generatedConstraints + "\n\n" + } + + return generatedConstraints + fmt.Sprintf("\t\tthis.internal.%[1]s = %[2]s;", 
fieldPath, argName) +} + +func (jenny *TypescriptBuilder) constraints(argumentName string, constraints []ast.TypeConstraint) []string { + output := make([]string, 0, len(constraints)) + + for _, constraint := range constraints { + output = append(output, jenny.constraint(argumentName, constraint)) + } + + return output +} + +func (jenny *TypescriptBuilder) constraint(argumentName string, constraint ast.TypeConstraint) string { + var buffer strings.Builder + + buffer.WriteString(fmt.Sprintf("\t\tif (!(%s)) {\n", jenny.constraintComparison(argumentName, constraint))) + buffer.WriteString(fmt.Sprintf("\t\t\tthrow new Error(\"%[1]s must be %[2]s %[3]v\");\n", argumentName, constraint.Op, constraint.Args[0])) + buffer.WriteString("\t\t}\n") + + return buffer.String() +} + +func (jenny *TypescriptBuilder) constraintComparison(argumentName string, constraint ast.TypeConstraint) string { + if constraint.Op == "minLength" { + return fmt.Sprintf("%[1]s.length >= %[2]v", argumentName, constraint.Args[0]) + } + if constraint.Op == "maxLength" { + return fmt.Sprintf("%[1]s.length <= %[2]v", argumentName, constraint.Args[0]) + } + + return fmt.Sprintf("%[1]s %[2]s %#[3]v", argumentName, constraint.Op, constraint.Args[0]) +} diff --git a/internal/jennies/typescript/jennies.go b/internal/jennies/typescript/jennies.go new file mode 100644 index 000000000..af0ff65d7 --- /dev/null +++ b/internal/jennies/typescript/jennies.go @@ -0,0 +1,33 @@ +package typescript + +import ( + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/jennies/tools" + "github.com/grafana/cog/internal/veneers" +) + +func Jennies() *codejen.JennyList[[]*ast.File] { + targets := codejen.JennyListWithNamer[[]*ast.File](func(f []*ast.File) string { + return "typescript" + }) + targets.AppendOneToOne( + TypescriptOptionsBuilder{}, + ) + targets.AppendManyToMany( + tools.Foreach[*ast.File](TypescriptRawTypes{}), + ) + targets.AppendOneToMany( + codejen.AdaptOneToMany[[]ast.Builder, []*ast.File]( + &TypescriptBuilder{}, + func(files []*ast.File) []ast.Builder { + generator := &ast.BuilderGenerator{} + builders := generator.FromAST(files) + + return veneers.Engine().ApplyTo(builders) + }, + ), + ) + + return targets +} diff --git a/internal/jennies/typescript/optionsbuilder.go b/internal/jennies/typescript/optionsbuilder.go new file mode 100644 index 000000000..47569106a --- /dev/null +++ b/internal/jennies/typescript/optionsbuilder.go @@ -0,0 +1,26 @@ +package typescript + +import ( + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" +) + +type TypescriptOptionsBuilder struct { +} + +func (jenny TypescriptOptionsBuilder) JennyName() string { + return "TypescriptOptionsBuilder" +} + +func (jenny TypescriptOptionsBuilder) Generate(files []*ast.File) (*codejen.File, error) { + output := jenny.generateFile() + + return codejen.NewFile("options_builder_gen.ts", []byte(output), jenny), nil +} + +func (jenny TypescriptOptionsBuilder) generateFile() string { + return `export interface OptionsBuilder { + build: () => T; +} +` +} diff --git a/internal/jennies/typescript/rawtypes.go b/internal/jennies/typescript/rawtypes.go new file mode 100644 index 000000000..9e8849e6b --- /dev/null +++ b/internal/jennies/typescript/rawtypes.go @@ -0,0 +1,241 @@ +package typescript + +import ( + "fmt" + "strings" + + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/tools" +) + +type TypescriptRawTypes struct { +} + +func (jenny 
TypescriptRawTypes) JennyName() string { + return "TypescriptRawTypes" +} + +func (jenny TypescriptRawTypes) Generate(file *ast.File) (codejen.Files, error) { + output, err := jenny.generateFile(file) + if err != nil { + return nil, err + } + + return codejen.Files{ + *codejen.NewFile("types/"+file.Package+"/types_gen.ts", output, jenny), + }, nil +} + +func (jenny TypescriptRawTypes) generateFile(file *ast.File) ([]byte, error) { + var buffer strings.Builder + + for _, typeDef := range file.Definitions { + typeDefGen, err := jenny.formatObject(typeDef, "") + if err != nil { + return nil, err + } + + buffer.Write(typeDefGen) + buffer.WriteString("\n") + } + + return []byte(buffer.String()), nil +} + +func (jenny TypescriptRawTypes) formatObject(def ast.Object, typesPkg string) ([]byte, error) { + var buffer strings.Builder + + for _, commentLine := range def.Comments { + buffer.WriteString(fmt.Sprintf("// %s\n", commentLine)) + } + + buffer.WriteString("export ") + + switch def.Type.Kind() { + case ast.KindStruct: + buffer.WriteString(fmt.Sprintf("interface %s ", def.Name)) + buffer.WriteString(formatStructFields(def.Type.(ast.StructType).Fields, typesPkg)) + buffer.WriteString("\n") + case ast.KindEnum: + buffer.WriteString(fmt.Sprintf("enum %s {\n", def.Name)) + for _, val := range def.Type.(ast.EnumType).Values { + buffer.WriteString(fmt.Sprintf("\t%s = %s,\n", tools.UpperCamelCase(val.Name), formatScalar(val.Value))) + } + buffer.WriteString("}\n") + case ast.KindRef: + refType := def.Type.(ast.RefType) + + buffer.WriteString(fmt.Sprintf("type %s = %s;", def.Name, refType.ReferredType)) + case ast.KindDisjunction, ast.KindMap: + buffer.WriteString(fmt.Sprintf("type %s = %s;\n", def.Name, formatType(def.Type, ""))) + case ast.KindString, + ast.KindInt8, ast.KindInt16, ast.KindInt32, ast.KindInt64, + ast.KindUint8, ast.KindUint16, ast.KindUint32, ast.KindUint64, + ast.KindFloat32, ast.KindFloat64: + scalarType, ok := def.Type.(ast.ScalarType) + if ok && scalarType.Value != nil { + buffer.WriteString(fmt.Sprintf("const %s = %s;\n", def.Name, formatScalar(scalarType.Value))) + } else { + buffer.WriteString(fmt.Sprintf("type %s = %s;\n", def.Name, formatType(def.Type, ""))) + } + case ast.KindAny: + buffer.WriteString(fmt.Sprintf("type %s = any;\n", def.Name)) + default: + return nil, fmt.Errorf("unhandled type def kind: %s", def.Type.Kind()) + } + + return []byte(buffer.String()), nil +} + +func formatStructFields(fields []ast.StructField, typesPkg string) string { + var buffer strings.Builder + + buffer.WriteString("{\n") + + for i, fieldDef := range fields { + fieldDefGen := formatField(fieldDef, typesPkg) + + buffer.WriteString( + strings.TrimSuffix( + prefixLinesWith(string(fieldDefGen), "\t"), + "\n\t", + ), + ) + + if i != len(fields)-1 { + buffer.WriteString("\n") + } + } + + buffer.WriteString("\n}") + + return buffer.String() +} + +func formatField(def ast.StructField, typesPkg string) []byte { + var buffer strings.Builder + + for _, commentLine := range def.Comments { + buffer.WriteString(fmt.Sprintf("// %s\n", commentLine)) + } + + required := "" + if !def.Required { + required = "?" + } + + formattedType := formatType(def.Type, typesPkg) + + buffer.WriteString(fmt.Sprintf( + "%s%s: %s;\n", + tools.LowerCamelCase(def.Name), + required, + formattedType, + )) + + return []byte(buffer.String()) +} + +func formatType(def ast.Type, typesPkg string) string { + // todo: handle nullable + // maybe if nullable, append | null to the type? 
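+	// (a nullable string field would then come out as `string | null`, for instance)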
+ switch def.Kind() { + case ast.KindDisjunction: + return formatDisjunction(def.(ast.DisjunctionType), typesPkg) + case ast.KindRef: + if typesPkg != "" { + return typesPkg + "." + (def.(ast.RefType)).ReferredType + } + + return (def.(ast.RefType)).ReferredType + case ast.KindArray: + return formatArray(def.(ast.ArrayType), typesPkg) + case ast.KindStruct: + return formatStructFields(def.(ast.StructType).Fields, typesPkg) + case ast.KindMap: + return formatMap(def.(ast.MapType), typesPkg) + case ast.KindEnum: + return formatAnonymousEnum(def.(ast.EnumType)) + + case ast.KindNull: + return "null" + case ast.KindAny: + return "any" + + case ast.KindBytes, ast.KindString: + return "string" + + case ast.KindFloat32, ast.KindFloat64: + return "number" + case ast.KindUint8, ast.KindUint16, ast.KindUint32, ast.KindUint64: + return "number" + case ast.KindInt8, ast.KindInt16, ast.KindInt32, ast.KindInt64: + return "number" + + case ast.KindBool: + return "boolean" + + default: + return string(def.Kind()) + } +} + +func formatArray(def ast.ArrayType, typesPkg string) string { + subTypeString := formatType(def.ValueType, typesPkg) + + return fmt.Sprintf("%s[]", subTypeString) +} + +func formatDisjunction(def ast.DisjunctionType, typesPkg string) string { + subTypes := make([]string, 0, len(def.Branches)) + for _, subType := range def.Branches { + subTypes = append(subTypes, formatType(subType, typesPkg)) + } + + return strings.Join(subTypes, " | ") +} + +func formatMap(def ast.MapType, typesPkg string) string { + keyTypeString := formatType(def.IndexType, typesPkg) + valueTypeString := formatType(def.ValueType, typesPkg) + + return fmt.Sprintf("Record<%s, %s>", keyTypeString, valueTypeString) +} + +func formatAnonymousEnum(def ast.EnumType) string { + values := make([]string, 0, len(def.Values)) + for _, value := range def.Values { + values = append(values, fmt.Sprintf("%#v", value.Value)) + } + + enumeration := strings.Join(values, " | ") + + return enumeration +} + +func prefixLinesWith(input string, prefix string) string { + lines := strings.Split(input, "\n") + prefixed := make([]string, 0, len(lines)) + + for _, line := range lines { + prefixed = append(prefixed, prefix+line) + } + + return strings.Join(prefixed, "\n") +} + +func formatScalar(val any) string { + if list, ok := val.([]any); ok { + items := make([]string, 0, len(list)) + + for _, item := range list { + items = append(items, formatScalar(item)) + } + + // TODO: we can't assume a list of strings + return fmt.Sprintf("[%s]", strings.Join(items, ", ")) + } + + return fmt.Sprintf("%#v", val) +} diff --git a/internal/jsonschema/generator.go b/internal/jsonschema/generator.go new file mode 100644 index 000000000..969c5e629 --- /dev/null +++ b/internal/jsonschema/generator.go @@ -0,0 +1,298 @@ +package jsonschema + +import ( + "errors" + "fmt" + "io" + "strings" + + "github.com/grafana/cog/internal/ast" + schemaparser "github.com/santhosh-tekuri/jsonschema" +) + +var errUndescriptiveSchema = fmt.Errorf("the schema does not appear to be describing anything") + +const ( + typeNull = "null" + typeBoolean = "boolean" + typeObject = "object" + typeArray = "array" + typeString = "string" + typeNumber = "number" + typeInteger = "integer" +) + +type Config struct { + // Package name used to generate code into. 
+ Package string +} + +type newGenerator struct { + file *ast.File +} + +func GenerateAST(schemaReader io.Reader, c Config) (*ast.File, error) { + g := &newGenerator{ + file: &ast.File{ + Package: c.Package, + }, + } + + compiler := schemaparser.NewCompiler() + compiler.ExtractAnnotations = true + if err := compiler.AddResource("schema", schemaReader); err != nil { + return nil, err + } + + schema, err := compiler.Compile("schema") + if err != nil { + return nil, err + } + + // The root of the schema is an actual type/object + if schema.Ref == nil { + if err := g.declareDefinition(c.Package, schema); err != nil { + return nil, err + } + } else { + // The root of the schema contains definitions, and a reference to the "main" object + if err := g.declareDefinition(c.Package, schema.Ref); err != nil { + return nil, err + } + } + + return g.file, nil +} + +func (g *newGenerator) declareDefinition(definitionName string, schema *schemaparser.Schema) error { + def, err := g.walkDefinition(schema) + if err != nil { + return fmt.Errorf("%s: %w", definitionName, err) + } + + g.file.Definitions = append(g.file.Definitions, ast.Object{ + Name: definitionName, + Type: def, + }) + + return nil +} + +func (g *newGenerator) walkDefinition(schema *schemaparser.Schema) (ast.Type, error) { + var def ast.Type + var err error + + if len(schema.Types) == 0 { + if schema.Ref != nil { + return g.walkRef(schema) + } + + if schema.OneOf != nil { + return g.walkOneOf(schema) + } + + if schema.AnyOf != nil { + return g.walkAnyOf(schema) + } + + if schema.AllOf != nil { + return g.walkOneOf(schema) + } + + if schema.Properties != nil || schema.PatternProperties != nil { + return g.walkObject(schema) + } + + if schema.Enum != nil { + return g.walkEnum(schema) + } + + return nil, errUndescriptiveSchema + } + + if len(schema.Types) > 1 { + def, err = g.walkDisjunction(schema) + } else if schema.Enum != nil { + def, err = g.walkEnum(schema) + } else { + switch schema.Types[0] { + case typeNull: + def = ast.ScalarType{ScalarKind: ast.KindNull} + case typeBoolean: + def = ast.ScalarType{ScalarKind: ast.KindBool} + case typeString: + def, err = g.walkString(schema) + case typeObject: + def, err = g.walkObject(schema) + case typeNumber, typeInteger: + def, err = g.walkNumber(schema) + case typeArray: + def, err = g.walkList(schema) + + default: + return nil, fmt.Errorf("unexpected schema with type '%s'", schema.Types[0]) + } + } + + return def, err +} + +func (g *newGenerator) walkDisjunction(schema *schemaparser.Schema) (ast.DisjunctionType, error) { + // TODO: finish implementation + return ast.DisjunctionType{}, nil +} + +func (g *newGenerator) walkDisjunctionBranches(branches []*schemaparser.Schema) ([]ast.Type, error) { + definitions := make([]ast.Type, 0, len(branches)) + for _, oneOf := range branches { + branch, err := g.walkDefinition(oneOf) + if err != nil { + return nil, err + } + + definitions = append(definitions, branch) + } + + return definitions, nil +} + +func (g *newGenerator) walkOneOf(schema *schemaparser.Schema) (ast.DisjunctionType, error) { + if len(schema.OneOf) == 0 { + return ast.DisjunctionType{}, fmt.Errorf("oneOf with no branches") + } + + branches, err := g.walkDisjunctionBranches(schema.OneOf) + if err != nil { + return ast.DisjunctionType{}, err + } + + return ast.DisjunctionType{ + Branches: branches, + }, nil +} + +// TODO: what's the difference between oneOf and anyOf? 
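+// (in JSON Schema, `oneOf` requires the value to match exactly one sub-schema,
+// while `anyOf` only requires it to match at least one; both are currently
+// mapped to a disjunction here)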
+func (g *newGenerator) walkAnyOf(schema *schemaparser.Schema) (ast.DisjunctionType, error) { + if len(schema.AnyOf) == 0 { + return ast.DisjunctionType{}, fmt.Errorf("anyOf with no branches") + } + + branches, err := g.walkDisjunctionBranches(schema.AnyOf) + if err != nil { + return ast.DisjunctionType{}, err + } + + return ast.DisjunctionType{ + Branches: branches, + }, nil +} + +func (g *newGenerator) walkAllOf(schema *schemaparser.Schema) (ast.DisjunctionType, error) { + // TODO: finish implementation and use correct type + return ast.DisjunctionType{}, nil +} + +func (g *newGenerator) walkRef(schema *schemaparser.Schema) (ast.RefType, error) { + parts := strings.Split(schema.Ref.Ptr, "/") + referredKindName := parts[len(parts)-1] // Very naive + + if err := g.declareDefinition(referredKindName, schema.Ref); err != nil { + return ast.RefType{}, err + } + + return ast.RefType{ + ReferredType: referredKindName, + //Comments: schemaComments(schema), + }, nil +} + +func (g *newGenerator) walkString(schema *schemaparser.Schema) (ast.ScalarType, error) { + def := ast.ScalarType{ScalarKind: ast.KindString} + + /* + if len(schema.Enum) != 0 { + def.Constraints = append(def.Constraints, ast.TypeConstraint{ + Op: "in", + Args: []any{schema.Enum}, + }) + } + */ + + return def, nil +} + +func (g *newGenerator) walkNumber(schema *schemaparser.Schema) (ast.ScalarType, error) { + // TODO: finish implementation + return ast.ScalarType{ScalarKind: ast.KindInt64}, nil +} + +func (g *newGenerator) walkList(schema *schemaparser.Schema) (ast.ArrayType, error) { + var itemsDef ast.Type + var err error + + if schema.Items == nil { + itemsDef = ast.ScalarType{ + ScalarKind: ast.KindAny, + } + } else { + // TODO: schema.Items might not be a schema? + itemsDef, err = g.walkDefinition(schema.Items.(*schemaparser.Schema)) + // items contains an empty schema: `{}` + if errors.Is(err, errUndescriptiveSchema) { + itemsDef = ast.ScalarType{ + ScalarKind: ast.KindAny, + } + } else if err != nil { + return ast.ArrayType{}, err + } + } + + return ast.ArrayType{ + ValueType: itemsDef, + }, nil +} + +func (g *newGenerator) walkEnum(schema *schemaparser.Schema) (ast.EnumType, error) { + if len(schema.Enum) == 0 { + return ast.EnumType{}, fmt.Errorf("enum with no values") + } + + values := make([]ast.EnumValue, 0, len(schema.Enum)) + for _, enumValue := range schema.Enum { + values = append(values, ast.EnumValue{ + Type: ast.ScalarType{ScalarKind: ast.KindString}, // TODO: identify that correctly + + // Simple mapping of all enum values (which we are assuming are in + // lowerCamelCase) to corresponding CamelCase + Name: enumValue.(string), + Value: enumValue.(string), + }) + } + + return ast.EnumType{ + Values: values, + // TODO: default value? 
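+		// (the schema's `default` keyword, if any, is not carried over yet)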
+ }, nil +} + +func (g *newGenerator) walkObject(schema *schemaparser.Schema) (ast.StructType, error) { + // TODO: finish implementation + fields := make([]ast.StructField, 0, len(schema.Properties)) + for name, property := range schema.Properties { + fieldDef, err := g.walkDefinition(property) + if err != nil { + return ast.StructType{}, err + } + + fields = append(fields, ast.StructField{ + Name: name, + Comments: schemaComments(schema), + Required: stringInList(schema.Required, name), + Type: fieldDef, + }) + } + + return ast.StructType{ + Fields: fields, + }, nil +} diff --git a/internal/jsonschema/utils.go b/internal/jsonschema/utils.go new file mode 100644 index 000000000..6efb9b705 --- /dev/null +++ b/internal/jsonschema/utils.go @@ -0,0 +1,34 @@ +package jsonschema + +import ( + "strings" + + schemaparser "github.com/santhosh-tekuri/jsonschema" +) + +func stringInList(list []string, input string) bool { + for _, value := range list { + if value == input { + return true + } + } + + return false +} + +func schemaComments(schema *schemaparser.Schema) []string { + comment := schema.Description + + lines := strings.Split(comment, "\n") + filtered := make([]string, 0, len(lines)) + + for _, line := range lines { + if line == "" { + continue + } + + filtered = append(filtered, line) + } + + return filtered +} diff --git a/internal/simplecue/generator.go b/internal/simplecue/generator.go new file mode 100644 index 000000000..9d66563e1 --- /dev/null +++ b/internal/simplecue/generator.go @@ -0,0 +1,619 @@ +package simplecue + +import ( + "fmt" + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/format" + "github.com/grafana/cog/internal/ast" +) + +const annotationName = "cog" +const hintKindEnum = "enum" +const annotationKindFieldName = "kind" +const enumMembersAttr = "memberNames" + +type Config struct { + // Package name used to generate code into. + Package string +} + +type newGenerator struct { + file *ast.File +} + +func GenerateAST(val cue.Value, c Config) (*ast.File, error) { + g := &newGenerator{ + file: &ast.File{ + Package: c.Package, + }, + } + + i, err := val.Fields(cue.Definitions(true)) + if err != nil { + return nil, err + } + for i.Next() { + sel := i.Selector() + + n, err := g.declareTopLevelType(selectorLabel(sel), i.Value()) + if err != nil { + return nil, err + } + + g.file.Definitions = append(g.file.Definitions, n) + } + + return g.file, nil +} + +// Do we really need to distinguish top-level types with others? +func (g *newGenerator) declareTopLevelType(name string, v cue.Value) (ast.Object, error) { + typeHint, err := getTypeHint(v) + if err != nil { + return ast.Object{}, err + } + + // Hinted as an enum + if typeHint == hintKindEnum { + return g.declareEnum(name, v) + } + + ik := v.IncompleteKind() + + // Is it a string disjunction that we can turn into an enum? 
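+	// (e.g. a CUE value such as `"dark" | "light"`)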
+ disjunctions := appendSplit(nil, cue.OrOp, v) + if len(disjunctions) != 1 && ik&cue.StringKind == ik { + return g.declareEnum(name, v) + } + + switch v.IncompleteKind() { + case cue.StringKind: + return g.declareTopLevelString(name, v) + case cue.StructKind: + return g.declareTopLevelStruct(name, v) + default: + return ast.Object{}, errorWithCueRef(v, "unexpected top-level kind '%s'", v.IncompleteKind().String()) + } +} + +func (g *newGenerator) declareTopLevelString(name string, v cue.Value) (ast.Object, error) { + ik := v.IncompleteKind() + if ik&cue.StringKind != ik { + return ast.Object{}, errorWithCueRef(v, "top-level strings may only be generated from concrete strings") + } + + strType, err := g.declareString(v) + if err != nil { + return ast.Object{}, err + } + + return ast.Object{ + Name: name, + Comments: commentsFromCueValue(v), + Type: strType, + }, nil +} + +func (g *newGenerator) declareEnum(name string, v cue.Value) (ast.Object, error) { + // Restrict the expression of enums to ints or strings. + allowed := cue.StringKind | cue.IntKind + ik := v.IncompleteKind() + if ik&allowed != ik { + return ast.Object{}, errorWithCueRef(v, "enums may only be generated from concrete strings, or ints") + } + + values, err := g.extractEnumValues(v) + if err != nil { + return ast.Object{}, err + } + + return ast.Object{ + Name: name, + Comments: commentsFromCueValue(v), + Type: ast.EnumType{ + Values: values, + }, + }, nil +} + +func (g *newGenerator) extractEnumValues(v cue.Value) ([]ast.EnumValue, error) { + _, dvals := v.Expr() + a := v.Attribute(annotationName) + + var attrMemberNameExist bool + var evals []string + if a.Err() == nil { + val, found, err := a.Lookup(0, enumMembersAttr) + if err == nil && found { + attrMemberNameExist = true + evals = strings.Split(val, "|") + if len(evals) != len(dvals) { + return nil, errorWithCueRef(v, "enums and %s attributes size doesn't match", enumMembersAttr) + } + } + } + + // We only allowed String Enum to be generated without memberName attribute + if v.IncompleteKind() != cue.StringKind && !attrMemberNameExist { + return nil, errorWithCueRef(v, "numeric enums may only be generated from memberNames attribute") + } + + subType := ast.ScalarType{ScalarKind: ast.KindString} + if v.IncompleteKind() == cue.IntKind { + subType = ast.ScalarType{ScalarKind: ast.KindInt64} + } + + var fields []ast.EnumValue + for idx, dv := range dvals { + var text string + if attrMemberNameExist { + text = evals[idx] + } else { + text, _ = dv.String() + } + + if !dv.IsConcrete() { + return nil, errorWithCueRef(v, "enums may only be generated from a disjunction of concrete strings or numbers") + } + + val, err := cueConcreteToScalar(dv) + if err != nil { + return nil, err + } + fields = append(fields, ast.EnumValue{ + Type: subType, + + // Simple mapping of all enum values (which we are assuming are in + // lowerCamelCase) to corresponding CamelCase + Name: text, + Value: val, + }) + } + + return fields, nil +} + +func (g *newGenerator) declareTopLevelStruct(name string, v cue.Value) (ast.Object, error) { + // This check might be too restrictive + if v.IncompleteKind() != cue.StructKind { + return ast.Object{}, errorWithCueRef(v, "top-level type definitions may only be generated from structs") + } + + nodeType, err := g.declareNode(v) + if err != nil { + return ast.Object{}, err + } + + typeDef := ast.Object{ + Name: name, + Comments: commentsFromCueValue(v), + Type: nodeType, + } + + return typeDef, nil +} + +func (g *newGenerator) structFields(v cue.Value) 
([]ast.StructField, error) { + // This check might be too restrictive + if v.IncompleteKind() != cue.StructKind { + return nil, errorWithCueRef(v, "top-level type definitions may only be generated from structs") + } + + var fields []ast.StructField + + // explore struct fields + for i, _ := v.Fields(cue.Optional(true), cue.Definitions(true)); i.Next(); { + fieldLabel := selectorLabel(i.Selector()) + + node, err := g.declareNode(i.Value()) + if err != nil { + return nil, err + } + + // Extract the default value if it's there + defVal, err := g.extractDefault(i.Value()) + if err != nil { + return nil, err + } + + fields = append(fields, ast.StructField{ + Name: fieldLabel, + Comments: commentsFromCueValue(i.Value()), + Required: !i.IsOptional(), + Type: node, + Default: defVal, + }) + } + + return fields, nil +} + +func (g *newGenerator) declareNode(v cue.Value) (ast.Type, error) { + // This node is referring to another definition + _, path := v.ReferencePath() + if path.String() != "" { + selectors := path.Selectors() + + return ast.RefType{ + ReferredType: selectorLabel(selectors[len(selectors)-1]), + }, nil + } + + disjunctions := appendSplit(nil, cue.OrOp, v) + if len(disjunctions) != 1 { + allowedKindsForAnonymousEnum := cue.StringKind | cue.IntKind + ik := v.IncompleteKind() + if ik&allowedKindsForAnonymousEnum == ik { + return g.declareAnonymousEnum(v) + } + + branches := make([]ast.Type, 0, len(disjunctions)) + for _, subTypeValue := range disjunctions { + subType, err := g.declareNode(subTypeValue) + if err != nil { + return nil, err + } + + branches = append(branches, subType) + } + + return ast.DisjunctionType{ + Branches: branches, + }, nil + } + + switch v.IncompleteKind() { + case cue.TopKind: + return ast.ScalarType{ScalarKind: ast.KindAny}, nil + case cue.NullKind: + return ast.ScalarType{ScalarKind: ast.KindNull}, nil + case cue.BoolKind: + return ast.ScalarType{ + ScalarKind: ast.KindBool, + }, nil + case cue.BytesKind: + return ast.ScalarType{ScalarKind: ast.KindBytes}, nil + case cue.StringKind: + return g.declareString(v) + case cue.FloatKind, cue.NumberKind, cue.IntKind: + return g.declareNumber(v) + case cue.ListKind: + return g.declareList(v) + case cue.StructKind: + op, opArgs := v.Expr() + + // in cue: {...}, {[string]: type}, or inline struct + + if op == cue.NoOp { + // looking for {[string]: type} + // (don't know how to do this properly) + t, ok := opArgs[0].Elem() + if ok && t.IncompleteKind() != cue.TopKind { + typeDef, err := g.declareNode(t) + if err != nil { + return nil, err + } + + return ast.MapType{ + IndexType: ast.ScalarType{ScalarKind: ast.KindString}, + ValueType: typeDef, + }, nil + } + } + + fields, err := g.structFields(v) + if err != nil { + return nil, err + } + + // {...} + if len(fields) == 0 { + return ast.ScalarType{ScalarKind: ast.KindAny}, nil + } + + return ast.StructType{Fields: fields}, nil + default: + return nil, errorWithCueRef(v, "unexpected node with kind '%s'", v.IncompleteKind().String()) + } +} + +func (g *newGenerator) declareAnonymousEnum(v cue.Value) (ast.Type, error) { + allowed := cue.StringKind | cue.IntKind + ik := v.IncompleteKind() + if ik&allowed != ik { + return nil, errorWithCueRef(v, "enums may only be generated from concrete strings, or ints") + } + + values, err := g.extractEnumValues(v) + if err != nil { + return nil, err + } + + return ast.EnumType{ + Values: values, + }, nil +} + +func (g *newGenerator) declareString(v cue.Value) (ast.ScalarType, error) { + typeDef := ast.ScalarType{ + ScalarKind: 
ast.KindString, + } + + // v.IsConcrete() being true means we're looking at a constant/known value + if v.IsConcrete() { + val, err := cueConcreteToScalar(v) + if err != nil { + return typeDef, err + } + + typeDef.Value = val + } + + // Extract constraints + constraints, err := g.declareStringConstraints(v) + if err != nil { + return typeDef, err + } + + typeDef.Constraints = constraints + + return typeDef, nil +} + +func (g *newGenerator) extractDefault(v cue.Value) (any, error) { + defaultVal, ok := v.Default() + if !ok { + return nil, nil + } + + def, err := cueConcreteToScalar(defaultVal) + if err != nil { + return nil, err + } + + return def, nil +} + +func (g *newGenerator) declareStringConstraints(v cue.Value) ([]ast.TypeConstraint, error) { + typeAndConstraints := appendSplit(nil, cue.AndOp, v) + + // nothing to do + if len(typeAndConstraints) == 1 { + return nil, nil + } + + // the constraint allows cue to infer a concrete value + // ex: #SomeEnumType & "some value from the enum" + if v.IsConcrete() { + stringVal, err := v.String() + if err != nil { + return nil, errorWithCueRef(v, "could not convert concrete value to string") + } + + return []ast.TypeConstraint{ + { + Op: "==", + Args: []any{stringVal}, + }, + }, nil + } + + constraints := make([]ast.TypeConstraint, 0, len(typeAndConstraints)) + + for _, andExpr := range typeAndConstraints { + op, args := andExpr.Expr() + + switch op { + case cue.CallOp: + switch fmt.Sprint(args[0]) { + case "strings.MinRunes": + scalar, err := cueConcreteToScalar(args[1]) + if err != nil { + return nil, err + } + + constraints = append(constraints, ast.TypeConstraint{ + Op: "minLength", + Args: []any{scalar}, + }) + + case "strings.MaxRunes": + scalar, err := cueConcreteToScalar(args[1]) + if err != nil { + return nil, err + } + + constraints = append(constraints, ast.TypeConstraint{ + Op: "maxLength", + Args: []any{scalar}, + }) + // TODO: support more OPs? + } + } + } + + return constraints, nil +} + +func (g *newGenerator) declareNumber(v cue.Value) (ast.ScalarType, error) { + numberTypeWithConstraintsAsString, err := format.Node(v.Syntax()) + if err != nil { + return ast.ScalarType{}, err + } + parts := strings.Split(string(numberTypeWithConstraintsAsString), " ") + if len(parts) == 0 { + return ast.ScalarType{}, errorWithCueRef(v, "something went very wrong while formatting a number expression into a string") + } + + // dirty way of preserving the actual type from cue + // FIXME: fails if the type has a custom bound that further restricts the values + // IE: uint8 & < 12 will be printed as "uint & < 12 + var numberType ast.Kind + switch ast.Kind(parts[0]) { + case ast.KindFloat32, ast.KindFloat64: + numberType = ast.Kind(parts[0]) + case ast.KindUint8, ast.KindUint16, ast.KindUint32, ast.KindUint64: + numberType = ast.Kind(parts[0]) + case ast.KindInt8, ast.KindInt16, ast.KindInt32, ast.KindInt64: + numberType = ast.Kind(parts[0]) + case "uint": + numberType = ast.KindUint64 + case "int": + numberType = ast.KindInt64 + case "number": + numberType = ast.KindFloat64 + default: + return ast.ScalarType{}, errorWithCueRef(v, "unknown number type '%s'", parts[0]) + } + + typeDef := ast.ScalarType{ + ScalarKind: numberType, + } + + // v.IsConcrete() being true means we're looking at a constant/known value + if v.IsConcrete() { + val, err := cueConcreteToScalar(v) + if err != nil { + return typeDef, err + } + + typeDef.Value = val + } + + // If the default (all lists have a default, usually self, ugh) differs from the + // input list, peel it off. 
Otherwise our AnyIndex lookup may end up getting + // sent on the wrong path. + defv, _ := v.Default() + if !defv.Equals(v) { + _, dvals := v.Expr() + v = dvals[0] + } + + // extract constraints + constraints, err := g.declareNumberConstraints(v) + if err != nil { + return ast.ScalarType{}, err + } + + typeDef.Constraints = constraints + + return typeDef, nil +} + +func (g *newGenerator) declareNumberConstraints(v cue.Value) ([]ast.TypeConstraint, error) { + // typeAndConstraints can contain the following cue expressions: + // - number + // - int|float, number, upper bound, lower bound + typeAndConstraints := appendSplit(nil, cue.AndOp, v) + + // nothing to do + if len(typeAndConstraints) == 1 { + return nil, nil + } + + constraints := make([]ast.TypeConstraint, 0, len(typeAndConstraints)) + + constraintsStartIndex := 1 + + // don't include type-related constraints + if len(typeAndConstraints) > 1 && typeAndConstraints[0].IncompleteKind() != cue.NumberKind { + constraintsStartIndex = 3 + } + + for _, s := range typeAndConstraints[constraintsStartIndex:] { + constraint, err := g.extractConstraint(s) + if err != nil { + return nil, err + } + + constraints = append(constraints, constraint) + } + + return constraints, nil +} + +func (g *newGenerator) extractConstraint(v cue.Value) (ast.TypeConstraint, error) { + toConstraint := func(operator string, arg cue.Value) (ast.TypeConstraint, error) { + scalar, err := cueConcreteToScalar(arg) + if err != nil { + return ast.TypeConstraint{}, err + } + + return ast.TypeConstraint{ + Op: operator, + Args: []any{scalar}, + }, nil + } + + switch op, a := v.Expr(); op { + case cue.LessThanOp: + return toConstraint("<", a[0]) + case cue.LessThanEqualOp: + return toConstraint("<=", a[0]) + case cue.GreaterThanOp: + return toConstraint(">", a[0]) + case cue.GreaterThanEqualOp: + return toConstraint(">=", a[0]) + case cue.NotEqualOp: + return toConstraint("!=", a[0]) + default: + return ast.TypeConstraint{}, errorWithCueRef(v, "unsupported op for number %v", op) + } +} + +func (g *newGenerator) declareList(v cue.Value) (ast.Type, error) { + i, err := v.List() + if err != nil { + return nil, err + } + + typeDef := ast.ArrayType{ + // FIXME: we set a default type because our logic is broken + ValueType: ast.ScalarType{ + ScalarKind: ast.KindAny, + }, + } + + // works only for a closed/concrete list + if v.IsConcrete() { + // fixme: this is wrong + for i.Next() { + node, err := g.declareNode(i.Value()) + if err != nil { + return nil, err + } + + typeDef.ValueType = node + } + + return typeDef, nil + } + + // open list + + // If the default (all lists have a default, usually self, ugh) differs from the + // input list, peel it off. Otherwise our AnyIndex lookup may end up getting + // sent on the wrong path. + defv, _ := v.Default() + if !defv.Equals(v) { + _, dvals := v.Expr() + v = dvals[0] + } + + e := v.LookupPath(cue.MakePath(cue.AnyIndex)) + if !e.Exists() { + // unreachable? 
+ return nil, errorWithCueRef(v, "open list must have a type") + } + + expr, err := g.declareNode(e) + if err != nil { + return nil, err + } + + typeDef.ValueType = expr + + return typeDef, nil +} diff --git a/internal/simplecue/utils.go b/internal/simplecue/utils.go new file mode 100644 index 000000000..8a48c6716 --- /dev/null +++ b/internal/simplecue/utils.go @@ -0,0 +1,173 @@ +package simplecue + +import ( + "fmt" + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/format" +) + +func mustDumpsyn(v cue.Value) string { + dump, err := dumpsyn(v) + if err != nil { + panic(err) + } + + return dump +} + +func dumpsyn(v cue.Value) (string, error) { + syn := v.Syntax( + cue.Concrete(false), // allow incomplete values + cue.Definitions(false), + cue.Optional(true), + cue.Attributes(true), + cue.Docs(true), + ) + + byt, err := format.Node(syn, format.TabIndent(true)) + return string(byt), err +} + +func errorWithCueRef(v cue.Value, format string, args ...interface{}) error { + return fmt.Errorf(v.Pos().String() + ": " + fmt.Sprintf(format, args...)) +} + +func selectorLabel(sel cue.Selector) string { + if sel.Type().ConstraintType() == cue.PatternConstraint { + return "*" + } + switch sel.LabelType() { + case cue.StringLabel: + return sel.Unquoted() + case cue.DefinitionLabel: + return sel.String()[1:] + } + // We shouldn't get anything other than non-hidden + // fields and definitions because we've not asked the + // Fields iterator for those or created them explicitly. + panic(fmt.Sprintf("unreachable %v", sel.Type())) +} + +// from https://github.com/cue-lang/cue/blob/99e8578ac45e5e7e6ebf25794303bc916744c0d3/encoding/openapi/build.go#L490 +func appendSplit(a []cue.Value, splitBy cue.Op, v cue.Value) []cue.Value { + op, args := v.Expr() + // dedup elements. + k := 1 +outer: + for i := 1; i < len(args); i++ { + for j := 0; j < k; j++ { + if args[i].Subsume(args[j], cue.Raw()) == nil && + args[j].Subsume(args[i], cue.Raw()) == nil { + continue outer + } + } + args[k] = args[i] + k++ + } + args = args[:k] + + if op == cue.NoOp && len(args) == 1 { + // TODO: this is to deal with default value removal. This may change + // when we completely separate default values from values. + a = append(a, args...) + } else if op != splitBy { + a = append(a, v) + } else { + for _, v := range args { + a = appendSplit(a, splitBy, v) + } + } + return a +} + +func getTypeHint(v cue.Value) (string, error) { + // Direct lookup of attributes with Attribute() seems broken-ish, so do our + // own search as best we can, allowing ValueAttrs, which include both field + // and decl attributes. + var found bool + var attr cue.Attribute + for _, a := range v.Attributes(cue.ValueAttr) { + if a.Name() == annotationName { + found = true + attr = a + } + } + + if !found { + return "", nil + } + + tt, found, err := attr.Lookup(0, annotationKindFieldName) + if err != nil { + return "", err + } + + if !found { + return "", errorWithCueRef(v, "no value for the %q key in @%s attribute", annotationKindFieldName, annotationName) + } + return tt, nil +} + +// ONLY call this function if it has been established that the provided Value is +// Concrete. 
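+// It converts the value to a plain Go value: string, float64, int64, bool, nil,
+// or (for lists) a []any built recursively.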
+func cueConcreteToScalar(v cue.Value) (interface{}, error) { + switch v.Kind() { + case cue.NullKind: + return nil, nil + case cue.StringKind: + return v.String() + case cue.NumberKind, cue.FloatKind: + return v.Float64() + case cue.IntKind: + return v.Int64() + case cue.BoolKind: + return v.Bool() + case cue.ListKind: + var values []any + it, err := v.List() + if err != nil { + return nil, errorWithCueRef(v, "can create list iterator: %s", v.Kind()) + } + + for it.Next() { + current := it.Value() + + val, err := cueConcreteToScalar(current) + if err != nil { + return nil, err + } + + values = append(values, val) + } + + if len(values) == 0 { + return nil, nil + } + + return values, nil + default: + return nil, errorWithCueRef(v, "can not convert kind to scalar: %s", v.Kind()) + } +} + +func commentsFromCueValue(v cue.Value) []string { + docs := v.Doc() + if s, ok := v.Source().(*ast.Field); ok { + for _, c := range s.Comments() { + if !c.Doc && c.Line { + docs = append(docs, c) + } + } + } + + ret := make([]string, 0, len(docs)) + for _, cg := range docs { + for _, line := range strings.Split(strings.Trim(cg.Text(), "\n "), "\n") { + ret = append(ret, line) + } + } + return ret +} diff --git a/internal/tools/arrays.go b/internal/tools/arrays.go new file mode 100644 index 000000000..9276b9573 --- /dev/null +++ b/internal/tools/arrays.go @@ -0,0 +1,11 @@ +package tools + +func ItemInList[T comparable](needle T, haystack []T) bool { + for _, item := range haystack { + if item == needle { + return true + } + } + + return false +} diff --git a/internal/tools/strings.go b/internal/tools/strings.go new file mode 100644 index 000000000..f484c99dc --- /dev/null +++ b/internal/tools/strings.go @@ -0,0 +1,38 @@ +package tools + +import ( + "strings" + + "golang.org/x/text/cases" + "golang.org/x/text/language" +) + +func UpperCamelCase(s string) string { + s = LowerCamelCase(s) + + // Uppercase the first letter + if len(s) > 0 { + s = strings.ToUpper(s[:1]) + s[1:] + } + + return s +} + +func LowerCamelCase(s string) string { + // Replace all underscores/dashes with spaces + s = strings.ReplaceAll(s, "_", " ") + s = strings.ReplaceAll(s, "-", " ") + + // Title case s + s = cases.Title(language.AmericanEnglish, cases.NoLower).String(s) + + // Remove all spaces + s = strings.ReplaceAll(s, " ", "") + + // Lowercase the first letter + if len(s) > 0 { + s = strings.ToLower(s[:1]) + s[1:] + } + + return s +} diff --git a/internal/veneers/builder/actions.go b/internal/veneers/builder/actions.go new file mode 100644 index 000000000..d91275885 --- /dev/null +++ b/internal/veneers/builder/actions.go @@ -0,0 +1,52 @@ +package builder + +import ( + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/tools" +) + +type RewriteAction func(builders ast.Builders, builder ast.Builder) ast.Builder + +func OmitAction() RewriteAction { + return func(builders ast.Builders, _ ast.Builder) ast.Builder { + return ast.Builder{} + } +} + +func MergeIntoAction(sourceBuilderName string, underPath string, excludeOptions []string) RewriteAction { + return func(builders ast.Builders, destinationBuilder ast.Builder) ast.Builder { + // we're implicitly saying that this action only works on builders originating from the same package. + // that's probably not good enough. 
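+		//
+		// For example, the veneers defined in veneers.go use this action to merge
+		// the options of the "FieldConfig" builder into the "Panel" builder, under
+		// the "fieldConfig.defaults" path.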
+ + sourceBuilder, found := builders.LocateByObject(destinationBuilder.Package, sourceBuilderName) + if !found { + return destinationBuilder + } + + newBuilder := destinationBuilder + + // TODO: initializations + + for _, opt := range sourceBuilder.Options { + if tools.ItemInList(opt.Name, excludeOptions) { + continue + } + + // TODO: assignment paths + newOpt := opt + newOpt.Assignments = nil + + for _, assignment := range opt.Assignments { + newAssignment := assignment + // @FIXME: this only works if no part of the `underPath` path can be nil + newAssignment.Path = underPath + "." + assignment.Path + + newOpt.Assignments = append(newOpt.Assignments, newAssignment) + } + + newBuilder.Options = append(newBuilder.Options, newOpt) + } + + return newBuilder + } +} diff --git a/internal/veneers/builder/rules.go b/internal/veneers/builder/rules.go new file mode 100644 index 000000000..82747607b --- /dev/null +++ b/internal/veneers/builder/rules.go @@ -0,0 +1,20 @@ +package builder + +type RewriteRule struct { + Selector Selector + Action RewriteAction +} + +func Omit(selector Selector) RewriteRule { + return RewriteRule{ + Selector: selector, + Action: OmitAction(), + } +} + +func MergeInto(selector Selector, sourceBuilderName string, underPath string, excludeOptions []string) RewriteRule { + return RewriteRule{ + Selector: selector, + Action: MergeIntoAction(sourceBuilderName, underPath, excludeOptions), + } +} diff --git a/internal/veneers/builder/selectors.go b/internal/veneers/builder/selectors.go new file mode 100644 index 000000000..771e4a589 --- /dev/null +++ b/internal/veneers/builder/selectors.go @@ -0,0 +1,19 @@ +package builder + +import ( + "github.com/grafana/cog/internal/ast" +) + +type Selector func(builder ast.Builder) bool + +func ByName(objectName string) Selector { + return func(builder ast.Builder) bool { + return builder.For.Name == objectName + } +} + +func EveryBuilder() Selector { + return func(builder ast.Builder) bool { + return true + } +} diff --git a/internal/veneers/option/actions.go b/internal/veneers/option/actions.go new file mode 100644 index 000000000..1b232974c --- /dev/null +++ b/internal/veneers/option/actions.go @@ -0,0 +1,146 @@ +package option + +import ( + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/tools" +) + +type RewriteAction func(option ast.Option) []ast.Option + +func RenameAction(newName string) RewriteAction { + return func(option ast.Option) []ast.Option { + newOption := option + newOption.Name = newName + + return []ast.Option{newOption} + } +} + +// FIXME: looks at the first arg only, no way to configure that right now +func ArrayToAppendAction() RewriteAction { + return func(option ast.Option) []ast.Option { + if len(option.Args) < 1 || option.Args[0].Type.Kind() != ast.KindArray { + return []ast.Option{option} + } + + oldArgs := option.Args + + newFirstArg := option.Args[0] + newFirstArg.Type = option.Args[0].Type.(ast.ArrayType).ValueType + + newOpt := option + newOpt.Args = []ast.Argument{newFirstArg} + + if len(oldArgs) > 1 { + newOpt.Args = append(newOpt.Args, oldArgs[1:]...) 
+ } + + return []ast.Option{newOpt} + } +} + +func OmitAction() RewriteAction { + return func(_ ast.Option) []ast.Option { + return nil + } +} + +func PromoteToConstructorAction() RewriteAction { + return func(option ast.Option) []ast.Option { + newOpt := option + newOpt.IsConstructorArg = true + + return []ast.Option{newOpt} + } +} + +// FIXME: looks at the first arg only, no way to configure that right now +func StructFieldsAsArgumentsAction(explicitFields ...string) RewriteAction { + return func(option ast.Option) []ast.Option { + // TODO: handle the case where option.Args[0].Type is a KindRef. Follow the ref and keep working. + if len(option.Args) < 1 || option.Args[0].Type.Kind() != ast.KindStruct { + return []ast.Option{option} + } + + oldArgs := option.Args + oldAssignments := option.Assignments + assignmentPathPrefix := oldAssignments[0].Path + structType := option.Args[0].Type.(ast.StructType) + + newOpt := option + newOpt.Args = nil + newOpt.Assignments = nil + + for _, field := range structType.Fields { + if explicitFields != nil && !tools.ItemInList(field.Name, explicitFields) { + continue + } + + var constraints []ast.TypeConstraint + if scalarType, ok := field.Type.(ast.ScalarType); ok { + constraints = scalarType.Constraints + } + + newOpt.Args = append(newOpt.Args, ast.Argument{ + Name: field.Name, + Type: field.Type, + }) + + newOpt.Assignments = append(newOpt.Assignments, ast.Assignment{ + Path: assignmentPathPrefix + "." + field.Name, + ArgumentName: field.Name, + ValueType: field.Type, + Constraints: constraints, + IntoOptionalField: !field.Required, + }) + } + + if len(oldArgs) > 1 { + newOpt.Args = append(newOpt.Args, oldArgs[1:]...) + newOpt.Assignments = append(newOpt.Assignments, oldAssignments[1:]...) + } + + return []ast.Option{newOpt} + } +} + +type BooleanUnfold struct { + OptionTrue string + OptionFalse string +} + +func UnfoldBooleanAction(unfoldOpts BooleanUnfold) RewriteAction { + return func(option ast.Option) []ast.Option { + return []ast.Option{ + { + Name: unfoldOpts.OptionTrue, + Comments: option.Comments, + Args: nil, + Assignments: []ast.Assignment{ + { + Path: option.Assignments[0].Path, + ValueType: option.Assignments[0].ValueType, + IntoOptionalField: option.Assignments[0].IntoOptionalField, + Value: true, + }, + }, + // TODO: default + }, + + { + Name: unfoldOpts.OptionFalse, + Comments: option.Comments, + Args: nil, + Assignments: []ast.Assignment{ + { + Path: option.Assignments[0].Path, + ValueType: option.Assignments[0].ValueType, + IntoOptionalField: option.Assignments[0].IntoOptionalField, + Value: false, + }, + }, + // TODO: default + }, + } + } +} diff --git a/internal/veneers/option/rules.go b/internal/veneers/option/rules.go new file mode 100644 index 000000000..0e4e7c8ff --- /dev/null +++ b/internal/veneers/option/rules.go @@ -0,0 +1,48 @@ +package option + +type RewriteRule struct { + Selector Selector + Action RewriteAction +} + +func Rename(selector Selector, newName string) RewriteRule { + return RewriteRule{ + Selector: selector, + Action: RenameAction(newName), + } +} + +func ArrayToAppend(selector Selector) RewriteRule { + return RewriteRule{ + Selector: selector, + Action: ArrayToAppendAction(), + } +} + +func Omit(selector Selector) RewriteRule { + return RewriteRule{ + Selector: selector, + Action: OmitAction(), + } +} + +func UnfoldBoolean(selector Selector, unfoldOpts BooleanUnfold) RewriteRule { + return RewriteRule{ + Selector: selector, + Action: UnfoldBooleanAction(unfoldOpts), + } +} + +func 
PromoteToConstructor(selector Selector) RewriteRule { + return RewriteRule{ + Selector: selector, + Action: PromoteToConstructorAction(), + } +} + +func StructFieldsAsArguments(selector Selector, explicitFields ...string) RewriteRule { + return RewriteRule{ + Selector: selector, + Action: StructFieldsAsArgumentsAction(explicitFields...), + } +} diff --git a/internal/veneers/option/selectors.go b/internal/veneers/option/selectors.go new file mode 100644 index 000000000..4a91bd1bf --- /dev/null +++ b/internal/veneers/option/selectors.go @@ -0,0 +1,19 @@ +package option + +import ( + "github.com/grafana/cog/internal/ast" +) + +type Selector func(builder ast.Builder, option ast.Option) bool + +func ByName(objectName string, optionName string) Selector { + return func(builder ast.Builder, option ast.Option) bool { + return builder.For.Name == objectName && option.Name == optionName + } +} + +func EveryOption() Selector { + return func(builder ast.Builder, option ast.Option) bool { + return true + } +} diff --git a/internal/veneers/rewrite.go b/internal/veneers/rewrite.go new file mode 100644 index 000000000..b1b2a3323 --- /dev/null +++ b/internal/veneers/rewrite.go @@ -0,0 +1,78 @@ +package veneers + +import ( + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/veneers/builder" + "github.com/grafana/cog/internal/veneers/option" +) + +type Rewriter struct { + builderRules []builder.RewriteRule + optionRules []option.RewriteRule +} + +func NewRewrite(builderRules []builder.RewriteRule, optionRules []option.RewriteRule) *Rewriter { + return &Rewriter{ + builderRules: builderRules, + optionRules: optionRules, + } +} + +func (engine *Rewriter) ApplyTo(builders []ast.Builder) []ast.Builder { + newBuilders := make([]ast.Builder, 0, len(builders)) + + for _, b := range builders { + processed := engine.processBuilder(builders, b) + // the builder was dismissed + if len(processed.Options) == 0 { + continue + } + + newBuilders = append(newBuilders, processed) + } + + return newBuilders +} + +func (engine *Rewriter) processBuilder(builders ast.Builders, builder ast.Builder) ast.Builder { + processedBuilder := builder + + for _, rule := range engine.builderRules { + if rule.Selector(processedBuilder) { + // FIXME: passing `builders` here means that rules only get access to a "pre modification" + // set of builders. We should probably pass the most up-to-date list of builders + processedBuilder = rule.Action(builders, processedBuilder) + } + + // this builder is dismissed, let's return early + if len(processedBuilder.Options) == 0 { + return processedBuilder + } + } + + processedOptions := make([]ast.Option, 0, len(processedBuilder.Options)) + for _, opt := range processedBuilder.Options { + processedOptions = append(processedOptions, engine.processOption(processedBuilder, opt)...) + } + + processedBuilder.Options = processedOptions + + return processedBuilder +} + +func (engine *Rewriter) processOption(parentBuilder ast.Builder, opt ast.Option) []ast.Option { + toProcess := []ast.Option{opt} + for _, rule := range engine.optionRules { + if !rule.Selector(parentBuilder, opt) { + continue + } + + var wip []ast.Option + for _, modifiedField := range toProcess { + wip = append(wip, rule.Action(modifiedField)...) 
+ } + toProcess = wip + } + + return toProcess +} diff --git a/internal/veneers/veneers.go b/internal/veneers/veneers.go new file mode 100644 index 000000000..b55a37842 --- /dev/null +++ b/internal/veneers/veneers.go @@ -0,0 +1,107 @@ +package veneers + +import ( + "github.com/grafana/cog/internal/veneers/builder" + "github.com/grafana/cog/internal/veneers/option" +) + +func Engine() *Rewriter { + return NewRewrite( + []builder.RewriteRule{ + // We don't want these builders at all + builder.Omit(builder.ByName("GridPos")), + builder.Omit(builder.ByName("DataSourceRef")), + builder.Omit(builder.ByName("LibraryPanelRef")), + builder.Omit(builder.ByName("StringOrBool")), + builder.Omit(builder.ByName("StringOrArray")), + + // rearrange things a bit + builder.MergeInto( + builder.ByName("Panel"), + "FieldConfig", + "fieldConfig.defaults", + + []string{ + // don't copy these over as they clash with a similarly named options from Panel + "description", "links", + + // TODO: check if these are actually relevant + "displayNameFromDS", "filterable", "path", "writeable", + }, + ), + + // remove builders that were previously merged into something else + builder.Omit(builder.ByName("FieldConfig")), + builder.Omit(builder.ByName("FieldConfigSource")), + }, + + []option.RewriteRule{ + /******************************************** + * Dashboards + ********************************************/ + + // Let's make the dashboard constructor more friendly + option.PromoteToConstructor( + option.ByName("Dashboard", "title"), + ), + + // `Tooltip` looks better than `GraphTooltip` + option.Rename( + option.ByName("Dashboard", "graphTooltip"), + "tooltip", + ), + + // `panels` refers to RowPanel only for now + option.Rename( + option.ByName("Dashboard", "panels"), + "rows", + ), + /* + option.ArrayToAppend( + // FIXME: quirk of the current veneer rewrite engine + option.ByName("Dashboard", "panels"), + ), + */ + + // Editable() + Readonly() instead of Editable(val bool) + option.UnfoldBoolean( + option.ByName("Dashboard", "editable"), + option.BooleanUnfold{OptionTrue: "editable", OptionFalse: "readonly"}, + ), + + // Refresh(string) instead of Refresh(struct StringOrBool) + // FIXME: doesn't work (yet) since the argument is a reference to a struct and not a struct + option.StructFieldsAsArguments( + option.ByName("Dashboard", "refresh"), + "ValString", + ), + + // Time(from, to) instead of time(struct {From string `json:"from"`, To string `json:"to"`}{From: "lala", To: "lala}) + option.StructFieldsAsArguments( + option.ByName("Dashboard", "time"), + ), + + // We don't want these options at all + option.Omit(option.ByName("Dashboard", "schemaVersion")), + + /******************************************** + * Panels + ********************************************/ + + option.Omit(option.ByName("Panel", "id")), // generated by the backend + option.Omit(option.ByName("Panel", "fieldConfig")), // merged with another builder + option.Omit(option.ByName("Panel", "options")), // comes from a panel plugin + option.Omit(option.ByName("Panel", "custom")), // comes from a panel plugin + option.Omit(option.ByName("Panel", "pluginVersion")), // TODO: check if it's relevant or not + + /******************************************** + * Rows + ********************************************/ + + // Let's make the row constructor more friendly + option.PromoteToConstructor( + option.ByName("RowPanel", "title"), + ), + }, + ) +} diff --git a/sandbox/builder/main.go b/sandbox/builder/main.go new file mode 100644 index 000000000..474f55363 
--- /dev/null +++ b/sandbox/builder/main.go @@ -0,0 +1,46 @@ +package main + +import ( + "fmt" + + "github.com/grafana/cog/generated/dashboard/dashboard" + "github.com/grafana/cog/generated/dashboard/timepicker" + types "github.com/grafana/cog/generated/types/dashboard" +) + +func main() { + refresh := "1m" + + builder, err := dashboard.New( + "Some title", + dashboard.Uid("test-dashboard-codegen"), + dashboard.Description("Some description"), + dashboard.Time("now-3h", "now"), + dashboard.Timepicker( + timepicker.RefreshIntervals([]string{"30s", "1m", "5m"}), + ), + dashboard.Style(types.StyleDark), + dashboard.Timezone("utc"), + dashboard.Tooltip(types.Crosshair), + dashboard.Tags([]string{"generated", "from", "cue"}), + dashboard.Links([]types.DashboardLink{ + { + Title: "Some link", + Url: "http://google.com", + AsDropdown: false, + TargetBlank: true, + }, + }), + + dashboard.Refresh(types.StringOrBool{ValString: &refresh}), + ) + if err != nil { + panic(err) + } + + dashboardJson, err := builder.Internal().MarshalIndentJSON() + if err != nil { + panic(err) + } + fmt.Println(string(dashboardJson)) +} diff --git a/sandbox/codegen-cue/main.go b/sandbox/codegen-cue/main.go new file mode 100644 index 000000000..121623d1d --- /dev/null +++ b/sandbox/codegen-cue/main.go @@ -0,0 +1,202 @@ +package main + +import ( + "context" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "testing/fstest" + + "cuelang.org/go/cue/cuecontext" + "cuelang.org/go/cue/load" + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/jennies" + "github.com/grafana/cog/internal/simplecue" + "github.com/yalue/merged_fs" +) + +func main() { + entrypoints := []string{ + "./schemas/cue/core/dashboard/", + //"./schemas/cue/core/playlist/", + + "./schemas/cue/composable/timeseries/", + + "github.com/grafana/grafana/packages/grafana-schema/src/common", + } + + cueFsOverlay, err := buildCueOverlay() + if err != nil { + panic(err) + } + + allSchemas := make([]*ast.File, 0, len(entrypoints)) + for _, entrypoint := range entrypoints { + pkg := filepath.Base(entrypoint) + + // Load Cue files into Cue build.Instances slice + // the second arg is a configuration object, we'll see this later + bis := load.Instances([]string{entrypoint}, &load.Config{ + Overlay: cueFsOverlay, + //Module: "github.com/grafana/cog", // TODO: is that needed? + ModuleRoot: "/", + }) + + values, err := cuecontext.New().BuildInstances(bis) + if err != nil { + panic(err) + } + + schemaAst, err := simplecue.GenerateAST(values[0], simplecue.Config{ + Package: pkg, // TODO: extract from input schema/? 
+ }) + if err != nil { + panic(err) + } + + allSchemas = append(allSchemas, schemaAst) + } + + // Here begins the code generation setup + targetsByLanguage := jennies.All() + rootCodeJenFS := codejen.NewFS() + + for language, target := range targetsByLanguage { + fmt.Printf("Running '%s' jennies...\n", language) + + var err error + processedAsts := allSchemas + + for _, compilerPass := range target.CompilerPasses { + processedAsts, err = compilerPass.Process(processedAsts) + if err != nil { + panic(err) + } + } + + fs, err := target.Jennies.GenerateFS(processedAsts) + if err != nil { + panic(err) + } + + err = rootCodeJenFS.Merge(fs) + if err != nil { + panic(err) + } + } + + err = rootCodeJenFS.Write(context.Background(), "generated") + if err != nil { + panic(err) + } +} + +func buildCueOverlay() (map[string]load.Source, error) { + libFs, err := buildBaseFSWithLibraries() + if err != nil { + return nil, err + } + + overlay := make(map[string]load.Source) + if err := ToCueOverlay("/", libFs, overlay); err != nil { + return nil, err + } + + return overlay, nil +} + +func buildBaseFSWithLibraries() (fs.FS, error) { + // TODO: these should be received as inputs/arguments/parameters + importDefinitions := [][2]string{ + { + "github.com/grafana/grafana/packages/grafana-schema/src/common", + "../kind-registry/grafana/next/common", + }, + { + "github.com/grafana/cog", + ".", + }, + } + + var librariesFS []fs.FS + for _, importDefinition := range importDefinitions { + absPath, err := filepath.Abs(importDefinition[1]) + if err != nil { + return nil, err + } + + fmt.Printf("Loading '%s' module from '%s'\n", importDefinition[0], absPath) + + libraryFS, err := dirToPrefixedFS(absPath, "cue.mod/pkg/"+importDefinition[0]) + if err != nil { + return nil, err + } + + librariesFS = append(librariesFS, libraryFS) + } + + return merged_fs.MergeMultiple(librariesFS...), nil +} + +func dirToPrefixedFS(directory string, prefix string) (fs.FS, error) { + dirHandle, err := os.ReadDir(directory) + if err != nil { + return nil, err + } + + commonFS := fstest.MapFS{} + for _, file := range dirHandle { + if file.IsDir() { + continue + } + + content, err := os.ReadFile(filepath.Join(directory, file.Name())) + if err != nil { + return nil, err + } + + commonFS[filepath.Join(prefix, file.Name())] = &fstest.MapFile{Data: content} + } + + return commonFS, nil +} + +// ToOverlay converts an fs.FS into a CUE loader overlay. +func ToCueOverlay(prefix string, vfs fs.FS, overlay map[string]load.Source) error { + // TODO why not just stick the prefix on automatically...? 
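+	// (the CUE loader expects overlay entries to be keyed by absolute file paths,
+	// hence the check below)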
+ if !filepath.IsAbs(prefix) { + return fmt.Errorf("must provide absolute path prefix when generating cue overlay, got %q", prefix) + } + err := fs.WalkDir(vfs, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + if d.IsDir() { + return nil + } + + f, err := vfs.Open(path) + if err != nil { + return err + } + defer f.Close() // nolint: errcheck + + b, err := io.ReadAll(f) + if err != nil { + return err + } + + overlay[filepath.Join(prefix, path)] = load.FromBytes(b) + return nil + }) + + if err != nil { + return err + } + + return nil +} diff --git a/sandbox/codegen-jsonschema/main.go b/sandbox/codegen-jsonschema/main.go new file mode 100644 index 000000000..5684b75f4 --- /dev/null +++ b/sandbox/codegen-jsonschema/main.go @@ -0,0 +1,72 @@ +package main + +import ( + "context" + "fmt" + "os" + "path/filepath" + + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/jennies" + "github.com/grafana/cog/internal/jsonschema" +) + +func main() { + entrypoints := []string{ + "./schemas/jsonschema/core/playlist/playlist.json", + "./schemas/jsonschema/core/dockerd/dockerd.json", + } + + allSchemas := make([]*ast.File, 0, len(entrypoints)) + for _, entrypoint := range entrypoints { + pkg := filepath.Base(filepath.Dir(entrypoint)) + + reader, err := os.Open(entrypoint) + if err != nil { + panic(err) + } + + schemaAst, err := jsonschema.GenerateAST(reader, jsonschema.Config{ + Package: pkg, // TODO: extract from input schema/folder? + }) + if err != nil { + panic(err) + } + + allSchemas = append(allSchemas, schemaAst) + } + + // Here begins the code generation setup + targetsByLanguage := jennies.All() + rootCodeJenFS := codejen.NewFS() + + for language, target := range targetsByLanguage { + fmt.Printf("Running '%s' jennies...\n", language) + + var err error + processedAsts := allSchemas + + for _, compilerPass := range target.CompilerPasses { + processedAsts, err = compilerPass.Process(processedAsts) + if err != nil { + panic(err) + } + } + + fs, err := target.Jennies.GenerateFS(processedAsts) + if err != nil { + panic(err) + } + + err = rootCodeJenFS.Merge(fs) + if err != nil { + panic(err) + } + } + + err := rootCodeJenFS.Write(context.Background(), "generated") + if err != nil { + panic(err) + } +} diff --git a/sandbox/codegen-kindsys-custom/main.go b/sandbox/codegen-kindsys-custom/main.go new file mode 100644 index 000000000..ec2541201 --- /dev/null +++ b/sandbox/codegen-kindsys-custom/main.go @@ -0,0 +1,117 @@ +package main + +import ( + "context" + "fmt" + "io/fs" + "os" + "path/filepath" + "testing/fstest" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/cuecontext" + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/jennies" + "github.com/grafana/cog/internal/simplecue" + "github.com/grafana/kindsys" + "github.com/grafana/thema" +) + +func main() { + themaRuntime := thema.NewRuntime(cuecontext.New()) + + entrypoints := []string{"./schemas/kindsys/custom/slo"} + pkg := "slo" + + overlayFS, err := dirToPrefixedFS(entrypoints[0], "") + if err != nil { + panic(err) + } + + cueInstance, err := kindsys.BuildInstance(themaRuntime.Context(), ".", pkg, overlayFS) + if err != nil { + panic(fmt.Errorf("could not load kindsys instance: %w", err)) + } + + props, err := kindsys.ToKindProps[kindsys.CustomProperties](cueInstance) + if err != nil { + panic(fmt.Errorf("could not convert cue value to kindsys props: %w", err)) + } + + kindDefinition := 
kindsys.Def[kindsys.CustomProperties]{ + V: cueInstance, + Properties: props, + } + + boundKind, err := kindsys.BindCustom(themaRuntime, kindDefinition) + if err != nil { + panic(fmt.Errorf("could not bind kind definition to kind: %w", err)) + } + + rawLatestSchemaAsCue := boundKind.Lineage().Latest().Underlying() + latestSchemaAsCue := rawLatestSchemaAsCue.LookupPath(cue.MakePath(cue.Hid("_#schema", "github.com/grafana/thema"))) + + schemaAst, err := simplecue.GenerateAST(latestSchemaAsCue, simplecue.Config{ + Package: pkg, // TODO: extract from input schema/folder? + }) + if err != nil { + panic(err) + } + + // Here begins the code generation setup + targetsByLanguage := jennies.All() + rootCodeJenFS := codejen.NewFS() + + for language, target := range targetsByLanguage { + fmt.Printf("Running '%s' jennies...\n", language) + + var err error + processedAst := []*ast.File{schemaAst} + + for _, compilerPass := range target.CompilerPasses { + processedAst, err = compilerPass.Process(processedAst) + if err != nil { + panic(err) + } + } + + targetFs, err := target.Jennies.GenerateFS(processedAst) + if err != nil { + panic(err) + } + + err = rootCodeJenFS.Merge(targetFs) + if err != nil { + panic(err) + } + } + + err = rootCodeJenFS.Write(context.Background(), "generated") + if err != nil { + panic(err) + } +} + +func dirToPrefixedFS(directory string, prefix string) (fs.FS, error) { + dirHandle, err := os.ReadDir(directory) + if err != nil { + return nil, err + } + + commonFS := fstest.MapFS{} + for _, file := range dirHandle { + if file.IsDir() { + continue + } + + content, err := os.ReadFile(filepath.Join(directory, file.Name())) + if err != nil { + return nil, err + } + + commonFS[filepath.Join(prefix, file.Name())] = &fstest.MapFile{Data: content} + } + + return commonFS, nil +} diff --git a/sandbox/codegen-kindsys/main.go b/sandbox/codegen-kindsys/main.go new file mode 100644 index 000000000..b1e45d5fc --- /dev/null +++ b/sandbox/codegen-kindsys/main.go @@ -0,0 +1,117 @@ +package main + +import ( + "context" + "fmt" + "io/fs" + "os" + "path/filepath" + "testing/fstest" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/cuecontext" + "github.com/grafana/codejen" + "github.com/grafana/cog/internal/ast" + "github.com/grafana/cog/internal/jennies" + "github.com/grafana/cog/internal/simplecue" + "github.com/grafana/kindsys" + "github.com/grafana/thema" +) + +func main() { + themaRuntime := thema.NewRuntime(cuecontext.New()) + + entrypoints := []string{"./schemas/kindsys/core/dashboard"} + pkg := "dashboard" + + overlayFS, err := dirToPrefixedFS(entrypoints[0], "") + if err != nil { + panic(err) + } + + cueInstance, err := kindsys.BuildInstance(themaRuntime.Context(), ".", "kind", overlayFS) + if err != nil { + panic(fmt.Errorf("could not load kindsys instance: %w", err)) + } + + props, err := kindsys.ToKindProps[kindsys.CoreProperties](cueInstance) + if err != nil { + panic(fmt.Errorf("could not convert cue value to kindsys props: %w", err)) + } + + kindDefinition := kindsys.Def[kindsys.CoreProperties]{ + V: cueInstance, + Properties: props, + } + + boundKind, err := kindsys.BindCore(themaRuntime, kindDefinition) + if err != nil { + panic(fmt.Errorf("could not bind kind definition to kind: %w", err)) + } + + rawLatestSchemaAsCue := boundKind.Lineage().Latest().Underlying() + latestSchemaAsCue := rawLatestSchemaAsCue.LookupPath(cue.MakePath(cue.Hid("_#schema", "github.com/grafana/thema"))) + + schemaAst, err := simplecue.GenerateAST(latestSchemaAsCue, simplecue.Config{ + Package: pkg, // TODO: 
extract from input schema/folder? + }) + if err != nil { + panic(err) + } + + // Here begins the code generation setup + targetsByLanguage := jennies.All() + rootCodeJenFS := codejen.NewFS() + + for language, target := range targetsByLanguage { + fmt.Printf("Running '%s' jennies...\n", language) + + var err error + processedAst := []*ast.File{schemaAst} + + for _, compilerPass := range target.CompilerPasses { + processedAst, err = compilerPass.Process(processedAst) + if err != nil { + panic(err) + } + } + + targetFs, err := target.Jennies.GenerateFS(processedAst) + if err != nil { + panic(err) + } + + err = rootCodeJenFS.Merge(targetFs) + if err != nil { + panic(err) + } + } + + err = rootCodeJenFS.Write(context.Background(), "generated") + if err != nil { + panic(err) + } +} + +func dirToPrefixedFS(directory string, prefix string) (fs.FS, error) { + dirHandle, err := os.ReadDir(directory) + if err != nil { + return nil, err + } + + commonFS := fstest.MapFS{} + for _, file := range dirHandle { + if file.IsDir() { + continue + } + + content, err := os.ReadFile(filepath.Join(directory, file.Name())) + if err != nil { + return nil, err + } + + commonFS[filepath.Join(prefix, file.Name())] = &fstest.MapFile{Data: content} + } + + return commonFS, nil +} diff --git a/schemas/cue/composable/timeseries/timeseries.cue b/schemas/cue/composable/timeseries/timeseries.cue new file mode 100644 index 000000000..29f9194c3 --- /dev/null +++ b/schemas/cue/composable/timeseries/timeseries.cue @@ -0,0 +1,12 @@ +package timeseries + +import ( + "github.com/grafana/grafana/packages/grafana-schema/src/common" +) + +Options: common.OptionsWithTimezones & { + legend: common.VizLegendOptions + tooltip: common.VizTooltipOptions +} + +FieldConfig: common.GraphFieldConfig diff --git a/schemas/cue/core/dashboard/dashboard.cue b/schemas/cue/core/dashboard/dashboard.cue new file mode 100644 index 000000000..25481ada6 --- /dev/null +++ b/schemas/cue/core/dashboard/dashboard.cue @@ -0,0 +1,703 @@ +package dashboard + +import ( + "strings" +) + +// This is a dashboard. +Dashboard: { + // Unique numeric identifier for the dashboard. + // `id` is internal to a specific Grafana instance. `uid` should be used to identify a dashboard across Grafana instances. + id?: int64 | null + + // Unique dashboard identifier that can be generated by anyone. string (8-40) + uid?: string + + // Title of dashboard. + title?: string + + // Description of dashboard. + description?: string + + // This property should only be used in dashboards defined by plugins. It is a quick check + // to see if the version has changed since the last time. + revision?: int64 + + // ID of a dashboard imported from the https://grafana.com/grafana/dashboards/ portal + gnetId?: string + + // Tags associated with dashboard. + tags?: [...string] + + // Theme of dashboard. + style: "light" | *"dark" + + // Timezone of dashboard. Accepted values are IANA TZDB zone ID or "browser" or "utc". + timezone?: string | *"browser" + + // Whether a dashboard is editable or not. + editable: bool | *true + + // Configuration of dashboard cursor sync behavior. + // Accepted values are 0 (sync turned off), 1 (shared crosshair), 2 (shared crosshair and tooltip). + graphTooltip: #DashboardCursorSync + + // Time range for dashboard. + // Accepted values are relative time strings like {from: 'now-6h', to: 'now'} or absolute time strings like {from: '2020-07-10T08:00:00.000Z', to: '2020-07-10T14:00:00.000Z'}. 
+ time?: { + from: string | *"now-6h" + to: string | *"now" + } + + // Configuration of the time picker shown at the top of a dashboard. + timepicker?: #TimePicker + + // The month that the fiscal year starts on. 0 = January, 11 = December + fiscalYearStartMonth?: uint8 & <12 | *0 + + // When set to true, the dashboard will redraw panels at an interval matching the pixel width. + // This will keep data "moving left" regardless of the query refresh rate. This setting helps + // avoid dashboards presenting stale live data + liveNow?: bool + + // Day when the week starts. Expressed by the name of the day in lowercase, e.g. "monday". + weekStart?: string + + // Refresh rate of dashboard. Represented via interval string, e.g. "5s", "1m", "1h", "1d". + refresh?: string | false + + // Version of the JSON schema, incremented each time a Grafana update brings + // changes to said schema. + schemaVersion: uint16 | *36 + + // Version of the dashboard, incremented each time the dashboard is updated. + version?: uint32 + + // List of dashboard panels + panels?: [...#RowPanel] + + // Configured template variables + templating?: #DashboardTemplating + + // Contains the list of annotations that are associated with the dashboard. + // Annotations are used to overlay event markers and overlay event tags on graphs. + // Grafana comes with a native annotation store and the ability to add annotation events directly from the graph panel or via the HTTP API. + // See https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/annotate-visualizations/ + annotations?: #AnnotationContainer + + // Links with references to other dashboards or external websites. + links?: [...#DashboardLink] +} + +#DashboardStyle: "light" | "dark" @cog(kind="enum") + +#DashboardTemplating: { + // List of configured template variables with their saved values along with some other metadata + list?: [...#VariableModel] +} + +#TimePicker: { + // Whether timepicker is visible or not. + hidden: bool | *false + // Interval options available in the refresh picker dropdown. + refresh_intervals: [...string] | *["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"] + // Whether timepicker is collapsed or not. Has no effect on provisioned dashboard. + collapse: bool | *false + // Whether timepicker is enabled or not. Has no effect on provisioned dashboard. + enable: bool | *true + // Selectable options available in the time picker dropdown. Has no effect on provisioned dashboard. + time_options: [...string] | *["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] +} + +#TimeInterval: { + from: string | *"now-6h" + to: string | *"now" +} + +// TODO: this should be a regular DataQuery that depends on the selected dashboard +// these match the properties of the "grafana" datasource that is default in most dashboards +#AnnotationTarget: { + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + limit: int64 + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + matchAny: bool + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + tags: [...string] + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + type: string + ... 
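+	// (the trailing `...` keeps this struct open, so datasource-specific fields that are not declared here remain valid)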
+} + +#AnnotationPanelFilter: { + // Should the specified panels be included or excluded + exclude?: bool | *false + + // Panel IDs that should be included or excluded + ids: [...uint8] +} + +// Contains the list of annotations that are associated with the dashboard. +// Annotations are used to overlay event markers and overlay event tags on graphs. +// Grafana comes with a native annotation store and the ability to add annotation events directly from the graph panel or via the HTTP API. +// See https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/annotate-visualizations/ +#AnnotationContainer: { + // List of annotations + list?: [...#AnnotationQuery] +} + +// TODO docs +// FROM: AnnotationQuery in grafana-data/src/types/annotations.ts +#AnnotationQuery: { + // Name of annotation. + name: string + + // Datasource where the annotations data is + datasource: #DataSourceRef + + // When enabled the annotation query is issued with every dashboard refresh + enable: bool | *true + + // Annotation queries can be toggled on or off at the top of the dashboard. + // When hide is true, the toggle is not shown in the dashboard. + hide?: bool | *false + + // Color to use for the annotation event markers + iconColor: string + + // Filters to apply when fetching annotations + filter?: #AnnotationPanelFilter + + // TODO.. this should just be a normal query target + target?: #AnnotationTarget + + // TODO -- this should not exist here, it is based on the --grafana-- datasource + type?: string + ... +} + +// A variable is a placeholder for a value. You can use variables in metric queries and in panel titles. +#VariableModel: { + // Unique numeric identifier for the variable. + id: string | *"00000000-0000-0000-0000-000000000000" + // Type of variable + type: #VariableType + // Name of variable + name: string + // Optional display name + label?: string + // Visibility configuration for the variable + hide: #VariableHide + // Whether the variable value should be managed by URL query params or not + skipUrlSync: bool | *false + // Description of variable. It can be defined but `null`. + description?: string + // Query used to fetch values for a variable + query?: { ... } + // Data source used to fetch values for a variable. It can be defined but `null`. + datasource?: #DataSourceRef + // Format to use while fetching all values from data source, eg: wildcard, glob, regex, pipe, etc. + allFormat?: string + // Shows current selected variable text/value on the dashboard + current?: #VariableOption + // Whether multiple values can be selected or not from variable value list + multi?: bool | *false + // Options that can be selected for a variable. + options?: [...#VariableOption] + refresh?: #VariableRefresh +} + +// Option to be selected in a variable. +#VariableOption: { + // Whether the option is selected or not + selected?: bool + // Text to be displayed for the option + text: string | [...string] + // Value of the option + value: string | [...string] +} + +// Options to config when to refresh a variable +// `0`: Never refresh the variable +// `1`: Queries the data source every time the dashboard loads. +// `2`: Queries the data source when the dashboard time range changes. +#VariableRefresh: 0 | 1 | 2 @cog(kind="enum",memberNames="never|onDashboardLoad|onTimeRangeChanged") + +// Determine if the variable shows on dashboard +// Accepted values are 0 (show label and value), 1 (show value only), 2 (show nothing). 
+#VariableHide: 0 | 1 | 2 @cog(kind="enum",memberNames="dontHide|hideLabel|hideVariable") + +// Sort variable options +// Accepted values are: +// `0`: No sorting +// `1`: Alphabetical ASC +// `2`: Alphabetical DESC +// `3`: Numerical ASC +// `4`: Numerical DESC +// `5`: Alphabetical Case Insensitive ASC +// `6`: Alphabetical Case Insensitive DESC +#VariableSort: 0 | 1 | 2 | 3 | 4 | 5 | 6 @cog(kind="enum",memberNames="disabled|alphabeticalAsc|alphabeticalDesc|numericalAsc|numericalDesc|alphabeticalCaseInsensitiveAsc|alphabeticalCaseInsensitiveDesc") + +// Loading status +// Accepted values are `NotStarted` (the request is not started), `Loading` (waiting for response), `Streaming` (pulling continuous data), `Done` (response received successfully) or `Error` (failed request). +#LoadingState: "NotStarted" | "Loading" | "Streaming" | "Done" | "Error" @cog(kind="enum") + +// Ref to a DataSource instance +#DataSourceRef: { + // The plugin type-id + type?: string + + // Specific datasource instance + uid?: string +} + +// Links with references to other dashboards or external resources +#DashboardLink: { + // Title to display with the link + title: string + // Link type. Accepted values are dashboards (to refer to another dashboard) and link (to refer to an external resource) + type: #DashboardLinkType + // Icon name to be displayed with the link + icon: string + // Tooltip to display when the user hovers their mouse over it + tooltip: string + // Link URL. Only required/valid if the type is link + url: string + // List of tags to limit the linked dashboards. If empty, all dashboards will be displayed. Only valid if the type is dashboards + tags: [...string] + // If true, all dashboards links will be displayed in a dropdown. If false, all dashboards links will be displayed side by side. Only valid if the type is dashboards + asDropdown: bool | *false + // If true, the link will be opened in a new tab + targetBlank: bool | *false + // If true, includes current template variables values in the link as query params + includeVars: bool | *false + // If true, includes current time range in the link as query params + keepTime: bool | *false +} + +// Dashboard Link type. Accepted values are dashboards (to refer to another dashboard) and link (to refer to an external resource) +#DashboardLinkType: "link" | "dashboards" @cog(kind="enum") + +// Dashboard variable type +// `query`: Query-generated list of values such as metric names, server names, sensor IDs, data centers, and so on. +// `adhoc`: Key/value filters that are automatically added to all metric queries for a data source (Prometheus, Loki, InfluxDB, and Elasticsearch only). +// `constant`: Define a hidden constant. +// `datasource`: Quickly change the data source for an entire dashboard. +// `interval`: Interval variables represent time spans. +// `textbox`: Display a free text input field with an optional default value. +// `custom`: Define the variable options manually using a comma-separated list. +// `system`: Variables defined by Grafana. See: https://grafana.com/docs/grafana/latest/dashboards/variables/add-template-variables/#global-variables +#VariableType: "query" | "adhoc" | "constant" | "datasource" | "interval" | "textbox" | "custom" | "system" @cog(kind="enum") + +// Color mode for a field. You can specify a single color, or select a continuous (gradient) color schemes, based on a value. +// Continuous color interpolates a color using the percentage of a value relative to min and max. 
+// Accepted values are: +// `thresholds`: From thresholds. Informs Grafana to take the color from the matching threshold +// `palette-classic`: Classic palette. Grafana will assign color by looking up a color in a palette by series index. Useful for Graphs and pie charts and other categorical data visualizations +// `palette-classic-by-name`: Classic palette (by name). Grafana will assign color by looking up a color in a palette by series name. Useful for Graphs and pie charts and other categorical data visualizations +// `continuous-GrYlRd`: Continuous Green-Yellow-Red palette mode +// `continuous-RdYlGr`: Continuous Red-Yellow-Green palette mode +// `continuous-BlYlRd`: Continuous Blue-Yellow-Red palette mode +// `continuous-YlRd`: Continuous Yellow-Red palette mode +// `continuous-BlPu`: Continuous Blue-Purple palette mode +// `continuous-YlBl`: Continuous Yellow-Blue palette mode +// `continuous-blues`: Continuous Blue palette mode +// `continuous-reds`: Continuous Red palette mode +// `continuous-greens`: Continuous Green palette mode +// `continuous-purples`: Continuous Purple palette mode +// `shades`: Shades of a single color. Specify a single color, useful in an override rule. +// `fixed`: Fixed color mode. Specify a single color, useful in an override rule. +#FieldColorModeId: "thresholds" | "palette-classic" | "palette-classic-by-name" | "continuous-GrYlRd" | "continuous-RdYlGr" | "continuous-BlYlRd" | "continuous-YlRd" | "continuous-BlPu" | "continuous-YlBl" | "continuous-blues" | "continuous-reds" | "continuous-greens" | "continuous-purples" | "fixed" | "shades" @cog(kind="enum",memberNames="Thresholds|PaletteClassic|PaletteClassicByName|ContinuousGrYlRd|ContinuousRdYlGr|ContinuousBlYlRd|ContinuousYlRd|ContinuousBlPu|ContinuousYlBl|ContinuousBlues|ContinuousReds|ContinuousGreens|ContinuousPurples|Fixed|Shades") + +// Defines how to assign a series color from "by value" color schemes. For example, for aggregated data points like a time series, the color can be assigned by the min, max or last value. +#FieldColorSeriesByMode: "min" | "max" | "last" @cog(kind="enum") + +// Map a field to a color. +#FieldColor: { + // The main color scheme mode. + mode: #FieldColorModeId + // The fixed color value for fixed or shades color modes. + fixedColor?: string + // Some visualizations need to know how to assign a series color from "by value" color schemes. + seriesBy?: #FieldColorSeriesByMode +} + +// Position and dimensions of a panel in the grid +#GridPos: { + // Panel height. The height is the number of rows from the top edge of the panel. + h: uint32 & >0 | *9 + // Panel width. The width is the number of columns from the left edge of the panel. + w: uint32 & >0 & <=24 | *12 + // Panel x. The x coordinate is the number of columns from the left edge of the grid + x: uint32 & >=0 & <24 | *0 + // Panel y. The y coordinate is the number of rows from the top edge of the grid + y: uint32 & >=0 | *0 + // Whether the panel is fixed within the grid. If true, the panel will not be affected by other panels' interactions + static?: bool +} + +// User-defined value for a metric that triggers visual changes in a panel when this value is met or exceeded. +// Thresholds are used to conditionally style and color visualizations based on query results, and can be applied to most visualizations. +#Threshold: { + // Value represents a specified metric for the threshold, which triggers a visual change in the dashboard when this value is met or exceeded.
+ // Nulls currently appear here when serializing -Infinity to JSON. + value: number | null + // Color represents the color of the visual change that will occur in the dashboard when the threshold value is met or exceeded. + color: string +} + +// Thresholds can either be `absolute` (specific number) or `percentage` (relative to min or max, it will be values between 0 and 1). +#ThresholdsMode: "absolute" | "percentage" @cog(kind="enum",memberNames="Absolute|Percentage") + +// Thresholds configuration for the panel +#ThresholdsConfig: { + // Thresholds mode. + mode: #ThresholdsMode + + // Must be sorted by 'value', first value is always -Infinity + steps: [...#Threshold] +} + +// Supported value mapping types +// `value`: Maps text values to a color or different display text and color. For example, you can configure a value mapping so that all instances of the value 10 appear as Perfection! rather than the number. +// `range`: Maps numerical ranges to a display text and color. For example, if a value is within a certain range, you can configure a range value mapping to display Low or High rather than the number. +// `regex`: Maps regular expressions to replacement text and a color. For example, if a value is www.example.com, you can configure a regex value mapping so that Grafana displays www and truncates the domain. +// `special`: Maps special values like Null, NaN (not a number), and boolean values like true and false to a display text and color. See SpecialValueMatch to see the list of special values. For example, you can configure a special value mapping so that null values appear as N/A. +#MappingType: "value" | "range" | "regex" | "special" @cog(kind="enum",memberNames="ValueToText|RangeToText|RegexToText|SpecialValue") + +// Maps text values to a color or different display text and color. +// For example, you can configure a value mapping so that all instances of the value 10 appear as Perfection! rather than the number. +#ValueMap: { + type: #MappingType & { + "value" + } + // Map with : ValueMappingResult. For example: { "10": { text: "Perfection!", color: "green" } } + options: { + [string]: #ValueMappingResult + } +} + +// Maps numerical ranges to a display text and color. +// For example, if a value is within a certain range, you can configure a range value mapping to display Low or High rather than the number. +#RangeMap: { + type: #MappingType & { + "range" + } + // Range to match against and the result to apply when the value is within the range + options: { + // Min value of the range. It can be null which means -Infinity + from: float64 | null + // Max value of the range. It can be null which means +Infinity + to: float64 | null + // Config to apply when the value is within the range + result: #ValueMappingResult + } +} + +// Maps regular expressions to replacement text and a color. +// For example, if a value is www.example.com, you can configure a regex value mapping so that Grafana displays www and truncates the domain. +#RegexMap: { + type: #MappingType & { + "regex" + } + // Regular expression to match against and the result to apply when the value matches the regex + options: { + // Regular expression to match against + pattern: string + // Config to apply when the value matches the regex + result: #ValueMappingResult + } +} + +// Maps special values like Null, NaN (not a number), and boolean values like true and false to a display text and color. +// See SpecialValueMatch to see the list of special values. 
+// For example, you can configure a special value mapping so that null values appear as N/A. +#SpecialValueMap: { + type: #MappingType & { + "special" + } + options: { + // Special value to match against + match: #SpecialValueMatch + // Config to apply when the value matches the special value + result: #ValueMappingResult + } +} + +// Special value types supported by the `SpecialValueMap` +#SpecialValueMatch: "true" | "false" | "null" | "nan" | "null+nan" | "empty" @cog(kind="enum",memberNames="True|False|Null|NaN|NullAndNan|Empty") + +// Result used as replacement with text and color when the value matches +#ValueMappingResult: { + // Text to display when the value matches + text?: string + // Text to use when the value matches + color?: string + // Icon to display when the value matches. Only specific visualizations. + icon?: string + // Position in the mapping array. Only used internally. + index?: int32 +} + +// Transformations allow to manipulate data returned by a query before the system applies a visualization. +// Using transformations you can: rename fields, join time series data, perform mathematical operations across queries, +// use the output of one transformation as the input to another transformation, etc. +#DataTransformerConfig: { + // Unique identifier of transformer + id: string + // Disabled transformations are skipped + disabled?: bool + // Optional frame matcher. When missing it will be applied to all results + filter?: #MatcherConfig + // Options to be passed to the transformer + // Valid options depend on the transformer id + options: _ +} + +// 0 for no shared crosshair or tooltip (default). +// 1 for shared crosshair. +// 2 for shared crosshair AND shared tooltip. +#DashboardCursorSync: *0 | 1 | 2 @cog(kind="enum",memberNames="Off|Crosshair|Tooltip") + +// Schema for panel targets is specified by datasource +// plugins. We use a placeholder definition, which the Go +// schema loader either left open/as-is with the Base +// variant of the Dashboard and Panel families, or filled +// with types derived from plugins in the Instance variant. +// When working directly from CUE, importers can extend this +// type directly to achieve the same effect. +#Target: { + ... +} + +// Dashboard panels are the basic visualization building blocks. +#Panel: { + // The panel plugin type id. This is used to find the plugin to display the panel. + type: strings.MinRunes(1) & { + string + } + + // Unique identifier of the panel. Generated by Grafana when creating a new panel. It must be unique within a dashboard, but not globally. + id?: uint32 + + // The version of the plugin that is used for this panel. This is used to find the plugin to display the panel and to migrate old panel configs. + pluginVersion?: string + + // Tags for the panel. + tags?: [...string] + + // Depends on the panel plugin. See the plugin documentation for details. + targets?: [...#Target] + + // Panel title. + title?: string + + // Panel description. + description?: string + + // Whether to display the panel without a background. + transparent: bool | *false + + // The datasource used in all targets. + datasource?: #DataSourceRef + + // Grid position. + gridPos?: #GridPos + + // Panel links. + links?: [...#DashboardLink] + + // Name of template variable to repeat for. + repeat?: string + + // Direction to repeat in if 'repeat' is set. + // `h` for horizontal, `v` for vertical. + repeatDirection?: #PanelRepeatDirection + + // Id of the repeating panel. 
+ repeatPanelId?: int64 + + // The maximum number of data points that the panel queries are retrieving. + maxDataPoints?: number + + // List of transformations that are applied to the panel data before rendering. + // When there are multiple transformations, Grafana applies them in the order they are listed. + // Each transformation creates a result set that then passes on to the next transformation in the processing pipeline. + transformations: [...#DataTransformerConfig] + + // The min time interval setting defines a lower limit for the $__interval and $__interval_ms variables. + // This value must be formatted as a number followed by a valid time + // identifier like: "40s", "3d", etc. + // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options + interval?: string + + // Overrides the relative time range for individual panels, + // which causes them to be different than what is selected in + // the dashboard time picker in the top-right corner of the dashboard. You can use this to show metrics from different + // time periods or days on the same dashboard. + // The value is formatted as time operation like: `now-5m` (Last 5 minutes), `now/d` (the day so far), + // `now-5d/d`(Last 5 days), `now/w` (This week so far), `now-2y/y` (Last 2 years). + // Note: Panel time overrides have no effect when the dashboard’s time range is absolute. + // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options + timeFrom?: string + + // Overrides the time range for individual panels by shifting its start and end relative to the time picker. + // For example, you can shift the time range for the panel to be two hours earlier than the dashboard time picker setting `2h`. + // Note: Panel time overrides have no effect when the dashboard’s time range is absolute. + // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options + timeShift?: string + + // Dynamically load the panel + libraryPanel?: #LibraryPanelRef + + // It depends on the panel plugin. They are specified by the Options field in panel plugin schemas. + options: { + ... + } + + // Field options allow you to change how the data is displayed in your visualizations. + fieldConfig: #FieldConfigSource +} + +#PanelRepeatDirection: "h" | "v" @cog(kind="enum",memberNames="horizontal|vertical") + +// The data model used in Grafana, namely the data frame, is a columnar-oriented table structure that unifies both time series and table query results. +// Each column within this structure is called a field. A field can represent a single time series or table column. +// Field options allow you to change how the data is displayed in your visualizations. +#FieldConfigSource: { + // Defaults are the options applied to all fields. + defaults: #FieldConfig + // Overrides are the options applied to specific fields overriding the defaults. + overrides: [...#FieldConfigSourceOverride] +} + +#FieldConfigSourceOverride: { + matcher: #MatcherConfig + properties: [...#DynamicConfigValue] +} + +// A library panel is a reusable panel that you can use in any dashboard. +// When you make a change to a library panel, that change propagates to all instances of where the panel is used. +// Library panels streamline reuse of panels across multiple dashboards. +#LibraryPanelRef: { + // Library panel name + name: string + // Library panel uid + uid: string +} + +// Matcher is a predicate configuration. 
Based on the config, a set of fields or values is filtered in order to apply an override or transformation. +// It comes with an id (to resolve the implementation from the registry) and a configuration that's specific to a particular matcher type. +#MatcherConfig: { + // The matcher id. This is used to find the matcher implementation from the registry. + id: string | *"" + // The matcher options. This is specific to the matcher implementation. + options?: _ +} + +#DynamicConfigValue: { + id: string | *"" + value?: _ +} + +// The data model used in Grafana, namely the data frame, is a columnar-oriented table structure that unifies both time series and table query results. +// Each column within this structure is called a field. A field can represent a single time series or table column. +// Field options allow you to change how the data is displayed in your visualizations. +#FieldConfig: { + // The display value for this field. This supports template variables; if blank, the name is derived automatically. + displayName?: string + + // This can be used by data sources that return an explicit naming structure for values and labels. + // When this property is configured, this value is used rather than the default naming strategy. + displayNameFromDS?: string + + // Human readable field metadata + description?: string + + // An explicit path to the field in the datasource. When the frame meta includes a path, + // this will default to `${frame.meta.path}/${field.name}`. + // + // When defined, this value can be used as an identifier within the datasource scope, and + // may be used to update the results + path?: string + + // True if data source can write a value to the path. Auth/authz are supported separately + writeable?: bool + + // True if data source field supports ad-hoc filters + filterable?: bool + + // Unit a field should use. The unit you select is applied to all fields except time. + // You can use the unit IDs available in Grafana or a custom unit. + // Available units in Grafana: https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts + // As a custom unit, you can use the following formats: + // `suffix:` for a custom unit that should go after the value. + // `prefix:` for a custom unit that should go before the value. + // `time:` for custom date/time formats, for example `time:YYYY-MM-DD`. + // `si:` for custom SI units. For example: `si: mF`. This one is a bit more advanced as you can specify both a unit and the source data scale. So if your source data is represented as milli (thousandths of) something, prefix the unit with that SI scale character. + // `count:` for a custom count unit. + // `currency:` for a custom currency unit. + unit?: string + + // Specify the number of decimals Grafana includes in the rendered value. + // If you leave this field blank, Grafana automatically truncates the number of decimals based on the value. + // For example 1.1234 will display as 1.12 and 100.456 will display as 100. + // To display all decimals, set the unit to `String`. + decimals?: number + + // The minimum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields. + min?: number + // The maximum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields.
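+	// Illustrative example (not part of the schema): with min: 0 and max: 200, a percentage-mode threshold step of 50 corresponds to the value 100.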
+ max?: number + + // Convert input values into a display string + mappings?: [...(#ValueMap | #RangeMap | #RegexMap | #SpecialValueMap)] + + // Map numeric values to states + thresholds?: #ThresholdsConfig + + // Panel color configuration + color?: #FieldColor + + // The behavior when clicking on a result + links?: [...] + + // Text to display when there is no value (alternative to an empty string) + noValue?: string + + // custom is specified by the FieldConfig field + // in panel plugin schemas. + custom?: { + ... + } +} + +// Row panel +#RowPanel: { + // The panel type + type: "row" + + // Whether this row should be collapsed or not. + collapsed: bool | *false + + // Row title + title?: string + + // Name of default datasource for the row + datasource?: #DataSourceRef + + // Row grid position + gridPos?: #GridPos + + // Unique identifier of the panel. Generated by Grafana when creating a new panel. It must be unique within a dashboard, but not globally. + id: uint32 + + // List of panels in the row + panels: [...#Panel] + + // Name of template variable to repeat for. + repeat?: string +} diff --git a/schemas/cue/core/playlist/playlist.cue b/schemas/cue/core/playlist/playlist.cue new file mode 100644 index 000000000..7ca8aaace --- /dev/null +++ b/schemas/cue/core/playlist/playlist.cue @@ -0,0 +1,40 @@ +package playlist + +Playlist: { + // Name of the playlist. + name: string + + // Interval sets the time between switching views in a playlist. + // FIXME: Is this based on a standardized format or what options are available? Can datemath be used? + interval: string | *"5m" + + // The ordered list of items that the playlist will iterate over. + // FIXME! This should not be optional, but changing it makes the codegen awkward + items?: [...#PlaylistItem] + + // Adding a required new field... + // This is only here so that thema breaking change detection allows + // defining this as a new major version + xxx: string +} + +/////////////////////////////////////// +// Definitions (referenced above) are declared below + +#PlaylistItem: { + // Type of the item. + type: "dashboard_by_uid" | "dashboard_by_id" | "dashboard_by_tag" + + // Value depends on type and describes the playlist item. + // + // - dashboard_by_id: The value is an internal numerical identifier set by Grafana. This + // is not portable as the numerical identifier is non-deterministic between different instances. + // Will be replaced by dashboard_by_uid in the future. (deprecated) + // - dashboard_by_tag: The value is a tag which is set on any number of dashboards. All + // dashboards behind the tag will be added to the playlist. + // - dashboard_by_uid: The value is the dashboard UID + value: string + + // Title is an unused property -- it will be removed in the future + title?: string +} diff --git a/schemas/cue/core/sandbox/sandbox.cue b/schemas/cue/core/sandbox/sandbox.cue new file mode 100644 index 000000000..5af2d18ac --- /dev/null +++ b/schemas/cue/core/sandbox/sandbox.cue @@ -0,0 +1,34 @@ +package sandbox + +Sandbox: { + name: string + + nestedStruct: { + foo: string + } + + anythingPlz: { ... } + + someMap?: { + [string]: int32 + } + + #OperatorState: { + // lastEvaluation is the ResourceVersion last evaluated + lastEvaluation: string + // state describes the state of the lastEvaluation. + // It is limited to three possible states for machine evaluation.
+ state: "success" | "in_progress" | "failed" + // descriptiveState is an optional more descriptive state field which has no requirements on format + descriptiveState?: string + // details contains any extra information that is operator-specific + details?: { + [string]: _ + } + } + // operatorStates is a map of operator ID to operator state evaluations. + // Any operator which consumes this kind SHOULD add its state evaluation information to this field. + operatorStates?: { + [string]: #OperatorState + } +} diff --git a/schemas/jsonschema/core/dockerd/dockerd.json b/schemas/jsonschema/core/dockerd/dockerd.json new file mode 100644 index 000000000..22b80665d --- /dev/null +++ b/schemas/jsonschema/core/dockerd/dockerd.json @@ -0,0 +1,472 @@ +{ + "$id": "https://json.schemastore.org/dockerd.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "description": "https://docs.docker.com/engine/reference/commandline/dockerd/#daemon", + "properties": { + "allow-nondistributable-artifacts": { + "type": "array", + "items": {} + }, + "api-cors-header": { + "type": "string" + }, + "authorization-plugins": { + "type": "array", + "items": {} + }, + "bip": { + "type": "string" + }, + "bridge": { + "type": "string" + }, + "cgroup-parent": { + "type": "string" + }, + "cluster-advertise": { + "type": "string" + }, + "cluster-store": { + "type": "string" + }, + "cluster-store-opts": { + "type": "object" + }, + "containerd": { + "type": "string", + "examples": ["/run/containerd/containerd.sock"] + }, + "containerd-namespace": { + "type": "string", + "examples": ["docker"] + }, + "containerd-plugin-namespace": { + "type": "string", + "examples": ["docker-plugins"] + }, + "data-root": { + "type": "string" + }, + "debug": { + "type": "boolean", + "default": false, + "examples": [true] + }, + "default-address-pools": { + "type": "array", + "items": { + "type": "object", + "properties": { + "base": { + "type": "string", + "examples": ["172.30.0.0/16", "172.31.0.0/16"] + }, + "size": { + "type": "number", + "examples": [24] + } + }, + "examples": [ + { + "base": "172.30.0.0/16", + "size": 24 + }, + { + "base": "172.31.0.0/16", + "size": 24 + } + ] + }, + "examples": [ + [ + { + "base": "172.30.0.0/16", + "size": 24 + }, + { + "base": "172.31.0.0/16", + "size": 24 + } + ] + ] + }, + "default-cgroupns-mode": { + "type": "string", + "default": "host", + "examples": ["host", "private"], + "enum": ["private", "host"] + }, + "default-ipc-mode": { + "type": "string", + "default": "private", + "examples": ["shareable", "private"], + "enum": ["shareable", "private"] + }, + "default-gateway": { + "type": "string" + }, + "default-gateway-v6": { + "type": "string" + }, + "default-runtime": { + "type": "string", + "default": "containerd", + "examples": ["runc", "containerd"] + }, + "default-shm-size": { + "type": "string", + "examples": ["64M"] + }, + "default-ulimits": { + "type": "object", + "properties": { + "nofile": { + "type": "object", + "properties": { + "Hard": { + "type": "number", + "default": 0, + "examples": [64000] + }, + "Name": { + "type": "string", + "examples": ["nofile"] + }, + "Soft": { + "type": "number", + "default": 0, + "examples": [64000] + } + }, + "examples": [ + { + "Hard": 64000, + "Name": "nofile", + "Soft": 64000 + } + ] + } + }, + "examples": [ + { + "nofile": { + "Hard": 64000, + "Name": "nofile", + "Soft": 64000 + } + } + ] + }, + "dns": { + "type": "array", + "items": {} + }, + "dns-opts": { + "type": "array", + "items": { + "type": "string" + } + }, + "dns-search": { + "type": 
"array", + "items": { + "type": "string" + } + }, + "exec-opts": { + "type": "array", + "items": { + "type": "string" + } + }, + "exec-root": { + "type": "string" + }, + "experimental": { + "type": "boolean", + "default": false, + "examples": [true] + }, + "features": { + "type": "object" + }, + "fixed-cidr": { + "type": "string" + }, + "fixed-cidr-v6": { + "type": "string" + }, + "group": { + "type": "string" + }, + "hosts": { + "type": "array", + "items": { + "type": "string" + } + }, + "icc": { + "type": "boolean", + "default": false + }, + "init": { + "type": "boolean", + "default": false + }, + "init-path": { + "type": "string", + "examples": ["/usr/libexec/docker-init"] + }, + "insecure-registries": { + "type": "array", + "items": { + "type": "string" + } + }, + "ip": { + "type": "string", + "examples": ["0.0.0.0"] + }, + "ip-forward": { + "type": "boolean", + "default": false + }, + "ip-masq": { + "type": "boolean", + "default": false + }, + "iptables": { + "type": "boolean", + "default": false + }, + "ip6tables": { + "type": "boolean", + "default": false + }, + "ipv6": { + "type": "boolean", + "default": false + }, + "labels": { + "type": "array", + "items": {} + }, + "live-restore": { + "type": "boolean", + "default": false, + "examples": [true] + }, + "log-driver": { + "type": "string", + "examples": ["json-file"] + }, + "log-level": { + "type": "string" + }, + "log-opts": { + "type": "object", + "properties": { + "cache-disabled": { + "type": "string", + "examples": ["false"] + }, + "cache-max-file": { + "type": "string", + "examples": ["5"] + }, + "cache-max-size": { + "type": "string", + "examples": ["20m"] + }, + "cache-compress": { + "type": "string", + "examples": ["true"] + }, + "env": { + "type": "string", + "examples": ["os,customer"] + }, + "labels": { + "type": "string", + "examples": ["somelabel"] + }, + "max-file": { + "type": "string", + "examples": ["5"] + }, + "max-size": { + "type": "string", + "examples": ["10m"] + } + }, + "examples": [ + { + "cache-disabled": "false", + "cache-max-file": "5", + "cache-max-size": "20m", + "cache-compress": "true", + "env": "os,customer", + "labels": "somelabel", + "max-file": "5", + "max-size": "10m" + } + ] + }, + "max-concurrent-downloads": { + "type": "number", + "default": 0, + "examples": [3] + }, + "max-concurrent-uploads": { + "type": "number", + "default": 0, + "examples": [5] + }, + "max-download-attempts": { + "type": "number", + "default": 0, + "examples": [5] + }, + "mtu": { + "type": "number", + "default": 0, + "examples": [0] + }, + "no-new-privileges": { + "type": "boolean", + "default": false + }, + "node-generic-resources": { + "type": "array", + "items": { + "type": "string", + "examples": ["NVIDIA-GPU=UUID1", "NVIDIA-GPU=UUID2"] + }, + "examples": [["NVIDIA-GPU=UUID1", "NVIDIA-GPU=UUID2"]] + }, + "oom-score-adjust": { + "type": "number", + "examples": [-500] + }, + "pidfile": { + "type": "string" + }, + "raw-logs": { + "type": "boolean", + "default": false + }, + "registry-mirrors": { + "type": "array", + "items": { + "type": "string" + } + }, + "runtimes": { + "type": "object", + "properties": { + "cc-runtime": { + "type": "object", + "properties": { + "path": { + "type": "string", + "examples": ["/usr/bin/cc-runtime"] + } + }, + "examples": [ + { + "path": "/usr/bin/cc-runtime" + } + ] + }, + "custom": { + "type": "object", + "properties": { + "path": { + "type": "string", + "examples": ["/usr/local/bin/my-runc-replacement"] + }, + "runtimeArgs": { + "type": "array", + "items": { + "type": "string", + 
"examples": ["--debug"] + }, + "examples": [["--debug"]] + } + }, + "examples": [ + { + "path": "/usr/local/bin/my-runc-replacement", + "runtimeArgs": ["--debug"] + } + ] + } + }, + "examples": [ + { + "cc-runtime": { + "path": "/usr/bin/cc-runtime" + }, + "custom": { + "path": "/usr/local/bin/my-runc-replacement", + "runtimeArgs": ["--debug"] + } + } + ] + }, + "seccomp-profile": { + "type": "string" + }, + "selinux-enabled": { + "type": "boolean", + "default": false + }, + "shutdown-timeout": { + "type": "number", + "default": 0, + "examples": [15] + }, + "storage-driver": { + "type": "string" + }, + "storage-opts": { + "type": "array", + "items": {} + }, + "swarm-default-advertise-addr": { + "type": "string" + }, + "tls": { + "type": "boolean", + "default": false, + "examples": [true] + }, + "tlscacert": { + "type": "string" + }, + "tlscert": { + "type": "string" + }, + "tlskey": { + "type": "string" + }, + "tlsverify": { + "type": "boolean", + "default": false, + "examples": [true] + }, + "userland-proxy": { + "type": "boolean", + "default": false + }, + "userland-proxy-path": { + "type": "string", + "examples": ["/usr/libexec/docker-proxy"] + }, + "userns-remap": { + "type": "string" + } + }, + "title": "Docker Daemon configuration schema", + "type": "object" +} diff --git a/schemas/jsonschema/core/playlist/playlist.json b/schemas/jsonschema/core/playlist/playlist.json new file mode 100644 index 000000000..2913b89fb --- /dev/null +++ b/schemas/jsonschema/core/playlist/playlist.json @@ -0,0 +1,63 @@ +{ + "$ref": "#/definitions/Playlist", + "$schema": "http://json-schema.org/draft-07/schema#", + "definitions": { + "Playlist": { + "additionalProperties": false, + "properties": { + "interval": { + "default": "5m", + "description": "Interval sets the time between switching views in a playlist. FIXME: Is this based on a standardized format or what options are available? Can datemath be used?", + "type": "string" + }, + "items": { + "description": "The ordered list of items that the playlist will iterate over.", + "items": { + "$ref": "#/definitions/PlaylistItem" + }, + "type": "array" + }, + "name": { + "description": "Name of the playlist.", + "type": "string" + }, + "xxx": { + "description": "dummy value so thema allows a breaking change version.", + "type": "string" + } + }, + "required": [ + "interval", + "items", + "name", + "xxx" + ], + "type": "object" + }, + "PlaylistItem": { + "additionalProperties": false, + "properties": { + "type": { + "$ref": "#/definitions/PlaylistItemType", + "description": "Type of the item." + }, + "value": { + "description": "Value depends on type and describes the playlist item.\n\n - dashboard_by_id: The value is an internal numerical identifier set by Grafana. This is not portable as the numerical identifier is non-deterministic between different instances. Will be replaced by dashboard_by_uid in the future. (deprecated) - dashboard_by_tag: The value is a tag which is set on any number of dashboards. All dashboards behind the tag will be added to the playlist. 
- dashboard_by_uid: The value is the dashboard UID", + "type": "string" + } + }, + "required": [ + "type", + "value" + ], + "type": "object" + }, + "PlaylistItemType": { + "enum": [ + "dashboard_by_tag", + "dashboard_by_uid" + ], + "type": "string" + } + } +} diff --git a/schemas/kindsys/core/dashboard/dashboard.cue b/schemas/kindsys/core/dashboard/dashboard.cue new file mode 100644 index 000000000..055811074 --- /dev/null +++ b/schemas/kindsys/core/dashboard/dashboard.cue @@ -0,0 +1,765 @@ +package kind + +import ( + "github.com/grafana/kindsys" + t "time" + "strings" +) + +kindsys.Core +name: "Dashboard" +maturity: "experimental" +description: "A Grafana dashboard." +crd: { + dummySchema: true +} +lineage: { + schemas: [{ + version: [0, 0] + schema: { + spec: { + // Unique numeric identifier for the dashboard. + // `id` is internal to a specific Grafana instance. `uid` should be used to identify a dashboard across Grafana instances. + id?: int64 | null + + // Unique dashboard identifier that can be generated by anyone. string (8-40) + uid?: string + + // Title of dashboard. + title?: string + + // Description of dashboard. + description?: string + + // This property should only be used in dashboards defined by plugins. It is a quick check + // to see if the version has changed since the last time. + revision?: int64 + + // ID of a dashboard imported from the https://grafana.com/grafana/dashboards/ portal + gnetId?: string + + // Tags associated with dashboard. + tags?: [...string] + + // Theme of dashboard. + // Default value: dark. + style: "light" | *"dark" + + // Timezone of dashboard. Accepted values are IANA TZDB zone ID or "browser" or "utc". + timezone?: string | *"browser" + + // Whether a dashboard is editable or not. + editable: bool | *true + + // Configuration of dashboard cursor sync behavior. + // Accepted values are 0 (sync turned off), 1 (shared crosshair), 2 (shared crosshair and tooltip). + graphTooltip: #DashboardCursorSync + + // Time range for dashboard. + // Accepted values are relative time strings like {from: 'now-6h', to: 'now'} or absolute time strings like {from: '2020-07-10T08:00:00.000Z', to: '2020-07-10T14:00:00.000Z'}. + time?: { + from: string | *"now-6h" + to: string | *"now" + } + + // Configuration of the time picker shown at the top of a dashboard. + timepicker?: { + // Whether timepicker is visible or not. + hidden: bool | *false + // Interval options available in the refresh picker dropdown. + refresh_intervals: [...string] | *["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"] + // Whether timepicker is collapsed or not. Has no effect on provisioned dashboard. + collapse: bool | *false + // Whether timepicker is enabled or not. Has no effect on provisioned dashboard. + enable: bool | *true + // Selectable options available in the time picker dropdown. Has no effect on provisioned dashboard. + time_options: [...string] | *["5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"] + } + + // The month that the fiscal year starts on. 0 = January, 11 = December + fiscalYearStartMonth?: uint8 & <12 | *0 + + // When set to true, the dashboard will redraw panels at an interval matching the pixel width. + // This will keep data "moving left" regardless of the query refresh rate. This setting helps + // avoid dashboards presenting stale live data + liveNow?: bool + + // Day when the week starts. Expressed by the name of the day in lowercase, e.g. "monday". + weekStart?: string + + // Refresh rate of dashboard. 
Represented via interval string, e.g. "5s", "1m", "1h", "1d". + refresh?: string | false + + // Version of the JSON schema, incremented each time a Grafana update brings + // changes to said schema. + schemaVersion: uint16 | *36 + + // Version of the dashboard, incremented each time the dashboard is updated. + version?: uint32 + + // List of dashboard panels + panels?: [...#Panel | #RowPanel | #GraphPanel | #HeatmapPanel] + + // Configured template variables + templating?: { + // List of configured template variables with their saved values along with some other metadata + list?: [...#VariableModel] + } + + // Contains the list of annotations that are associated with the dashboard. + // Annotations are used to overlay event markers and overlay event tags on graphs. + // Grafana comes with a native annotation store and the ability to add annotation events directly from the graph panel or via the HTTP API. + // See https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/annotate-visualizations/ + annotations?: #AnnotationContainer + + // Links with references to other dashboards or external websites. + links?: [...#DashboardLink] + + // Snapshot options. They are present only if the dashboard is a snapshot. + snapshot?: #Snapshot @grafanamaturity(NeedsExpertReview) + } + + // TODO: this should be a regular DataQuery that depends on the selected dashboard + // these match the properties of the "grafana" datasouce that is default in most dashboards + #AnnotationTarget: { + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + limit: int64 + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + matchAny: bool + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + tags: [...string] + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + type: string + ... + } @grafanamaturity(NeedsExpertReview) + #AnnotationPanelFilter: { + // Should the specified panels be included or excluded + exclude?: bool | *false + + // Panel IDs that should be included or excluded + ids: [...uint8] + } + + // Contains the list of annotations that are associated with the dashboard. + // Annotations are used to overlay event markers and overlay event tags on graphs. + // Grafana comes with a native annotation store and the ability to add annotation events directly from the graph panel or via the HTTP API. + // See https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/annotate-visualizations/ + #AnnotationContainer: { + // List of annotations + list?: [...#AnnotationQuery] + } + + // TODO docs + // FROM: AnnotationQuery in grafana-data/src/types/annotations.ts + #AnnotationQuery: { + // Name of annotation. + name: string + + // Datasource where the annotations data is + datasource: #DataSourceRef + + // When enabled the annotation query is issued with every dashboard refresh + enable: bool | *true + + // Annotation queries can be toggled on or off at the top of the dashboard. + // When hide is true, the toggle is not shown in the dashboard. + hide?: bool | *false + + // Color to use for the annotation event markers + iconColor: string + + // Filters to apply when fetching annotations + filter?: #AnnotationPanelFilter + + // TODO.. 
this should just be a normal query target + target?: #AnnotationTarget + + // TODO -- this should not exist here, it is based on the --grafana-- datasource + type?: string @grafanamaturity(NeedsExpertReview) + ... + } @grafanamaturity(NeedsExpertReview) + + // A variable is a placeholder for a value. You can use variables in metric queries and in panel titles. + #VariableModel: { + // Unique numeric identifier for the variable. + id: string | *"00000000-0000-0000-0000-000000000000" + // Type of variable + type: #VariableType + // Name of variable + name: string + // Optional display name + label?: string + // Visibility configuration for the variable + hide: #VariableHide + // Whether the variable value should be managed by URL query params or not + skipUrlSync: bool | *false + // Description of variable. It can be defined but `null`. + description?: string + // Query used to fetch values for a variable + query?: string | { + ... + } + // Data source used to fetch values for a variable. It can be defined but `null`. + datasource?: #DataSourceRef + // Format to use while fetching all values from data source, eg: wildcard, glob, regex, pipe, etc. + allFormat?: string + // Shows current selected variable text/value on the dashboard + current?: #VariableOption + // Whether multiple values can be selected or not from variable value list + multi?: bool | *false + // Options that can be selected for a variable. + options?: [...#VariableOption] + refresh?: #VariableRefresh + ... + } @grafanamaturity(NeedsExpertReview) + + // Option to be selected in a variable. + #VariableOption: { + // Whether the option is selected or not + selected?: bool + // Text to be displayed for the option + text: string | [...string] + // Value of the option + value: string | [...string] + } + + // Options to config when to refresh a variable + // `0`: Never refresh the variable + // `1`: Queries the data source every time the dashboard loads. + // `2`: Queries the data source when the dashboard time range changes. + #VariableRefresh: 0 | 1 | 2 @grabana(kind="enum",memberNames="never|onDashboardLoad|onTimeRangeChanged") + + // Determine if the variable shows on dashboard + // Accepted values are 0 (show label and value), 1 (show value only), 2 (show nothing). + #VariableHide: 0 | 1 | 2 @grabana(kind="enum",memberNames="dontHide|hideLabel|hideVariable") + + // Sort variable options + // Accepted values are: + // `0`: No sorting + // `1`: Alphabetical ASC + // `2`: Alphabetical DESC + // `3`: Numerical ASC + // `4`: Numerical DESC + // `5`: Alphabetical Case Insensitive ASC + // `6`: Alphabetical Case Insensitive DESC + #VariableSort: 0 | 1 | 2 | 3 | 4 | 5 | 6 @grabana(kind="enum",memberNames="disabled|alphabeticalAsc|alphabeticalDesc|numericalAsc|numericalDesc|alphabeticalCaseInsensitiveAsc|alphabeticalCaseInsensitiveDesc") + + // Loading status + // Accepted values are `NotStarted` (the request is not started), `Loading` (waiting for response), `Streaming` (pulling continuous data), `Done` (response received successfully) or `Error` (failed request). + #LoadingState: "NotStarted" | "Loading" | "Streaming" | "Done" | "Error" @grabana(kind="enum") + + // Ref to a DataSource instance + #DataSourceRef: { + // The plugin type-id + type?: string + + // Specific datasource instance + uid?: string + } + + // Links with references to other dashboards or external resources + #DashboardLink: { + // Title to display with the link + title: string + // Link type. 
Accepted values are dashboards (to refer to another dashboard) and link (to refer to an external resource)
+ type: #DashboardLinkType
+ // Icon name to be displayed with the link
+ icon: string
+ // Tooltip to display when the user hovers their mouse over it
+ tooltip: string
+ // Link URL. Only required/valid if the type is link
+ url: string
+ // List of tags to limit the linked dashboards. If empty, all dashboards will be displayed. Only valid if the type is dashboards
+ tags: [...string]
+ // If true, all dashboard links will be displayed in a dropdown. If false, all dashboard links will be displayed side by side. Only valid if the type is dashboards
+ asDropdown: bool | *false
+ // If true, the link will be opened in a new tab
+ targetBlank: bool | *false
+ // If true, includes current template variable values in the link as query params
+ includeVars: bool | *false
+ // If true, includes current time range in the link as query params
+ keepTime: bool | *false
+ }
+
+ // Dashboard Link type. Accepted values are dashboards (to refer to another dashboard) and link (to refer to an external resource)
+ #DashboardLinkType: "link" | "dashboards"
+
+ // Dashboard variable type
+ // `query`: Query-generated list of values such as metric names, server names, sensor IDs, data centers, and so on.
+ // `adhoc`: Key/value filters that are automatically added to all metric queries for a data source (Prometheus, Loki, InfluxDB, and Elasticsearch only).
+ // `constant`: Define a hidden constant.
+ // `datasource`: Quickly change the data source for an entire dashboard.
+ // `interval`: Interval variables represent time spans.
+ // `textbox`: Display a free text input field with an optional default value.
+ // `custom`: Define the variable options manually using a comma-separated list.
+ // `system`: Variables defined by Grafana. See: https://grafana.com/docs/grafana/latest/dashboards/variables/add-template-variables/#global-variables
+ #VariableType: "query" | "adhoc" | "constant" | "datasource" | "interval" | "textbox" | "custom" | "system" @grafanamaturity(NeedsExpertReview)
+
+ // Color mode for a field. You can specify a single color, or select a continuous (gradient) color scheme based on a value.
+ // Continuous color interpolates a color using the percentage of a value relative to min and max.
+ // Accepted values are:
+ // `thresholds`: From thresholds. Informs Grafana to take the color from the matching threshold
+ // `palette-classic`: Classic palette. Grafana will assign color by looking up a color in a palette by series index. Useful for Graphs and pie charts and other categorical data visualizations
+ // `palette-classic-by-name`: Classic palette (by name). Grafana will assign color by looking up a color in a palette by series name. Useful for Graphs and pie charts and other categorical data visualizations
+ // `continuous-GrYlRd`: Continuous Green-Yellow-Red palette mode
+ // `continuous-RdYlGr`: Continuous Red-Yellow-Green palette mode
+ // `continuous-BlYlRd`: Continuous Blue-Yellow-Red palette mode
+ // `continuous-YlRd`: Continuous Yellow-Red palette mode
+ // `continuous-BlPu`: Continuous Blue-Purple palette mode
+ // `continuous-YlBl`: Continuous Yellow-Blue palette mode
+ // `continuous-blues`: Continuous Blue palette mode
+ // `continuous-reds`: Continuous Red palette mode
+ // `continuous-greens`: Continuous Green palette mode
+ // `continuous-purples`: Continuous Purple palette mode
+ // `shades`: Shades of a single color.
Specify a single color, useful in an override rule. + // `fixed`: Fixed color mode. Specify a single color, useful in an override rule. + #FieldColorModeId: "thresholds" | "palette-classic" | "palette-classic-by-name" | "continuous-GrYlRd" | "continuous-RdYlGr" | "continuous-BlYlRd" | "continuous-YlRd" | "continuous-BlPu" | "continuous-YlBl" | "continuous-blues" | "continuous-reds" | "continuous-greens" | "continuous-purples" | "fixed" | "shades" @grabana(kind="enum",memberNames="Thresholds|PaletteClassic|PaletteClassicByName|ContinuousGrYlRd|ContinuousRdYlGr|ContinuousBlYlRd|ContinuousYlRd|ContinuousBlPu|ContinuousYlBl|ContinuousBlues|ContinuousReds|ContinuousGreens|ContinuousPurples|Fixed|Shades") @grafanamaturity(NeedsExpertReview) + + // Defines how to assign a series color from "by value" color schemes. For example for an aggregated data points like a timeseries, the color can be assigned by the min, max or last value. + #FieldColorSeriesByMode: "min" | "max" | "last" + + // Map a field to a color. + #FieldColor: { + // The main color scheme mode. + mode: #FieldColorModeId + // The fixed color value for fixed or shades color modes. + fixedColor?: string + // Some visualizations need to know how to assign a series color from by value color schemes. + seriesBy?: #FieldColorSeriesByMode + } + + // Position and dimensions of a panel in the grid + #GridPos: { + // Panel height. The height is the number of rows from the top edge of the panel. + h: uint32 & >0 | *9 + // Panel width. The width is the number of columns from the left edge of the panel. + w: uint32 & >0 & <=24 | *12 + // Panel x. The x coordinate is the number of columns from the left edge of the grid + x: uint32 & >=0 & <24 | *0 + // Panel y. The y coordinate is the number of rows from the top edge of the grid + y: uint32 & >=0 | *0 + // Whether the panel is fixed within the grid. If true, the panel will not be affected by other panels' interactions + static?: bool + } + + // User-defined value for a metric that triggers visual changes in a panel when this value is met or exceeded + // They are used to conditionally style and color visualizations based on query results , and can be applied to most visualizations. + #Threshold: { + // Value represents a specified metric for the threshold, which triggers a visual change in the dashboard when this value is met or exceeded. + // Nulls currently appear here when serializing -Infinity to JSON. + value: number | null @grafanamaturity(NeedsExpertReview) + // Color represents the color of the visual change that will occur in the dashboard when the threshold value is met or exceeded. + color: string @grafanamaturity(NeedsExpertReview) + } @grafanamaturity(NeedsExpertReview) + + // Thresholds can either be `absolute` (specific number) or `percentage` (relative to min or max, it will be values between 0 and 1). + #ThresholdsMode: "absolute" | "percentage" @grabana(kind="enum",memberNames="Absolute|Percentage") + + // Thresholds configuration for the panel + #ThresholdsConfig: { + // Thresholds mode. 
+ mode: #ThresholdsMode + + // Must be sorted by 'value', first value is always -Infinity + steps: [...#Threshold] @grafanamaturity(NeedsExpertReview) + } @grafanamaturity(NeedsExpertReview) + + // Allow to transform the visual representation of specific data values in a visualization, irrespective of their original units + #ValueMapping: #ValueMap | #RangeMap | #RegexMap | #SpecialValueMap @grafanamaturity(NeedsExpertReview) + + // Supported value mapping types + // `value`: Maps text values to a color or different display text and color. For example, you can configure a value mapping so that all instances of the value 10 appear as Perfection! rather than the number. + // `range`: Maps numerical ranges to a display text and color. For example, if a value is within a certain range, you can configure a range value mapping to display Low or High rather than the number. + // `regex`: Maps regular expressions to replacement text and a color. For example, if a value is www.example.com, you can configure a regex value mapping so that Grafana displays www and truncates the domain. + // `special`: Maps special values like Null, NaN (not a number), and boolean values like true and false to a display text and color. See SpecialValueMatch to see the list of special values. For example, you can configure a special value mapping so that null values appear as N/A. + #MappingType: "value" | "range" | "regex" | "special" @grabana(kind="enum",memberNames="ValueToText|RangeToText|RegexToText|SpecialValue") @grafanamaturity(NeedsExpertReview) + + // Maps text values to a color or different display text and color. + // For example, you can configure a value mapping so that all instances of the value 10 appear as Perfection! rather than the number. + #ValueMap: { + type: #MappingType & { + "value" + } + // Map with : ValueMappingResult. For example: { "10": { text: "Perfection!", color: "green" } } + options: { + [string]: #ValueMappingResult + } + } + + // Maps numerical ranges to a display text and color. + // For example, if a value is within a certain range, you can configure a range value mapping to display Low or High rather than the number. + #RangeMap: { + type: #MappingType & { + "range" + } + // Range to match against and the result to apply when the value is within the range + options: { + // Min value of the range. It can be null which means -Infinity + from: float64 | null + // Max value of the range. It can be null which means +Infinity + to: float64 | null + // Config to apply when the value is within the range + result: #ValueMappingResult + } + } @grafanamaturity(NeedsExpertReview) + + // Maps regular expressions to replacement text and a color. + // For example, if a value is www.example.com, you can configure a regex value mapping so that Grafana displays www and truncates the domain. + #RegexMap: { + type: #MappingType & { + "regex" + } + // Regular expression to match against and the result to apply when the value matches the regex + options: { + // Regular expression to match against + pattern: string + // Config to apply when the value matches the regex + result: #ValueMappingResult + } + } @grafanamaturity(NeedsExpertReview) + + // Maps special values like Null, NaN (not a number), and boolean values like true and false to a display text and color. + // See SpecialValueMatch to see the list of special values. + // For example, you can configure a special value mapping so that null values appear as N/A. 
+ #SpecialValueMap: { + type: #MappingType & { + "special" + } + options: { + // Special value to match against + match: #SpecialValueMatch + // Config to apply when the value matches the special value + result: #ValueMappingResult + } + } @grafanamaturity(NeedsExpertReview) + + // Special value types supported by the `SpecialValueMap` + #SpecialValueMatch: "true" | "false" | "null" | "nan" | "null+nan" | "empty" @grabana(kind="enum",memberNames="True|False|Null|NaN|NullAndNan|Empty") + + // Result used as replacement with text and color when the value matches + #ValueMappingResult: { + // Text to display when the value matches + text?: string + // Text to use when the value matches + color?: string + // Icon to display when the value matches. Only specific visualizations. + icon?: string + // Position in the mapping array. Only used internally. + index?: int32 + } + + // Transformations allow to manipulate data returned by a query before the system applies a visualization. + // Using transformations you can: rename fields, join time series data, perform mathematical operations across queries, + // use the output of one transformation as the input to another transformation, etc. + #DataTransformerConfig: { + // Unique identifier of transformer + id: string + // Disabled transformations are skipped + disabled?: bool + // Optional frame matcher. When missing it will be applied to all results + filter?: #MatcherConfig + // Options to be passed to the transformer + // Valid options depend on the transformer id + options: _ + } + + // 0 for no shared crosshair or tooltip (default). + // 1 for shared crosshair. + // 2 for shared crosshair AND shared tooltip. + #DashboardCursorSync: *0 | 1 | 2 @grabana(kind="enum",memberNames="Off|Crosshair|Tooltip") + + // Schema for panel targets is specified by datasource + // plugins. We use a placeholder definition, which the Go + // schema loader either left open/as-is with the Base + // variant of the Dashboard and Panel families, or filled + // with types derived from plugins in the Instance variant. + // When working directly from CUE, importers can extend this + // type directly to achieve the same effect. + #Target: { + ... + } + + // A dashboard snapshot shares an interactive dashboard publicly. + // It is a read-only version of a dashboard, and is not editable. + // It is possible to create a snapshot of a snapshot. + // Grafana strips away all sensitive information from the dashboard. + // Sensitive information stripped: queries (metric, template,annotation) and panel links. 
+ #Snapshot: { + // Time when the snapshot was created + created: t.Time & { + string + } + // Time when the snapshot expires, default is never to expire + expires: string @grafanamaturity(NeedsExpertReview) + // Is the snapshot saved in an external grafana instance + external: bool @grafanamaturity(NeedsExpertReview) + // external url, if snapshot was shared in external grafana instance + externalUrl: string @grafanamaturity(NeedsExpertReview) + // Unique identifier of the snapshot + id: uint32 @grafanamaturity(NeedsExpertReview) + // Optional, defined the unique key of the snapshot, required if external is true + key: string @grafanamaturity(NeedsExpertReview) + // Optional, name of the snapshot + name: string @grafanamaturity(NeedsExpertReview) + // org id of the snapshot + orgId: uint32 @grafanamaturity(NeedsExpertReview) + // last time when the snapshot was updated + updated: t.Time & { + string + } + // url of the snapshot, if snapshot was shared internally + url?: string @grafanamaturity(NeedsExpertReview) + // user id of the snapshot creator + userId: uint32 @grafanamaturity(NeedsExpertReview) + } @grafanamaturity(NeedsExpertReview) + + // Dashboard panels are the basic visualization building blocks. + #Panel: { + // The panel plugin type id. This is used to find the plugin to display the panel. + type: strings.MinRunes(1) & { + string + } + + // Unique identifier of the panel. Generated by Grafana when creating a new panel. It must be unique within a dashboard, but not globally. + id?: uint32 + + // The version of the plugin that is used for this panel. This is used to find the plugin to display the panel and to migrate old panel configs. + pluginVersion?: string + + // Tags for the panel. + tags?: [...string] + + // Depends on the panel plugin. See the plugin documentation for details. + targets?: [...#Target] + + // Panel title. + title?: string + + // Panel description. + description?: string + + // Whether to display the panel without a background. + transparent: bool | *false + + // The datasource used in all targets. + datasource?: #DataSourceRef + + // Grid position. + gridPos?: #GridPos + + // Panel links. + links?: [...#DashboardLink] + + // Name of template variable to repeat for. + repeat?: string + + // Direction to repeat in if 'repeat' is set. + // `h` for horizontal, `v` for vertical. + repeatDirection?: *"h" | "v" + + // Id of the repeating panel. + repeatPanelId?: int64 + + // The maximum number of data points that the panel queries are retrieving. + maxDataPoints?: number + + // List of transformations that are applied to the panel data before rendering. + // When there are multiple transformations, Grafana applies them in the order they are listed. + // Each transformation creates a result set that then passes on to the next transformation in the processing pipeline. + transformations: [...#DataTransformerConfig] + + // The min time interval setting defines a lower limit for the $__interval and $__interval_ms variables. + // This value must be formatted as a number followed by a valid time + // identifier like: "40s", "3d", etc. + // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options + interval?: string + + // Overrides the relative time range for individual panels, + // which causes them to be different than what is selected in + // the dashboard time picker in the top-right corner of the dashboard. You can use this to show metrics from different + // time periods or days on the same dashboard. 
+ // The value is formatted as a time operation like: `now-5m` (Last 5 minutes), `now/d` (the day so far),
+ // `now-5d/d` (Last 5 days), `now/w` (This week so far), `now-2y/y` (Last 2 years).
+ // Note: Panel time overrides have no effect when the dashboard’s time range is absolute.
+ // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options
+ timeFrom?: string
+
+ // Overrides the time range for individual panels by shifting their start and end relative to the time picker.
+ // For example, you can shift the time range for the panel to be two hours earlier than the dashboard time picker by setting it to `2h`.
+ // Note: Panel time overrides have no effect when the dashboard’s time range is absolute.
+ // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options
+ timeShift?: string
+
+ // Dynamically load the panel
+ libraryPanel?: #LibraryPanelRef
+
+ // It depends on the panel plugin. They are specified by the Options field in panel plugin schemas.
+ options: {
+ ...
+ } @grafanamaturity(NeedsExpertReview)
+
+ // Field options allow you to change how the data is displayed in your visualizations.
+ fieldConfig: #FieldConfigSource
+ } @grafanamaturity(NeedsExpertReview)
+
+ // The data model used in Grafana, namely the data frame, is a columnar-oriented table structure that unifies both time series and table query results.
+ // Each column within this structure is called a field. A field can represent a single time series or table column.
+ // Field options allow you to change how the data is displayed in your visualizations.
+ #FieldConfigSource: {
+ // Defaults are the options applied to all fields.
+ defaults: #FieldConfig
+ // Overrides are the options applied to specific fields overriding the defaults.
+ overrides: [...{
+ matcher: #MatcherConfig
+ properties: [...#DynamicConfigValue]
+ }] @grafanamaturity(NeedsExpertReview)
+ } @grafanamaturity(NeedsExpertReview)
+
+ // A library panel is a reusable panel that you can use in any dashboard.
+ // When you make a change to a library panel, that change propagates to all instances where the panel is used.
+ // Library panels streamline reuse of panels across multiple dashboards.
+ #LibraryPanelRef: {
+ // Library panel name
+ name: string
+ // Library panel uid
+ uid: string
+ }
+
+ // Matcher is a predicate configuration. Based on the config, a set of field(s) or values is filtered in order to apply an override / transformation.
+ // It comes with an id (to resolve the implementation from the registry) and a configuration that’s specific to a particular matcher type.
+ #MatcherConfig: {
+ // The matcher id. This is used to find the matcher implementation from the registry.
+ id: string | *"" @grafanamaturity(NeedsExpertReview)
+ // The matcher options. This is specific to the matcher implementation.
+ options?: _ @grafanamaturity(NeedsExpertReview)
+ }
+ #DynamicConfigValue: {
+ id: string | *"" @grafanamaturity(NeedsExpertReview)
+ value?: _ @grafanamaturity(NeedsExpertReview)
+ }
+
+ // The data model used in Grafana, namely the data frame, is a columnar-oriented table structure that unifies both time series and table query results.
+ // Each column within this structure is called a field. A field can represent a single time series or table column.
+ // Field options allow you to change how the data is displayed in your visualizations.
+ #FieldConfig: {
+ // The display value for this field.
This supports template variables; blank is auto
+ displayName?: string @grafanamaturity(NeedsExpertReview)
+
+ // This can be used by data sources that return an explicit naming structure for values and labels
+ // When this property is configured, this value is used rather than the default naming strategy.
+ displayNameFromDS?: string @grafanamaturity(NeedsExpertReview)
+
+ // Human readable field metadata
+ description?: string @grafanamaturity(NeedsExpertReview)
+
+ // An explicit path to the field in the datasource. When the frame meta includes a path,
+ // this will default to `${frame.meta.path}/${field.name}`
+ //
+ // When defined, this value can be used as an identifier within the datasource scope, and
+ // may be used to update the results
+ path?: string @grafanamaturity(NeedsExpertReview)
+
+ // True if data source can write a value to the path. Auth/authz are supported separately
+ writeable?: bool @grafanamaturity(NeedsExpertReview)
+
+ // True if data source field supports ad-hoc filters
+ filterable?: bool @grafanamaturity(NeedsExpertReview)
+
+ // Unit a field should use. The unit you select is applied to all fields except time.
+ // You can use the unit IDs available in Grafana or a custom unit.
+ // Available units in Grafana: https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts
+ // As a custom unit, you can use the following formats:
+ // `suffix:` for a custom unit that should go after the value.
+ // `prefix:` for a custom unit that should go before the value.
+ // `time:` for custom date time formats, for example `time:YYYY-MM-DD`.
+ // `si:` for custom SI units. For example: `si: mF`. This one is a bit more advanced as you can specify both a unit and the source data scale. So if your source data is represented as milli (thousandths of) something, prefix the unit with that SI scale character.
+ // `count:` for a custom count unit.
+ // `currency:` for a custom currency unit.
+ unit?: string @grafanamaturity(NeedsExpertReview)
+
+ // Specify the number of decimals Grafana includes in the rendered value.
+ // If you leave this field blank, Grafana automatically truncates the number of decimals based on the value.
+ // For example 1.1234 will display as 1.12 and 100.456 will display as 100.
+ // To display all decimals, set the unit to `String`.
+ decimals?: number @grafanamaturity(NeedsExpertReview)
+
+ // The minimum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields.
+ min?: number @grafanamaturity(NeedsExpertReview)
+ // The maximum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields.
+ max?: number @grafanamaturity(NeedsExpertReview)
+
+ // Convert input values into a display string
+ mappings?: [...#ValueMapping] @grafanamaturity(NeedsExpertReview)
+
+ // Map numeric values to states
+ thresholds?: #ThresholdsConfig @grafanamaturity(NeedsExpertReview)
+
+ // Panel color configuration
+ color?: #FieldColor
+
+ // The behavior when clicking on a result
+ links?: [...] @grafanamaturity(NeedsExpertReview)
+
+ // Alternative to empty string
+ noValue?: string @grafanamaturity(NeedsExpertReview)
+
+ // custom is specified by the FieldConfig field
+ // in panel plugin schemas.
+ custom?: {
+ ...
+ } @grafanamaturity(NeedsExpertReview)
+ } @grafanamaturity(NeedsExpertReview)
+
+ // Row panel
+ #RowPanel: {
+ // The panel type
+ type: "row"
+
+ // Whether this row should be collapsed or not.
+ collapsed: bool | *false
+
+ // Row title
+ title?: string
+
+ // Name of default datasource for the row
+ datasource?: #DataSourceRef
+
+ // Row grid position
+ gridPos?: #GridPos
+
+ // Unique identifier of the panel. Generated by Grafana when creating a new panel. It must be unique within a dashboard, but not globally.
+ id: uint32
+
+ // List of panels in the row
+ panels: [...#Panel | #GraphPanel | #HeatmapPanel]
+
+ // Name of template variable to repeat for.
+ repeat?: string
+ }
+
+ // Support for legacy graph panel.
+ // @deprecated this is a deprecated panel type
+ #GraphPanel: {
+ type: "graph"
+ // @deprecated this is part of the deprecated graph panel
+ legend?: {
+ show: bool | *true
+ sort?: string
+ sortDesc?: bool
+ }
+ ...
+ }
+
+ // Support for legacy heatmap panel.
+ // @deprecated this is a deprecated panel type
+ #HeatmapPanel: {
+ type: "heatmap"
+ ...
+ }
+ }
+ }]
+}
diff --git a/schemas/kindsys/custom/slo/slo.cue b/schemas/kindsys/custom/slo/slo.cue
new file mode 100644
index 000000000..24adaf1ab
--- /dev/null
+++ b/schemas/kindsys/custom/slo/slo.cue
@@ -0,0 +1,178 @@
+package slo
+
+import (
+ "github.com/grafana/kindsys"
+)
+
+// Schema for a Grafana SLO represented as a custom resource.
+kindsys.Custom
+name: "Slo"
+group: "grafana-slo-app"
+crd: {
+ scope: "Namespaced"
+ groupOverride: "grafana-slo-app.plugins.grafana.com"
+}
+codegen: {
+ frontend: false
+}
+lineage: {
+ schemas: [
+ {
+ version: [0, 0]
+ schema:
+ {
+ #Objective: {
+ // is a value between 0 and 1. If the value of the query's output
+ // is above the objective, the SLO is met.
+ value: float64
+
+ // is a Prometheus-parsable time duration string like 24h, 60m. This is the time
+ // window the objective is measured over.
+ window: string
+ }
+
+ #Query: #ThresholdQuery | #RatioQuery | #HistogramQuery | #FreeformQuery
+
+ #ThresholdQuery: {
+ groupByLabels?: [...string]
+ thresholdMetric: #MetricDef
+ threshold: #Threshold
+ }
+
+ #RatioQuery: {
+ groupByLabels?: [...string]
+ successMetric: #MetricDef
+ totalMetric: #MetricDef
+ }
+
+ #HistogramQuery: {
+ groupByLabels?: [...string]
+ histogramMetric: #MetricDef
+ percentile: float64
+ threshold: #Threshold
+ }
+
+ #FreeformQuery: {
+ freeformQuery: string
+ }
+
+ #Threshold: {
+ value: float64
+ operator: "<" | "<=" | "==" | ">=" | ">" @grabana(kind="enum",memberNames="LessThan|LessThanOrEqual|Equal|GreaterThanOrEqual|GreaterThan")
+ }
+
+ #MetricDef: {
+ prometheusMetric: string
+ type?: string
+ }
+
+ #GrafanaMetadata: {
+ organizationId: string
+ userEmail: string
+ userName: string
+ provenance: string
+ }
+
+ #Label: {key: =~"^[a-zA-Z_][a-zA-Z0-9_]*$", value: string}
+
+ #AlertingMetadata: {
+ labels?: [...#Label]
+ annotations?: [...#Label]
+ }
+
+ #Alerting: {
+ // will be attached to all alerts generated by any of these rules.
+ labels?: [...#Label]
+
+ // will be attached to all alerts generated by any of these rules.
+ annotations?: [...#Label]
+
+ // Metadata to attach only to fastBurn alerts.
+ fastBurn?: #AlertingMetadata
+
+ // Metadata to attach only to slowBurn alerts.
+ slowBurn?: #AlertingMetadata
+ }
+
+ spec: {
+ // This is used internally by the plugin for permission management and
+ // similar functions.
+ //
+ grafanaMetadata?: #GrafanaMetadata
+
+ // A unique, random identifier. This value will also be the name of the
+ // resource stored in the API server. Must be set for a PUT.
+ uuid: string
+
+ // should be a short description of your indicator.
Consider names like
+ // "API Availability"
+ name: string
+
+ // is a free-text field that can provide more context to an
+ // SLO. It is shown on SLO drill-down dashboards and in hover text on
+ // the SLO summary dashboard.
+ description: string
+
+ // describes the indicator that will be measured against the
+ // objective. Four query types are supported:
+ // 1. Ratio Queries provide a successMetric and totalMetric whose ratio is the SLI.
+ // 2. Threshold Queries provide a thresholdMetric and a threshold. The
+ // SLI is the boolean result of evaluating the threshold.
+ // 3. Histogram Queries are similar to threshold queries, but they use a
+ // Prometheus histogram metric, percentile value, and a threshold to
+ // generate the boolean output.
+ // 4. Freeform Queries supply a single freeformQuery string that is
+ // evaluated to produce the SLI output. The value should range between 0
+ // and 1.0. Freeform queries should include a time variable named
+ // either `$__rate_interval`, `$__interval`, or `$__range`. This will be used by the
+ // tool to evaluate the burn rate of an SLO over various time
+ // windows. Queries that don't include this interval will have
+ // sensitive and imprecise alerting.
+ // Additionally, "groupByLabels" are used in the first three query types
+ // to define how to group series for evaluation. They are discarded for
+ // freeform queries.
+ query: #Query
+
+ // You can have multiple time windows and objectives associated with an
+ // SLO. Over each rolling time window, the remaining error budget will
+ // be calculated, and separate alerts can be generated for each time
+ // window based on the SLO burn rate or remaining error budget.
+ objectives: [...#Objective]
+
+ // Any additional labels that will be attached to all metrics generated
+ // from the query. These labels are useful for grouping SLOs in
+ // dashboard views that you create by hand.
+ // The key must match the Prometheus label requirements regex:
+ // "^[a-zA-Z_][a-zA-Z0-9_]*$"
+ labels?: [...#Label]
+
+ // Configures the alerting rules that will be generated for each
+ // time window associated with the SLO. Grafana SLOs can generate
+ // alerts when the short-term error budget burn is very high, the
+ // long-term error budget burn rate is high, or when the remaining
+ // error budget is below a certain threshold.
+ alerting?: #Alerting
+ }
+
+ // Status is a common kubernetes subresource that is used to provide
+ // information about the current state that isn't a direct part of the
+ // resource. Here we use it to provide a pointer to the generated
+ // dashboard.
+ status: {
+ drillDownDashboard: {
+ uid: string
+ // The generation of the SLO when this dashboard was last updated.
+ reconciledForGeneration: string
+ lastError: string
+ }
+ prometheusRules: {
+ // The generation of the SLO when these rules were last updated.
+ reconciledForGeneration: string
+ lastError: string
+ }
+
+ }
+ }
+ },
+ ]
+}
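Illustrative note (not part of the patch): a minimal sketch, in CUE, of a value that would unify with the `spec` schema added above, assuming a ratio-style query. The uuid, metric names, and label values here are made up purely for illustration.

// exampleSloSpec is a hypothetical value conforming to the SLO spec schema.
exampleSloSpec: {
	uuid:        "00000000-0000-0000-0000-000000000000" // made-up identifier
	name:        "API Availability"
	description: "Ratio of successful HTTP requests to total requests."
	query: {
		// #RatioQuery: the SLI is successMetric divided by totalMetric.
		groupByLabels: ["cluster"]
		successMetric: {prometheusMetric: "http_requests_success_total"}
		totalMetric: {prometheusMetric: "http_requests_total"}
	}
	// 99.5% of requests succeed over a rolling 28-day window.
	objectives: [{value: 0.995, window: "28d"}]
	labels: [{key: "team", value: "platform"}]
}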