diff --git a/go.mod b/go.mod index 3f8c44304..291183fbb 100644 --- a/go.mod +++ b/go.mod @@ -3,6 +3,7 @@ module github.com/operator-framework/api go 1.15 require ( + cuelang.org/go v0.3.0-beta.6 github.com/blang/semver/v4 v4.0.0 github.com/ghodss/yaml v1.0.0 github.com/go-bindata/go-bindata/v3 v3.1.3 diff --git a/go.sum b/go.sum index c507de685..e04f5f860 100644 --- a/go.sum +++ b/go.sum @@ -21,7 +21,12 @@ cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIA cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cuelang.org/go v0.2.2 h1:i/wFo48WDibGHKQTRZ08nB8PqmGpVpQ2sRflZPj73nQ= +cuelang.org/go v0.2.2/go.mod h1:Dyjk8Y/B3CfFT1jQKJU0g5PpCeMiDe0yMOhk57oXwqo= +cuelang.org/go v0.3.0-beta.6 h1:od1S/Hbl2S45TLSONl95X3O4TXN1za6CUSD13bTxCVk= +cuelang.org/go v0.3.0-beta.6/go.mod h1:Ikvs157igkGV5gFUdYSFa+lWp/CDteVhubPTXyvPRtA= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20201218220906-28db891af037/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= @@ -83,6 +88,10 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell 
v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/cockroachdb/apd/v2 v2.0.1 h1:y1Rh3tEU89D+7Tgbw+lp52T6p/GJLpDmNvr10UWqLTE= +github.com/cockroachdb/apd/v2 v2.0.1/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= @@ -114,6 +123,8 @@ github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25Kn github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/emicklei/proto v1.6.15 h1:XbpwxmuOPrdES97FrSfpyy67SSCV/wBIKXqgJzh6hNw= +github.com/emicklei/proto v1.6.15/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= @@ -329,6 +340,8 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leodido/go-urn v1.2.0/go.mod 
h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= @@ -373,6 +386,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de h1:D5x39vF5KCwKQaw+OC9ZPiLVHXz3UFw2+psEX+gYcto= +github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de/go.mod h1:kJun4WP5gFuHZgRjZUWWuH1DTxCtxbHDOIJsudS8jzY= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= @@ -430,6 +445,8 @@ github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.0/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= 
+github.com/rogpeppe/go-internal v1.6.2-0.20200830194709-1115b6af0369/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= @@ -523,6 +540,7 @@ golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4= golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= @@ -530,6 +548,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20200513190911-00229845015e/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/exp v0.0.0-20210126221216-84987778548c/go.mod h1:I6l2HNBLBZEcrOoCpyKLdY2lHoRZ8lI4x60KMCQDft4= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod 
h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -545,13 +565,17 @@ golang.org/x/lint v0.0.0-20200302205851-738671d3881b h1:Wh+f8QHJXR411sJR8/vRBTZ7 golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mobile v0.0.0-20201217150744-e6ae53a27f4f/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191209134235-331c550502dd/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.1-0.20200828183125-ce943fd02449 h1:xUIPaMhvROX9dhPvRCenIJtU78+lbEenGbgqB5hfHCQ= +golang.org/x/mod v0.3.1-0.20200828183125-ce943fd02449/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net 
v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -694,6 +718,7 @@ golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -703,6 +728,7 @@ golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200612220849-54c614fe050c/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200616133436-c1934b75d054/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200616195046-dc31b401abb5 h1:UaoXseXAWUJUcuJ2E2oczJdLxAJXL0lOmVaBl7kuk+I= golang.org/x/tools v0.0.0-20200616195046-dc31b401abb5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= @@ -800,6 +826,7 @@ gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= 
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20190905181640-827449938966/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200121175148-a6ecf24a6d71/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/pkg/schema/manifests/schema.cue b/pkg/schema/manifests/schema.cue new file mode 100644 index 000000000..fa19086f0 --- /dev/null +++ b/pkg/schema/manifests/schema.cue @@ -0,0 +1,91 @@ +package schema + +#icon: { + base64data: !="" + mediatype: !="" +} + +// Generic gvk struct +#gvk: { + group: !="" + version: !="" + kind: !="" +} + +#olmgvkprovided: #property & { + type: "olm.gvk.provided" + value: #gvk +} + +#olmgvkrequired: #property & { + type: "olm.gvk.required" + value: #gvk +} + +// Generic package struct +#package: { + packageName: !="" + version: !="" +} + +#packageproperty: #property & { + type: "olm.package" + value: #package +} + +// Generic channel struct +#channel: { + name: !="" + replaces?: !="" +} + +#olmchannel: #property & { + type: "olm.channel" + value: #channel +} + +#olmskips: #property & { + type: "olm.skips" + value: !="" +} + +#olmskipRange: #property & { + type: "olm.skipRange" + value: !="" +} + +// Generic property struct +#property: { + type: !="" + ... +} + +#relatedImages: { + name: !="" + image: !="" +} + +// schema: "olm.package" +#olmpackage: #item & { + schema: "olm.package" + name: !="" + defaultChannel: !="" + icon: #icon + description: !="" + ... +} + +// schema: "olm.bundle" +#olmbundle: #item & { + schema: "olm.bundle" + name: !="" + image: !="" + relatedImages: [...#relatedImages] + ... 
+} + +#item: { + schema: !="" + properties: [...#property] + ... +} diff --git a/pkg/schema/validation/interface.go b/pkg/schema/validation/interface.go new file mode 100644 index 000000000..b023be6f3 --- /dev/null +++ b/pkg/schema/validation/interface.go @@ -0,0 +1,21 @@ +package schema + +import ( + "github.com/sirupsen/logrus" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/build" +) + +type ConfigValidator interface { + Validate(b []byte, key string) error +} + +// NewConfigValidator is a constructor that returns a ConfigValidator +func NewConfigValidator(instance *build.Instance, logger *logrus.Entry) ConfigValidator { + return configValidator{ + runtime: &cue.Runtime{}, + instance: instance, + logger: logger, + } +} diff --git a/pkg/schema/validation/testdata/invalid/bundle.json b/pkg/schema/validation/testdata/invalid/bundle.json new file mode 100644 index 000000000..ff690dbb1 --- /dev/null +++ b/pkg/schema/validation/testdata/invalid/bundle.json @@ -0,0 +1,32 @@ +{ + "schema": "olm.bundle", + "name": "etcdoperator-community.v0.6.1", + "image": "quay.io/operatorhubio/etcd:v0.6.1", + "properties":[ + { + "type": "olm.package.provided", + "value": { + "version": "0.6.1" + } + }, + { + "type":"olm.gvk.provided", + "value": { + "group": "etcd.database.coreos.com", + "kind": "EtcdCluster", + "version": "v1beta2" + } + }, + { + "type": "olm.channel", + "value": { + "name": "alpha" + } + } + ], + "relatedImages": [ + { + "image": "quay.io/coreos/etcd-operator@sha256:bd944a211eaf8f31da5e6d69e8541e7cada8f16a9f7a5a570b22478997819943" + } + ] +} diff --git a/pkg/schema/validation/testdata/invalid/channel_property.json b/pkg/schema/validation/testdata/invalid/channel_property.json new file mode 100644 index 000000000..7275dcdad --- /dev/null +++ b/pkg/schema/validation/testdata/invalid/channel_property.json @@ -0,0 +1,6 @@ +{ + "type": "olm.channel", + "value": { + "name": 1 + } +} diff --git a/pkg/schema/validation/testdata/invalid/gvk_property.json 
b/pkg/schema/validation/testdata/invalid/gvk_property.json new file mode 100644 index 000000000..455f8d65f --- /dev/null +++ b/pkg/schema/validation/testdata/invalid/gvk_property.json @@ -0,0 +1,7 @@ +{ + "type":"olm.gvk.provided", + "value": { + "group": "etcd.database.coreos.com", + "kind": "EtcdCluster" + } +} diff --git a/pkg/schema/validation/testdata/invalid/package.json b/pkg/schema/validation/testdata/invalid/package.json new file mode 100644 index 000000000..6d72345cd --- /dev/null +++ b/pkg/schema/validation/testdata/invalid/package.json @@ -0,0 +1,9 @@ +{ + "schema": "olm.package", + "name": "etcd", + "icon": { + "base64data":"iVBORw0KGgoAAAANSUhEUgAAA.....", + "mediatype":"image/png" + }, + "description": "A message about etcd operator, a description of channels" +} diff --git a/pkg/schema/validation/testdata/invalid/package_property.json b/pkg/schema/validation/testdata/invalid/package_property.json new file mode 100644 index 000000000..e3d8a00bb --- /dev/null +++ b/pkg/schema/validation/testdata/invalid/package_property.json @@ -0,0 +1,6 @@ +{ + "type": "olm.package", + "value": { + "version": "0.6.1" + } +} diff --git a/pkg/schema/validation/testdata/invalid/skiprange_property.json b/pkg/schema/validation/testdata/invalid/skiprange_property.json new file mode 100644 index 000000000..5ad69e5f8 --- /dev/null +++ b/pkg/schema/validation/testdata/invalid/skiprange_property.json @@ -0,0 +1,4 @@ +{ + "type": "olm.skipRange", + "value": 1 +} diff --git a/pkg/schema/validation/testdata/invalid/skips_property.json b/pkg/schema/validation/testdata/invalid/skips_property.json new file mode 100644 index 000000000..d33d50495 --- /dev/null +++ b/pkg/schema/validation/testdata/invalid/skips_property.json @@ -0,0 +1,3 @@ +{ + "type": "olm.skips" +} diff --git a/pkg/schema/validation/testdata/valid/bundle.json b/pkg/schema/validation/testdata/valid/bundle.json new file mode 100644 index 000000000..009a7b93d --- /dev/null +++ 
b/pkg/schema/validation/testdata/valid/bundle.json @@ -0,0 +1,34 @@ +{ + "schema": "olm.bundle", + "name": "etcdoperator-community.v0.6.1", + "image": "quay.io/operatorhubio/etcd:v0.6.1", + "properties":[ + { + "type": "olm.package.provided", + "value": { + "packageName": "etcd", + "version": "0.6.1" + } + }, + { + "type":"olm.gvk.provided", + "value": { + "group": "etcd.database.coreos.com", + "kind": "EtcdCluster", + "version": "v1beta2" + } + }, + { + "type": "olm.channel", + "value": { + "name": "alpha" + } + } + ], + "relatedImages": [ + { + "name": "etcdv0.6.1", + "image": "quay.io/coreos/etcd-operator@sha256:bd944a211eaf8f31da5e6d69e8541e7cada8f16a9f7a5a570b22478997819943" + } + ] +} diff --git a/pkg/schema/validation/testdata/valid/channel_property.json b/pkg/schema/validation/testdata/valid/channel_property.json new file mode 100644 index 000000000..edb59f584 --- /dev/null +++ b/pkg/schema/validation/testdata/valid/channel_property.json @@ -0,0 +1,6 @@ +{ + "type": "olm.channel", + "value": { + "name": "alpha" + } +} diff --git a/pkg/schema/validation/testdata/valid/gvk_property.json b/pkg/schema/validation/testdata/valid/gvk_property.json new file mode 100644 index 000000000..b7f4f6a95 --- /dev/null +++ b/pkg/schema/validation/testdata/valid/gvk_property.json @@ -0,0 +1,8 @@ +{ + "type":"olm.gvk.provided", + "value": { + "group": "etcd.database.coreos.com", + "kind": "EtcdCluster", + "version": "v1beta2" + } +} diff --git a/pkg/schema/validation/testdata/valid/package.json b/pkg/schema/validation/testdata/valid/package.json new file mode 100644 index 000000000..d9298061e --- /dev/null +++ b/pkg/schema/validation/testdata/valid/package.json @@ -0,0 +1,10 @@ +{ + "schema": "olm.package", + "name": "etcd", + "defaultChannel": "singlenamespace-alpha", + "icon": { + "base64data":"iVBORw0KGgoAAAANSUhEUgAAA.....", + "mediatype":"image/png" + }, + "description": "A message about etcd operator, a description of channels" +} diff --git 
a/pkg/schema/validation/testdata/valid/package_property.json b/pkg/schema/validation/testdata/valid/package_property.json new file mode 100644 index 000000000..db62cec4f --- /dev/null +++ b/pkg/schema/validation/testdata/valid/package_property.json @@ -0,0 +1,7 @@ +{ + "type": "olm.package", + "value": { + "packageName": "etcd", + "version": "0.6.1" + } +} diff --git a/pkg/schema/validation/testdata/valid/skiprange_property.json b/pkg/schema/validation/testdata/valid/skiprange_property.json new file mode 100644 index 000000000..7ee883a13 --- /dev/null +++ b/pkg/schema/validation/testdata/valid/skiprange_property.json @@ -0,0 +1,4 @@ +{ + "type": "olm.skipRange", + "value": ">=0.9.0 <=0.9.2-0" +} diff --git a/pkg/schema/validation/testdata/valid/skips_property.json b/pkg/schema/validation/testdata/valid/skips_property.json new file mode 100644 index 000000000..69a617f24 --- /dev/null +++ b/pkg/schema/validation/testdata/valid/skips_property.json @@ -0,0 +1,4 @@ +{ + "type": "olm.skips", + "value" : "etcdoperator.v0.6.1" +} diff --git a/pkg/schema/validation/validate.go b/pkg/schema/validation/validate.go new file mode 100644 index 000000000..f3b9e6947 --- /dev/null +++ b/pkg/schema/validation/validate.go @@ -0,0 +1,52 @@ +package schema + +import ( + "fmt" + + "github.com/sirupsen/logrus" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/build" + "cuelang.org/go/encoding/json" +) + +const ( + olmBundle = "olmbundle" + olmPackage = "olmpackage" + olmChannel = "olmchannel" + olmSkips = "olmskips" + olmSkipRange = "olmskipRange" + olmGVKProvided = "olmgvkprovided" + olmGVKRequired = "olmgvkrequired" + olmPackageProperty = "packageproperty" +) + +type configValidator struct { + runtime *cue.Runtime + instance *build.Instance + logger *logrus.Entry +} + +func (c configValidator) Validate(b []byte, key string) error { + inst, err := c.runtime.Build(c.instance) + if err != nil { + return err + } + + v := inst.LookupDef(key) + if !v.Exists() { + err := fmt.Errorf("unable to 
find the definition %s in schema", key) + c.logger.WithError(err).Debug(key) + return err + } + jsonAsCue, err := json.Decode(c.runtime, c.instance.Dir, b) + if err != nil { + return fmt.Errorf("could not parse json: %v", err) + } + if err := v.Unify(jsonAsCue.Value()).Validate(cue.Concrete(true)); err != nil { + c.logger.WithError(err).Debug("validation error") + return err + } + + return nil +} diff --git a/pkg/schema/validation/validate_test.go b/pkg/schema/validation/validate_test.go new file mode 100644 index 000000000..d6bb38256 --- /dev/null +++ b/pkg/schema/validation/validate_test.go @@ -0,0 +1,160 @@ +package schema + +import ( + "io/ioutil" + "testing" + + "github.com/sirupsen/logrus" + "github.com/stretchr/testify/require" + + "cuelang.org/go/cue/load" +) + +const schemaPath = "../manifests" + +func TestValidateConfig(t *testing.T) { + var table = []struct { + description string + filename string + kind string + hasError bool + errString string + }{ + { + description: "valid bundle config", + filename: "./testdata/valid/bundle.json", + kind: olmBundle, + hasError: false, + }, + { + description: "valid package config", + filename: "./testdata/valid/package.json", + kind: olmPackage, + hasError: false, + }, + { + description: "valid channel property config", + filename: "./testdata/valid/channel_property.json", + kind: olmChannel, + hasError: false, + }, + { + description: "valid gvk property config", + filename: "./testdata/valid/gvk_property.json", + kind: olmGVKProvided, + hasError: false, + }, + { + description: "valid package property config", + filename: "./testdata/valid/package_property.json", + kind: olmPackageProperty, + hasError: false, + }, + { + description: "valid package config", + filename: "./testdata/valid/package.json", + kind: olmPackage, + hasError: false, + }, + { + description: "valid skiprange property config", + filename: "./testdata/valid/skiprange_property.json", + kind: olmSkipRange, + hasError: false, + }, + { + description: 
"valid skips property config", + filename: "./testdata/valid/skips_property.json", + kind: olmSkips, + hasError: false, + }, + { + description: "invalid bundle config", + filename: "./testdata/invalid/bundle.json", + kind: olmBundle, + hasError: true, + errString: `#olmbundle.relatedImages.0.name: incomplete value !=""`, + }, + { + description: "invalid channel property config", + filename: "./testdata/invalid/channel_property.json", + kind: olmChannel, + hasError: true, + errString: `#olmchannel.value.name: conflicting values !="" and 1 (mismatched types string and int)`, + }, + { + description: "invalid gvk property config", + filename: "./testdata/invalid/gvk_property.json", + kind: olmGVKProvided, + hasError: true, + errString: `#olmgvkprovided.value.version: incomplete value !=""`, + }, + { + description: "invalid package property config", + filename: "./testdata/invalid/package_property.json", + kind: olmPackageProperty, + hasError: true, + errString: `#packageproperty.value.packageName: incomplete value !=""`, + }, + { + description: "invalid package config", + filename: "./testdata/invalid/package.json", + kind: olmPackage, + hasError: true, + errString: `#olmpackage.defaultChannel: incomplete value !=""`, + }, + { + description: "invalid skiprange property config", + filename: "./testdata/invalid/skiprange_property.json", + kind: olmSkipRange, + hasError: true, + errString: `#olmskipRange.value: conflicting values !="" and 1 (mismatched types string and int)`, + }, + { + description: "invalid skips property config", + filename: "./testdata/invalid/skips_property.json", + kind: olmSkips, + hasError: true, + errString: `#olmskips.value: incomplete value !=""`, + }, + { + description: "mismatch schema config", + filename: "./testdata/invalid/bundle.json", + kind: olmPackage, + hasError: true, + errString: `#olmpackage.schema: conflicting values "olm.bundle" and "olm.package"`, + }, + } + + for _, tt := range table { + t.Run(tt.description, func(t 
*testing.T) { + logger := logrus.NewEntry(logrus.New()) + // load schema for config definitions + instance := load.Instances([]string{"."}, &load.Config{ + Dir: schemaPath, + }) + if len(instance) > 1 { + t.Fatalf("multiple instance loading currently not supported: %s", schemaPath) + } + if len(instance) < 1 { + t.Fatalf("no instances found: %s", schemaPath) + } + + // Config validator + configValidator := NewConfigValidator(instance[0], logger) + // Read json file + content, err := ioutil.ReadFile(tt.filename) + require.NoError(t, err) + + // Validate json against schema + err = configValidator.Validate(content, tt.kind) + + if tt.hasError { + require.Error(t, err) + require.Contains(t, err.Error(), tt.errString) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/vendor/cuelang.org/go/AUTHORS b/vendor/cuelang.org/go/AUTHORS new file mode 100644 index 000000000..884392fca --- /dev/null +++ b/vendor/cuelang.org/go/AUTHORS @@ -0,0 +1,6 @@ +# This is the list of CUE authors for copyright purposes. +# +# This does not necessarily list everyone who has contributed code, since in +# some cases, their employer may be the copyright holder. To see the full list +# of contributors, see the revision history in source control. +Google LLC diff --git a/vendor/cuelang.org/go/LICENSE b/vendor/cuelang.org/go/LICENSE new file mode 100644 index 000000000..d64569567 --- /dev/null +++ b/vendor/cuelang.org/go/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/cuelang.org/go/cue/ast/ast.go b/vendor/cuelang.org/go/cue/ast/ast.go new file mode 100644 index 000000000..4bab829da --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/ast.go @@ -0,0 +1,1081 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Package ast declares the types used to represent syntax trees for CUE +// packages. +package ast // import "cuelang.org/go/cue/ast" + +import ( + "fmt" + "strings" + + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/token" +) + +// ---------------------------------------------------------------------------- +// Interfaces +// +// There are two main classes of nodes: expressions, clauses, and declaration +// nodes. The node names usually match the corresponding CUE spec production +// names to which they correspond. The node fields correspond to the individual +// parts of the respective productions. +// +// All nodes contain position information marking the beginning of the +// corresponding source text segment; it is accessible via the Pos accessor +// method. Nodes may contain additional position info for language constructs +// where comments may be found between parts of the construct (typically any +// larger, parenthesized subpart). That position information is needed to +// properly position comments when printing the construct. + +// A Node represents any node in the abstract syntax tree. +type Node interface { + Pos() token.Pos // position of first character belonging to the node + End() token.Pos // position of first character immediately after the node + + // pos reports the pointer to the position of first character belonging to + // the node or nil if there is no such position. + pos() *token.Pos + + // Deprecated: use ast.Comments + Comments() []*CommentGroup + + // Deprecated: use ast.AddComment + AddComment(*CommentGroup) + commentInfo() *comments +} + +// Name describes the type of n. +func Name(n Node) string { + s := fmt.Sprintf("%T", n) + return strings.ToLower(s[strings.Index(s, "ast.")+4:]) +} + +func getPos(n Node) token.Pos { + p := n.pos() + if p == nil { + return token.NoPos + } + return *p +} + +// SetPos sets a node to the given position, if possible. 
+func SetPos(n Node, p token.Pos) { + ptr := n.pos() + if ptr == nil { + return + } + *ptr = p +} + +// SetRelPos sets the relative position of a node without modifying its +// file position. Setting it to token.NoRelPos allows a node to adopt default +// formatting. +func SetRelPos(n Node, p token.RelPos) { + ptr := n.pos() + if ptr == nil { + return + } + pos := *ptr + *ptr = pos.WithRel(p) +} + +// An Expr is implemented by all expression nodes. +type Expr interface { + Node + declNode() // An expression can be used as a declaration. + exprNode() +} + +type expr struct{ decl } + +func (expr) exprNode() {} + +// A Decl node is implemented by all declarations. +type Decl interface { + Node + declNode() +} + +type decl struct{} + +func (decl) declNode() {} + +// A Label is any production that can be used as a LHS label. +type Label interface { + Node + labelNode() +} + +type label struct{} + +func (l label) labelNode() {} + +// Clause nodes are part of comprehensions. +type Clause interface { + Node + clauseNode() +} + +type clause struct{} + +func (clause) clauseNode() {} + +func (x *ForClause) clauseNode() {} +func (x *IfClause) clauseNode() {} +func (x *Alias) clauseNode() {} + +// Comments + +type comments struct { + groups *[]*CommentGroup +} + +func (c *comments) commentInfo() *comments { return c } + +func (c *comments) Comments() []*CommentGroup { + if c.groups == nil { + return []*CommentGroup{} + } + return *c.groups +} + +// // AddComment adds the given comments to the fields. +// // If line is true the comment is inserted at the preceding token. + +func (c *comments) AddComment(cg *CommentGroup) { + if cg == nil { + return + } + if c.groups == nil { + a := []*CommentGroup{cg} + c.groups = &a + return + } + *c.groups = append(*c.groups, cg) +} + +func (c *comments) SetComments(cgs []*CommentGroup) { + if c.groups == nil { + a := cgs + c.groups = &a + return + } + *c.groups = cgs +} + +// A Comment node represents a single //-style or /*-style comment. 
+type Comment struct { + Slash token.Pos // position of "/" starting the comment + Text string // comment text (excluding '\n' for //-style comments) +} + +func (c *Comment) Comments() []*CommentGroup { return nil } +func (c *Comment) AddComment(*CommentGroup) {} +func (c *Comment) commentInfo() *comments { return nil } + +func (c *Comment) Pos() token.Pos { return c.Slash } +func (c *Comment) pos() *token.Pos { return &c.Slash } +func (c *Comment) End() token.Pos { return c.Slash.Add(len(c.Text)) } + +// A CommentGroup represents a sequence of comments +// with no other tokens and no empty lines between. +type CommentGroup struct { + // TODO: remove and use the token position of the first comment. + Doc bool + Line bool // true if it is on the same line as the node's end pos. + + // Position indicates where a comment should be attached if a node has + // multiple tokens. 0 means before the first token, 1 means before the + // second, etc. For instance, for a field, the positions are: + // <0> Label <1> ":" <2> Expr <3> "," <4> + Position int8 + List []*Comment // len(List) > 0 + + decl +} + +func (g *CommentGroup) Pos() token.Pos { return getPos(g) } +func (g *CommentGroup) pos() *token.Pos { return g.List[0].pos() } +func (g *CommentGroup) End() token.Pos { return g.List[len(g.List)-1].End() } + +func (g *CommentGroup) Comments() []*CommentGroup { return nil } +func (g *CommentGroup) AddComment(*CommentGroup) {} +func (g *CommentGroup) commentInfo() *comments { return nil } + +func isWhitespace(ch byte) bool { return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' } + +func stripTrailingWhitespace(s string) string { + i := len(s) + for i > 0 && isWhitespace(s[i-1]) { + i-- + } + return s[0:i] +} + +// Text returns the text of the comment. +// Comment markers (//, /*, and */), the first space of a line comment, and +// leading and trailing empty lines are removed. Multiple empty lines are +// reduced to one, and trailing space on lines is trimmed. 
Unless the result +// is empty, it is newline-terminated. +func (g *CommentGroup) Text() string { + if g == nil { + return "" + } + comments := make([]string, len(g.List)) + for i, c := range g.List { + comments[i] = c.Text + } + + lines := make([]string, 0, 10) // most comments are less than 10 lines + for _, c := range comments { + // Remove comment markers. + // The parser has given us exactly the comment text. + switch c[1] { + case '/': + //-style comment (no newline at the end) + c = c[2:] + // strip first space - required for Example tests + if len(c) > 0 && c[0] == ' ' { + c = c[1:] + } + case '*': + /*-style comment */ + c = c[2 : len(c)-2] + } + + // Split on newlines. + cl := strings.Split(c, "\n") + + // Walk lines, stripping trailing white space and adding to list. + for _, l := range cl { + lines = append(lines, stripTrailingWhitespace(l)) + } + } + + // Remove leading blank lines; convert runs of + // interior blank lines to a single blank line. + n := 0 + for _, line := range lines { + if line != "" || n > 0 && lines[n-1] != "" { + lines[n] = line + n++ + } + } + lines = lines[0:n] + + // Add final "" entry to get trailing newline from Join. + if n > 0 && lines[n-1] != "" { + lines = append(lines, "") + } + + return strings.Join(lines, "\n") +} + +// An Attribute provides meta data about a field. +type Attribute struct { + At token.Pos + Text string // must be a valid attribute format. + + comments + decl +} + +func (a *Attribute) Pos() token.Pos { return a.At } +func (a *Attribute) pos() *token.Pos { return &a.At } +func (a *Attribute) End() token.Pos { return a.At.Add(len(a.Text)) } + +func (a *Attribute) Split() (key, body string) { + s := a.Text + p := strings.IndexByte(s, '(') + if p < 0 || !strings.HasPrefix(s, "@") || !strings.HasSuffix(s, ")") { + return "", "" + } + return a.Text[1:p], a.Text[p+1 : len(s)-1] +} + +// A Field represents a field declaration in a struct. +type Field struct { + Label Label // must have at least one element. 
+ Optional token.Pos + + // No TokenPos: Value must be an StructLit with one field. + TokenPos token.Pos + Token token.Token // ':' or '::', ILLEGAL implies ':' + + Value Expr // the value associated with this field. + + Attrs []*Attribute + + comments + decl +} + +func (d *Field) Pos() token.Pos { return d.Label.Pos() } +func (d *Field) pos() *token.Pos { return d.Label.pos() } +func (d *Field) End() token.Pos { + if len(d.Attrs) > 0 { + return d.Attrs[len(d.Attrs)-1].End() + } + return d.Value.End() +} + +// TODO: make Alias a type of Field. This is possible now we have different +// separator types. + +// An Alias binds another field to the alias name in the current struct. +type Alias struct { + Ident *Ident // field name, always an Ident + Equal token.Pos // position of "=" + Expr Expr // An Ident or SelectorExpr + + comments + decl + expr + label +} + +func (a *Alias) Pos() token.Pos { return a.Ident.Pos() } +func (a *Alias) pos() *token.Pos { return a.Ident.pos() } +func (a *Alias) End() token.Pos { return a.Expr.End() } + +// A Comprehension node represents a comprehension declaration. +type Comprehension struct { + Clauses []Clause // There must be at least one clause. + Value Expr // Must be a struct TODO: change to Struct + + comments + decl + expr // TODO: only allow Comprehension in "Embedding" productions. +} + +func (x *Comprehension) Pos() token.Pos { return getPos(x) } +func (x *Comprehension) pos() *token.Pos { return x.Clauses[0].pos() } +func (x *Comprehension) End() token.Pos { + return x.Value.End() +} + +// ---------------------------------------------------------------------------- +// Expressions and types +// +// An expression is represented by a tree consisting of one +// or more of the following concrete expression nodes. + +// A BadExpr node is a placeholder for expressions containing +// syntax errors for which no correct expression nodes can be +// created. 
This is different from an ErrorExpr which represents +// an explicitly marked error in the source. +type BadExpr struct { + From, To token.Pos // position range of bad expression + + comments + expr +} + +// A BottomLit indicates an error. +type BottomLit struct { + Bottom token.Pos + + comments + expr +} + +// An Ident node represents an left-hand side identifier. +type Ident struct { + NamePos token.Pos // identifier position + + // This LHS path element may be an identifier. Possible forms: + // foo: a normal identifier + // "foo": JSON compatible + // : a template shorthand + Name string + + Scope Node // scope in which node was found or nil if referring directly + Node Node + + comments + label + expr +} + +// A TemplateLabel represents a field template declaration in a struct. +// +// Deprecated: use square bracket notation through ListLit. +type TemplateLabel struct { + Langle token.Pos + Ident *Ident + Rangle token.Pos + + comments + label +} + +// A BasicLit node represents a literal of basic type. +type BasicLit struct { + ValuePos token.Pos // literal position + Kind token.Token // INT, FLOAT, DURATION, or STRING + Value string // literal string; e.g. 42, 0x7f, 3.14, 1_234_567, 1e-9, 2.4i, 'a', '\x7f', "foo", or '\m\n\o' + + comments + expr + label +} + +// TODO: introduce and use NewLabel and NewBytes and perhaps NewText (in the +// later case NewString would return a string or bytes type) to distinguish from +// NewString. Consider how to pass indentation information. + +// NewString creates a new BasicLit with a string value without position. +// It quotes the given string. +// Useful for ASTs generated by code other than the CUE parser. +func NewString(str string) *BasicLit { + str = literal.String.Quote(str) + return &BasicLit{Kind: token.STRING, ValuePos: token.NoPos, Value: str} +} + +// NewNull creates a new BasicLit configured to be a null value. +// Useful for ASTs generated by code other than the CUE parser. 
+func NewNull() *BasicLit { + return &BasicLit{Kind: token.NULL, Value: "null"} +} + +// NewLit creates a new BasicLit with from a token type and string without +// position. +// Useful for ASTs generated by code other than the CUE parser. +func NewLit(tok token.Token, s string) *BasicLit { + return &BasicLit{Kind: tok, Value: s} +} + +// NewBool creates a new BasicLit with a bool value without position. +// Useful for ASTs generated by code other than the CUE parser. +func NewBool(b bool) *BasicLit { + x := &BasicLit{} + if b { + x.Kind = token.TRUE + x.Value = "true" + } else { + x.Kind = token.FALSE + x.Value = "false" + } + return x +} + +// TODO: +// - use CUE-specific quoting (hoist functionality in export) +// - NewBytes + +// A Interpolation node represents a string or bytes interpolation. +type Interpolation struct { + Elts []Expr // interleaving of strings and expressions. + + comments + expr + label +} + +// A StructLit node represents a literal struct. +type StructLit struct { + Lbrace token.Pos // position of "{" + Elts []Decl // list of elements; or nil + Rbrace token.Pos // position of "}" + + comments + expr +} + +// NewStruct creates a struct from the given fields. +// +// A field is either a *Field, an *Elipsis, *LetClause, a *CommentGroup, or a +// Label, optionally followed by a a token.OPTION to indicate the field is +// optional, optionally followed by a token.ISA to indicate the field is a +// definition followed by an expression for the field value. +// +// It will panic if a values not matching these patterns are given. Useful for +// ASTs generated by code other than the CUE parser. +func NewStruct(fields ...interface{}) *StructLit { + s := &StructLit{ + // Set default positions so that comment attachment is as expected. 
+ Lbrace: token.NoSpace.Pos(), + } + for i := 0; i < len(fields); i++ { + var ( + label Label + optional = token.NoPos + tok = token.ILLEGAL + expr Expr + ) + + switch x := fields[i].(type) { + case *Field: + s.Elts = append(s.Elts, x) + continue + case *CommentGroup: + s.Elts = append(s.Elts, x) + continue + case *Ellipsis: + s.Elts = append(s.Elts, x) + continue + case *LetClause: + s.Elts = append(s.Elts, x) + continue + case *embedding: + s.Elts = append(s.Elts, (*EmbedDecl)(x)) + continue + case Label: + label = x + case string: + label = NewString(x) + default: + panic(fmt.Sprintf("unsupported label type %T", x)) + } + + inner: + for i++; i < len(fields); i++ { + switch x := (fields[i]).(type) { + case Expr: + expr = x + break inner + case token.Token: + switch x { + case token.ISA: + tok = x + case token.OPTION: + optional = token.Blank.Pos() + case token.COLON, token.ILLEGAL: + default: + panic(fmt.Sprintf("invalid token %s", x)) + } + default: + panic(fmt.Sprintf("unsupported expression type %T", x)) + } + } + if expr == nil { + panic("label not matched with expression") + } + s.Elts = append(s.Elts, &Field{ + Label: label, + Optional: optional, + Token: tok, + Value: expr, + }) + } + return s +} + +// Embed can be used in conjunction with NewStruct to embed values. +func Embed(x Expr) *embedding { + return (*embedding)(&EmbedDecl{Expr: x}) +} + +type embedding EmbedDecl + +// A ListLit node represents a literal list. +type ListLit struct { + Lbrack token.Pos // position of "[" + + // TODO: change to embedding or similar. + Elts []Expr // list of composite elements; or nil + Rbrack token.Pos // position of "]" + + comments + expr + label +} + +// NewList creates a list of Expressions. +// Useful for ASTs generated by code other than the CUE parser. 
+func NewList(exprs ...Expr) *ListLit { + return &ListLit{Elts: exprs} +} + +type Ellipsis struct { + Ellipsis token.Pos // open list if set + Type Expr // type for the remaining elements + + comments + decl + expr +} + +// A ListComprehension node represents as list comprehension. +type ListComprehension struct { + Lbrack token.Pos // position of "[" + Expr Expr + Clauses []Clause // Feed or Guard (TODO let) + Rbrack token.Pos // position of "]" + + comments + expr +} + +// A ForClause node represents a for clause in a comprehension. +type ForClause struct { + For token.Pos + Key *Ident // allow pattern matching? + // TODO: change to Comma + Colon token.Pos + Value *Ident // allow pattern matching? + In token.Pos + Source Expr + + comments + clause +} + +// A IfClause node represents an if guard clause in a comprehension. +type IfClause struct { + If token.Pos + Condition Expr + + comments + clause +} + +// A LetClause node represents a let clause in a comprehension. +type LetClause struct { + Let token.Pos + Ident *Ident + Equal token.Pos + Expr Expr + + comments + clause + decl +} + +// A ParenExpr node represents a parenthesized expression. +type ParenExpr struct { + Lparen token.Pos // position of "(" + X Expr // parenthesized expression + Rparen token.Pos // position of ")" + + comments + expr +} + +// A SelectorExpr node represents an expression followed by a selector. +type SelectorExpr struct { + X Expr // expression + Sel Label // field selector + + comments + expr +} + +// NewSel creates a sequence of selectors. +// Useful for ASTs generated by code other than the CUE parser. +func NewSel(x Expr, sel ...string) Expr { + for _, s := range sel { + x = &SelectorExpr{X: x, Sel: NewIdent(s)} + } + return x +} + +// An IndexExpr node represents an expression followed by an index. 
+type IndexExpr struct { + X Expr // expression + Lbrack token.Pos // position of "[" + Index Expr // index expression + Rbrack token.Pos // position of "]" + + comments + expr +} + +// An SliceExpr node represents an expression followed by slice indices. +type SliceExpr struct { + X Expr // expression + Lbrack token.Pos // position of "[" + Low Expr // begin of slice range; or nil + High Expr // end of slice range; or nil + Rbrack token.Pos // position of "]" + + comments + expr +} + +// A CallExpr node represents an expression followed by an argument list. +type CallExpr struct { + Fun Expr // function expression + Lparen token.Pos // position of "(" + Args []Expr // function arguments; or nil + Rparen token.Pos // position of ")" + + comments + expr +} + +// NewCall creates a new CallExpr. +// Useful for ASTs generated by code other than the CUE parser. +func NewCall(fun Expr, args ...Expr) *CallExpr { + return &CallExpr{Fun: fun, Args: args} +} + +// A UnaryExpr node represents a unary expression. +type UnaryExpr struct { + OpPos token.Pos // position of Op + Op token.Token // operator + X Expr // operand + + comments + expr +} + +// A BinaryExpr node represents a binary expression. +type BinaryExpr struct { + X Expr // left operand + OpPos token.Pos // position of Op + Op token.Token // operator + Y Expr // right operand + + comments + expr +} + +// NewBinExpr creates for list of expressions of length 2 or greater a chained +// binary expression of the form (((x1 op x2) op x3) ...). For lists of length +// 1 it returns the expression itself. It panics for empty lists. +// Useful for ASTs generated by code other than the CUE parser. +func NewBinExpr(op token.Token, operands ...Expr) Expr { + if len(operands) == 0 { + return nil + } + expr := operands[0] + for _, e := range operands[1:] { + expr = &BinaryExpr{X: expr, Op: op, Y: e} + } + return expr +} + +// token.Pos and End implementations for expression/type nodes. 
+ +func (x *BadExpr) Pos() token.Pos { return x.From } +func (x *BadExpr) pos() *token.Pos { return &x.From } +func (x *Ident) Pos() token.Pos { return x.NamePos } +func (x *Ident) pos() *token.Pos { return &x.NamePos } +func (x *TemplateLabel) Pos() token.Pos { return x.Langle } +func (x *TemplateLabel) pos() *token.Pos { return &x.Langle } +func (x *BasicLit) Pos() token.Pos { return x.ValuePos } +func (x *BasicLit) pos() *token.Pos { return &x.ValuePos } +func (x *Interpolation) Pos() token.Pos { return x.Elts[0].Pos() } +func (x *Interpolation) pos() *token.Pos { return x.Elts[0].pos() } +func (x *StructLit) Pos() token.Pos { return getPos(x) } +func (x *StructLit) pos() *token.Pos { + if x.Lbrace == token.NoPos && len(x.Elts) > 0 { + return x.Elts[0].pos() + } + return &x.Lbrace +} + +func (x *ListLit) Pos() token.Pos { return x.Lbrack } +func (x *ListLit) pos() *token.Pos { return &x.Lbrack } +func (x *Ellipsis) Pos() token.Pos { return x.Ellipsis } +func (x *Ellipsis) pos() *token.Pos { return &x.Ellipsis } +func (x *ListComprehension) Pos() token.Pos { return x.Lbrack } +func (x *ListComprehension) pos() *token.Pos { return &x.Lbrack } +func (x *LetClause) Pos() token.Pos { return x.Let } +func (x *LetClause) pos() *token.Pos { return &x.Let } +func (x *ForClause) Pos() token.Pos { return x.For } +func (x *ForClause) pos() *token.Pos { return &x.For } +func (x *IfClause) Pos() token.Pos { return x.If } +func (x *IfClause) pos() *token.Pos { return &x.If } +func (x *ParenExpr) Pos() token.Pos { return x.Lparen } +func (x *ParenExpr) pos() *token.Pos { return &x.Lparen } +func (x *SelectorExpr) Pos() token.Pos { return x.X.Pos() } +func (x *SelectorExpr) pos() *token.Pos { return x.X.pos() } +func (x *IndexExpr) Pos() token.Pos { return x.X.Pos() } +func (x *IndexExpr) pos() *token.Pos { return x.X.pos() } +func (x *SliceExpr) Pos() token.Pos { return x.X.Pos() } +func (x *SliceExpr) pos() *token.Pos { return x.X.pos() } +func (x *CallExpr) Pos() token.Pos { 
return x.Fun.Pos() } +func (x *CallExpr) pos() *token.Pos { return x.Fun.pos() } +func (x *UnaryExpr) Pos() token.Pos { return x.OpPos } +func (x *UnaryExpr) pos() *token.Pos { return &x.OpPos } +func (x *BinaryExpr) Pos() token.Pos { return x.X.Pos() } +func (x *BinaryExpr) pos() *token.Pos { return x.X.pos() } +func (x *BottomLit) Pos() token.Pos { return x.Bottom } +func (x *BottomLit) pos() *token.Pos { return &x.Bottom } + +func (x *BadExpr) End() token.Pos { return x.To } +func (x *Ident) End() token.Pos { + return x.NamePos.Add(len(x.Name)) +} +func (x *TemplateLabel) End() token.Pos { return x.Rangle } +func (x *BasicLit) End() token.Pos { return x.ValuePos.Add(len(x.Value)) } + +func (x *Interpolation) End() token.Pos { return x.Elts[len(x.Elts)-1].Pos() } +func (x *StructLit) End() token.Pos { + if x.Rbrace == token.NoPos && len(x.Elts) > 0 { + return x.Elts[len(x.Elts)-1].Pos() + } + return x.Rbrace.Add(1) +} +func (x *ListLit) End() token.Pos { return x.Rbrack.Add(1) } +func (x *Ellipsis) End() token.Pos { + if x.Type != nil { + return x.Type.End() + } + return x.Ellipsis.Add(3) // len("...") +} +func (x *ListComprehension) End() token.Pos { return x.Rbrack } +func (x *LetClause) End() token.Pos { return x.Expr.End() } +func (x *ForClause) End() token.Pos { return x.Source.End() } +func (x *IfClause) End() token.Pos { return x.Condition.End() } +func (x *ParenExpr) End() token.Pos { return x.Rparen.Add(1) } +func (x *SelectorExpr) End() token.Pos { return x.Sel.End() } +func (x *IndexExpr) End() token.Pos { return x.Rbrack.Add(1) } +func (x *SliceExpr) End() token.Pos { return x.Rbrack.Add(1) } +func (x *CallExpr) End() token.Pos { return x.Rparen.Add(1) } +func (x *UnaryExpr) End() token.Pos { return x.X.End() } +func (x *BinaryExpr) End() token.Pos { return x.Y.End() } +func (x *BottomLit) End() token.Pos { return x.Bottom.Add(1) } + +// ---------------------------------------------------------------------------- +// Convenience functions for Idents + 
+// NewIdent creates a new Ident without position. +// Useful for ASTs generated by code other than the CUE parser. +func NewIdent(name string) *Ident { + return &Ident{token.NoPos, name, nil, nil, comments{}, label{}, expr{}} +} + +func (id *Ident) String() string { + if id != nil { + return id.Name + } + return "" +} + +// ---------------------------------------------------------------------------- +// Declarations + +// An ImportSpec node represents a single package import. +type ImportSpec struct { + Name *Ident // local package name (including "."); or nil + Path *BasicLit // import path + EndPos token.Pos // end of spec (overrides Path.Pos if nonzero) + + comments +} + +func (*ImportSpec) specNode() {} + +func NewImport(name *Ident, importPath string) *ImportSpec { + importPath = literal.String.Quote(importPath) + path := &BasicLit{Kind: token.STRING, Value: importPath} + return &ImportSpec{Name: name, Path: path} +} + +// Pos and End implementations for spec nodes. + +func (s *ImportSpec) Pos() token.Pos { return getPos(s) } +func (s *ImportSpec) pos() *token.Pos { + if s.Name != nil { + return s.Name.pos() + } + return s.Path.pos() +} + +// func (s *AliasSpec) Pos() token.Pos { return s.Name.Pos() } +// func (s *ValueSpec) Pos() token.Pos { return s.Names[0].Pos() } +// func (s *TypeSpec) Pos() token.Pos { return s.Name.Pos() } + +func (s *ImportSpec) End() token.Pos { + if s.EndPos != token.NoPos { + return s.EndPos + } + return s.Path.End() +} + +// A BadDecl node is a placeholder for declarations containing +// syntax errors for which no correct declaration nodes can be +// created. +type BadDecl struct { + From, To token.Pos // position range of bad declaration + + comments + decl +} + +// A ImportDecl node represents a series of import declarations. A valid +// Lparen position (Lparen.Line > 0) indicates a parenthesized declaration. 
+type ImportDecl struct { + Import token.Pos + Lparen token.Pos // position of '(', if any + Specs []*ImportSpec + Rparen token.Pos // position of ')', if any + + comments + decl +} + +type Spec interface { + Node + specNode() +} + +// An EmbedDecl node represents a single expression used as a declaration. +// The expressions in this declaration is what will be emitted as +// configuration output. +// +// An EmbedDecl may only appear at the top level. +type EmbedDecl struct { + Expr Expr + + comments + decl +} + +// Pos and End implementations for declaration nodes. + +func (d *BadDecl) Pos() token.Pos { return d.From } +func (d *BadDecl) pos() *token.Pos { return &d.From } +func (d *ImportDecl) Pos() token.Pos { return d.Import } +func (d *ImportDecl) pos() *token.Pos { return &d.Import } +func (d *EmbedDecl) Pos() token.Pos { return d.Expr.Pos() } +func (d *EmbedDecl) pos() *token.Pos { return d.Expr.pos() } + +func (d *BadDecl) End() token.Pos { return d.To } +func (d *ImportDecl) End() token.Pos { + if d.Rparen.IsValid() { + return d.Rparen.Add(1) + } + if len(d.Specs) == 0 { + return token.NoPos + } + return d.Specs[0].End() +} +func (d *EmbedDecl) End() token.Pos { return d.Expr.End() } + +// ---------------------------------------------------------------------------- +// Files and packages + +// A File node represents a Go source file. +// +// The Comments list contains all comments in the source file in order of +// appearance, including the comments that are pointed to from other nodes +// via Doc and Comment fields. +type File struct { + Filename string + Decls []Decl // top-level declarations; or nil + + Imports []*ImportSpec // imports in this file + Unresolved []*Ident // unresolved identifiers in this file + + comments +} + +// Preamble returns the declarations of the preamble. 
+func (f *File) Preamble() []Decl { + p := 0 +outer: + for i, d := range f.Decls { + switch d.(type) { + default: + break outer + + case *Package: + p = i + 1 + case *CommentGroup: + case *Attribute: + case *ImportDecl: + p = i + 1 + } + } + return f.Decls[:p] +} + +func (f *File) VisitImports(fn func(d *ImportDecl)) { + for _, d := range f.Decls { + switch x := d.(type) { + case *CommentGroup: + case *Package: + case *Attribute: + case *ImportDecl: + fn(x) + default: + return + } + } +} + +// PackageName returns the package name associated with this file or "" if no +// package is associated. +func (f *File) PackageName() string { + for _, d := range f.Decls { + switch x := d.(type) { + case *Package: + return x.Name.Name + case *CommentGroup, *Attribute: + default: + return "" + } + } + return "" +} + +func (f *File) Pos() token.Pos { + if len(f.Decls) > 0 { + return f.Decls[0].Pos() + } + if f.Filename != "" { + // TODO. Do something more principled and efficient. + return token.NewFile(f.Filename, -1, 1).Pos(0, 0) + } + return token.NoPos +} + +func (f *File) pos() *token.Pos { + if len(f.Decls) > 0 { + return f.Decls[0].pos() + } + if f.Filename != "" { + return nil + } + return nil +} + +func (f *File) End() token.Pos { + if n := len(f.Decls); n > 0 { + return f.Decls[n-1].End() + } + return token.NoPos +} + +// A Package represents a package clause. 
+type Package struct { + PackagePos token.Pos // position of "package" pseudo-keyword + Name *Ident // package name + + comments + decl +} + +func (p *Package) Pos() token.Pos { return getPos(p) } +func (p *Package) pos() *token.Pos { + if p.PackagePos != token.NoPos { + return &p.PackagePos + } + if p.Name != nil { + return p.Name.pos() + } + return nil +} + +func (p *Package) End() token.Pos { + if p.Name != nil { + return p.Name.End() + } + return token.NoPos +} diff --git a/vendor/cuelang.org/go/cue/ast/astutil/apply.go b/vendor/cuelang.org/go/cue/ast/astutil/apply.go new file mode 100644 index 000000000..fa0572d6c --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/astutil/apply.go @@ -0,0 +1,522 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package astutil + +import ( + "encoding/hex" + "fmt" + "hash/fnv" + "reflect" + + "cuelang.org/go/cue/ast" +) + +// A Cursor describes a node encountered during Apply. +// Information about the node and its parent is available +// from the Node, Parent, and Index methods. +// +// The methods Replace, Delete, InsertBefore, and InsertAfter +// can be used to change the AST without disrupting Apply. +// Delete, InsertBefore, and InsertAfter are only defined for modifying +// a StructLit and will panic in any other context. +type Cursor interface { + // Node returns the current Node. + Node() ast.Node + + // Parent returns the parent of the current Node. 
+	Parent() Cursor
+
+	// Index reports the index >= 0 of the current Node in the slice of Nodes
+	// that contains it, or a value < 0 if the current Node is not part of a
+	// list.
+	Index() int
+
+	// Import reports an opaque identifier that refers to the given package. It
+	// may only be called if the input to apply was an ast.File. If the import
+	// does not exist, it will be added.
+	Import(path string) *ast.Ident
+
+	// Replace replaces the current Node with n.
+	// The replacement node is not walked by Apply. Comments of the old node
+	// are copied to the new node if it does not yet have comments associated
+	// with it.
+	Replace(n ast.Node)
+
+	// Delete deletes the current Node from its containing struct.
+	// If the current Node is not part of a struct, Delete panics.
+	Delete()
+
+	// InsertAfter inserts n after the current Node in its containing struct.
+	// If the current Node is not part of a struct, InsertAfter panics.
+	// Unless n is wrapped by ApplyRecursively, Apply does not walk n.
+	InsertAfter(n ast.Node)
+
+	// InsertBefore inserts n before the current Node in its containing struct.
+	// If the current Node is not part of a struct, InsertBefore panics.
+	// Unless n is wrapped by ApplyRecursively, Apply does not walk n.
+	InsertBefore(n ast.Node)
+
+	self() *cursor
+}
+
+// ApplyRecursively indicates that a node inserted with InsertBefore,
+// or InsertAfter should be processed recursively.
+func ApplyRecursively(n ast.Node) ast.Node {
+	return recursive{n}
+}
+
+type recursive struct {
+	ast.Node
+}
+
+type info struct {
+	f       *ast.File
+	current *declsCursor
+
+	importPatch []*ast.Ident
+}
+
+type cursor struct {
+	file    *info
+	parent  Cursor
+	node    ast.Node
+	typ     interface{} // the type of the node
+	index   int         // position of any of the sub types.
+ replaced bool +} + +func newCursor(parent Cursor, n ast.Node, typ interface{}) *cursor { + return &cursor{ + parent: parent, + typ: typ, + node: n, + index: -1, + } +} + +func fileInfo(c Cursor) (info *info) { + for ; c != nil; c = c.Parent() { + if i := c.self().file; i != nil { + return i + } + } + return nil +} + +func (c *cursor) self() *cursor { return c } +func (c *cursor) Parent() Cursor { return c.parent } +func (c *cursor) Index() int { return c.index } +func (c *cursor) Node() ast.Node { return c.node } + +func (c *cursor) Import(importPath string) *ast.Ident { + info := fileInfo(c) + if info == nil { + return nil + } + + name := importPathName(importPath) + + // TODO: come up with something much better. + // For instance, hoist the uniquer form cue/export.go to + // here and make export.go use this. + hash := fnv.New32() + name += hex.EncodeToString(hash.Sum([]byte(importPath)))[:6] + + spec := insertImport(&info.current.decls, &ast.ImportSpec{ + Name: ast.NewIdent(name), + Path: ast.NewString(importPath), + }) + + ident := &ast.Ident{Node: spec} // Name is set later. + info.importPatch = append(info.importPatch, ident) + + ident.Name = name + + return ident +} + +func (c *cursor) Replace(n ast.Node) { + // panic if the value cannot convert to the original type. + reflect.ValueOf(n).Convert(reflect.TypeOf(c.typ).Elem()) + if ast.Comments(n) != nil { + CopyComments(n, c.node) + } + if r, ok := n.(recursive); ok { + n = r.Node + } else { + c.replaced = true + } + c.node = n +} + +func (c *cursor) InsertAfter(n ast.Node) { panic("unsupported") } +func (c *cursor) InsertBefore(n ast.Node) { panic("unsupported") } +func (c *cursor) Delete() { panic("unsupported") } + +// Apply traverses a syntax tree recursively, starting with root, +// and calling pre and post for each node as described below. +// Apply returns the syntax tree, possibly modified. +// +// If pre is not nil, it is called for each node before the node's +// children are traversed (pre-order). 
If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition.
+//
+func Apply(node ast.Node, before, after func(Cursor) bool) ast.Node {
+	apply(&applier{before: before, after: after}, nil, &node)
+	return node
+}
+
+// An applyVisitor's before method is invoked for each node encountered by Walk.
+// If the result applyVisitor w is true, Walk visits each of the children
+// of node with the applyVisitor w, followed by a call of w.After.
+type applyVisitor interface {
+	Before(Cursor) applyVisitor
+	After(Cursor) bool
+}
+
+// Helper functions for common node lists. They may be empty.
+ +func applyExprList(v applyVisitor, parent Cursor, ptr interface{}, list []ast.Expr) { + c := newCursor(parent, nil, nil) + for i, x := range list { + c.index = i + c.node = x + c.typ = &list[i] + applyCursor(v, c) + if x != c.node { + list[i] = c.node.(ast.Expr) + } + } +} + +type declsCursor struct { + *cursor + decls, after, process []ast.Decl + delete bool +} + +func (c *declsCursor) InsertAfter(n ast.Node) { + if r, ok := n.(recursive); ok { + n = r.Node + c.process = append(c.process, n.(ast.Decl)) + } + c.after = append(c.after, n.(ast.Decl)) +} + +func (c *declsCursor) InsertBefore(n ast.Node) { + if r, ok := n.(recursive); ok { + n = r.Node + c.process = append(c.process, n.(ast.Decl)) + } + c.decls = append(c.decls, n.(ast.Decl)) +} + +func (c *declsCursor) Delete() { c.delete = true } + +func applyDeclList(v applyVisitor, parent Cursor, list []ast.Decl) []ast.Decl { + c := &declsCursor{ + cursor: newCursor(parent, nil, nil), + decls: make([]ast.Decl, 0, len(list)), + } + if file, ok := parent.Node().(*ast.File); ok { + c.cursor.file = &info{f: file, current: c} + } + for i, x := range list { + c.node = x + c.typ = &list[i] + applyCursor(v, c) + if !c.delete { + c.decls = append(c.decls, c.node.(ast.Decl)) + } + c.delete = false + for i := 0; i < len(c.process); i++ { + x := c.process[i] + c.node = x + c.typ = &c.process[i] + applyCursor(v, c) + if c.delete { + panic("cannot delete a node that was added with InsertBefore or InsertAfter") + } + } + c.decls = append(c.decls, c.after...) + c.after = c.after[:0] + c.process = c.process[:0] + } + + // TODO: ultimately, programmatically linked nodes have to be resolved + // at the end. 
+ // if info := c.cursor.file; info != nil { + // done := map[*ast.ImportSpec]bool{} + // for _, ident := range info.importPatch { + // spec := ident.Node.(*ast.ImportSpec) + // if done[spec] { + // continue + // } + // done[spec] = true + + // path, _ := strconv.Unquote(spec.Path) + + // ident.Name = + // } + // } + + return c.decls +} + +func apply(v applyVisitor, parent Cursor, nodePtr interface{}) { + res := reflect.Indirect(reflect.ValueOf(nodePtr)) + n := res.Interface() + node := n.(ast.Node) + c := newCursor(parent, node, nodePtr) + applyCursor(v, c) + if node != c.node { + res.Set(reflect.ValueOf(c.node)) + } +} + +// applyCursor traverses an AST in depth-first order: It starts by calling +// v.Visit(node); node must not be nil. If the visitor w returned by +// v.Visit(node) is not nil, apply is invoked recursively with visitor +// w for each of the non-nil children of node, followed by a call of +// w.Visit(nil). +// +func applyCursor(v applyVisitor, c Cursor) { + if v = v.Before(c); v == nil { + return + } + + node := c.Node() + + // TODO: record the comment groups and interleave with the values like for + // parsing and printing? 
+ comments := node.Comments() + for _, cm := range comments { + apply(v, c, &cm) + } + + // apply children + // (the order of the cases matches the order + // of the corresponding node types in go) + switch n := node.(type) { + // Comments and fields + case *ast.Comment: + // nothing to do + + case *ast.CommentGroup: + for _, cg := range n.List { + apply(v, c, &cg) + } + + case *ast.Attribute: + // nothing to do + + case *ast.Field: + apply(v, c, &n.Label) + if n.Value != nil { + apply(v, c, &n.Value) + } + for _, a := range n.Attrs { + apply(v, c, &a) + } + + case *ast.StructLit: + n.Elts = applyDeclList(v, c, n.Elts) + + // Expressions + case *ast.BottomLit, *ast.BadExpr, *ast.Ident, *ast.BasicLit: + // nothing to do + + case *ast.TemplateLabel: + apply(v, c, &n.Ident) + + case *ast.Interpolation: + applyExprList(v, c, &n, n.Elts) + + case *ast.ListLit: + applyExprList(v, c, &n, n.Elts) + + case *ast.Ellipsis: + if n.Type != nil { + apply(v, c, &n.Type) + } + + case *ast.ParenExpr: + apply(v, c, &n.X) + + case *ast.SelectorExpr: + apply(v, c, &n.X) + apply(v, c, &n.Sel) + + case *ast.IndexExpr: + apply(v, c, &n.X) + apply(v, c, &n.Index) + + case *ast.SliceExpr: + apply(v, c, &n.X) + if n.Low != nil { + apply(v, c, &n.Low) + } + if n.High != nil { + apply(v, c, &n.High) + } + + case *ast.CallExpr: + apply(v, c, &n.Fun) + applyExprList(v, c, &n, n.Args) + + case *ast.UnaryExpr: + apply(v, c, &n.X) + + case *ast.BinaryExpr: + apply(v, c, &n.X) + apply(v, c, &n.Y) + + // Declarations + case *ast.ImportSpec: + if n.Name != nil { + apply(v, c, &n.Name) + } + apply(v, c, &n.Path) + + case *ast.BadDecl: + // nothing to do + + case *ast.ImportDecl: + for _, s := range n.Specs { + apply(v, c, &s) + } + + case *ast.EmbedDecl: + apply(v, c, &n.Expr) + + case *ast.LetClause: + apply(v, c, &n.Ident) + apply(v, c, &n.Expr) + + case *ast.Alias: + apply(v, c, &n.Ident) + apply(v, c, &n.Expr) + + case *ast.Comprehension: + clauses := n.Clauses + for i := range n.Clauses { + 
apply(v, c, &clauses[i]) + } + apply(v, c, &n.Value) + + // Files and packages + case *ast.File: + n.Decls = applyDeclList(v, c, n.Decls) + + case *ast.Package: + apply(v, c, &n.Name) + + case *ast.ListComprehension: + apply(v, c, &n.Expr) + clauses := n.Clauses + for i := range clauses { + apply(v, c, &clauses[i]) + } + + case *ast.ForClause: + if n.Key != nil { + apply(v, c, &n.Key) + } + apply(v, c, &n.Value) + apply(v, c, &n.Source) + + case *ast.IfClause: + apply(v, c, &n.Condition) + + default: + panic(fmt.Sprintf("Walk: unexpected node type %T", n)) + } + + v.After(c) +} + +type applier struct { + before func(Cursor) bool + after func(Cursor) bool + + commentStack []commentFrame + current commentFrame +} + +type commentFrame struct { + cg []*ast.CommentGroup + pos int8 +} + +func (f *applier) Before(c Cursor) applyVisitor { + node := c.Node() + if f.before == nil || (f.before(c) && node == c.Node()) { + f.commentStack = append(f.commentStack, f.current) + f.current = commentFrame{cg: node.Comments()} + f.visitComments(c, f.current.pos) + return f + } + return nil +} + +func (f *applier) After(c Cursor) bool { + f.visitComments(c, 127) + p := len(f.commentStack) - 1 + f.current = f.commentStack[p] + f.commentStack = f.commentStack[:p] + f.current.pos++ + if f.after != nil { + f.after(c) + } + return true +} + +func (f *applier) visitComments(p Cursor, pos int8) { + c := &f.current + for i := 0; i < len(c.cg); i++ { + cg := c.cg[i] + if cg.Position == pos { + continue + } + cursor := newCursor(p, cg, cg) + if f.before == nil || (f.before(cursor) && !cursor.replaced) { + for j, c := range cg.List { + cursor := newCursor(p, c, &c) + if f.before == nil || (f.before(cursor) && !cursor.replaced) { + if f.after != nil { + f.after(cursor) + } + } + cg.List[j] = cursor.node.(*ast.Comment) + } + if f.after != nil { + f.after(cursor) + } + } + c.cg[i] = cursor.node.(*ast.CommentGroup) + } +} diff --git a/vendor/cuelang.org/go/cue/ast/astutil/file.go 
b/vendor/cuelang.org/go/cue/ast/astutil/file.go new file mode 100644 index 000000000..e060b7119 --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/astutil/file.go @@ -0,0 +1,38 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package astutil + +import ( + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/token" +) + +// ToFile converts an expression to a File. It will create an import section for +// any of the identifiers in x that refer to an import and will unshadow +// references as appropriate. +func ToFile(x ast.Expr) (*ast.File, error) { + var f *ast.File + if st, ok := x.(*ast.StructLit); ok { + f = &ast.File{Decls: st.Elts} + } else { + ast.SetRelPos(x, token.NoSpace) + f = &ast.File{Decls: []ast.Decl{&ast.EmbedDecl{Expr: x}}} + } + + if err := Sanitize(f); err != nil { + return nil, err + } + return f, nil +} diff --git a/vendor/cuelang.org/go/cue/ast/astutil/resolve.go b/vendor/cuelang.org/go/cue/ast/astutil/resolve.go new file mode 100644 index 000000000..00f33c96c --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/astutil/resolve.go @@ -0,0 +1,458 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file implements scopes and the objects they contain. + +package astutil + +import ( + "bytes" + "fmt" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/token" +) + +// An ErrFunc processes errors. +type ErrFunc func(pos token.Pos, msg string, args ...interface{}) + +// TODO: future development +// +// Resolution currently assigns values along the table below. This is based on +// Go's resolver and is not quite convenient for CUE's purposes. For one, CUE +// allows manually setting resolution and than call astutil.Sanitize to +// normalize the ast.File. Manually assigning resolutions according to the +// below table is rather tedious though. +// +// Instead of using the Scope and Node fields in identifiers, we suggest the +// following assignments: +// +// Reference Node // an Decl or Clause +// Ident *Ident // The identifier in References (optional) +// +// References always refers to the direct element in the scope in which the +// identifier occurs, not the final value, so: *Field, *LetClause, *ForClause, +// etc. In case Ident is defined, it must be the same pointer as the +// referencing identifier. In case it is not defined, the Name of the +// referencing identifier can be used to locate the proper identifier in the +// referenced node. +// +// The Scope field in the original design then loses its function. 
+// +// Type of reference Scope Node +// Let Clause File/Struct LetClause +// Alias declaration File/Struct Alias (deprecated) +// Illegal Reference File/Struct +// Fields +// X in X: y File/Struct Expr (y) +// X in X=x: y File/Struct Field +// X in X="\(x)": y File/Struct Field +// X in [X=x]: y Field Expr (x) +// X in X=[x]: y Field Field +// +// for k, v in Field ForClause +// +// Template Field Template +// Fields inside lambda +// Label Field Expr +// Value Field Field +// Pkg nil ImportSpec + +// Resolve resolves all identifiers in a file. Unresolved identifiers are +// recorded in Unresolved. It will not overwrite already resolved values. +func Resolve(f *ast.File, errFn ErrFunc) { + walk(&scope{errFn: errFn, identFn: resolveIdent}, f) +} + +// Resolve resolves all identifiers in an expression. +// It will not overwrite already resolved values. +func ResolveExpr(e ast.Expr, errFn ErrFunc) { + f := &ast.File{} + walk(&scope{file: f, errFn: errFn, identFn: resolveIdent}, e) +} + +// A Scope maintains the set of named language entities declared +// in the scope and a link to the immediately surrounding (outer) +// scope. 
+// +type scope struct { + file *ast.File + outer *scope + node ast.Node + index map[string]entry + inField bool + + identFn func(s *scope, n *ast.Ident) bool + nameFn func(name string) + errFn func(p token.Pos, msg string, args ...interface{}) +} + +type entry struct { + node ast.Node + link ast.Node // Alias, LetClause, or Field +} + +func newScope(f *ast.File, outer *scope, node ast.Node, decls []ast.Decl) *scope { + const n = 4 // initial scope capacity + s := &scope{ + file: f, + outer: outer, + node: node, + index: make(map[string]entry, n), + identFn: outer.identFn, + nameFn: outer.nameFn, + errFn: outer.errFn, + } + for _, d := range decls { + switch x := d.(type) { + case *ast.Field: + label := x.Label + + if a, ok := x.Label.(*ast.Alias); ok { + // TODO(legacy): use name := a.Ident.Name once quoted + // identifiers are no longer supported. + if name, _, _ := ast.LabelName(a.Ident); name != "" { + s.insert(name, x, a) + } + label, _ = a.Expr.(ast.Label) + } + + switch y := label.(type) { + // TODO: support *ast.ParenExpr? + case *ast.ListLit: + // In this case, it really should be scoped like a template. 
+ if len(y.Elts) != 1 { + break + } + if a, ok := y.Elts[0].(*ast.Alias); ok { + s.insert(a.Ident.Name, x, a) + } + } + + // default: + name, isIdent, _ := ast.LabelName(label) + if isIdent { + s.insert(name, x.Value, x) + } + case *ast.LetClause: + name, isIdent, _ := ast.LabelName(x.Ident) + if isIdent { + s.insert(name, x, x) + } + case *ast.Alias: + name, isIdent, _ := ast.LabelName(x.Ident) + if isIdent { + s.insert(name, x, x) + } + case *ast.ImportDecl: + for _, spec := range x.Specs { + info, _ := ParseImportSpec(spec) + s.insert(info.Ident, spec, spec) + } + } + } + return s +} + +func (s *scope) isLet(n ast.Node) bool { + if _, ok := s.node.(*ast.Field); ok { + return true + } + switch n.(type) { + case *ast.LetClause, *ast.Alias, *ast.Field: + return true + } + return false +} + +func (s *scope) mustBeUnique(n ast.Node) bool { + if _, ok := s.node.(*ast.Field); ok { + return true + } + switch n.(type) { + // TODO: add *ast.ImportSpec when some implementations are moved over to + // Sanitize. + case *ast.ImportSpec, *ast.LetClause, *ast.Alias, *ast.Field: + return true + } + return false +} + +func (s *scope) insert(name string, n, link ast.Node) { + if name == "" { + return + } + if s.nameFn != nil { + s.nameFn(name) + } + // TODO: record both positions. + if outer, _, existing := s.lookup(name); existing.node != nil { + if s.isLet(n) != outer.isLet(existing.node) { + s.errFn(n.Pos(), "cannot have both alias and field with name %q in same scope", name) + return + } else if s.mustBeUnique(n) || outer.mustBeUnique(existing.node) { + if outer == s { + if _, ok := existing.node.(*ast.ImportSpec); ok { + return + // TODO: + s.errFn(n.Pos(), "conflicting declaration %s\n"+ + "\tprevious declaration at %s", + name, existing.node.Pos()) + } else { + s.errFn(n.Pos(), "alias %q redeclared in same scope", name) + } + return + } + // TODO: Should we disallow shadowing of aliases? + // This was the case, but it complicates the transition to + // square brackets. 
The spec says allow it. + // s.errFn(n.Pos(), "alias %q already declared in enclosing scope", name) + } + } + s.index[name] = entry{node: n, link: link} +} + +func (s *scope) resolveScope(name string, node ast.Node) (scope ast.Node, e entry, ok bool) { + last := s + for s != nil { + if n, ok := s.index[name]; ok && node == n.node { + if last.node == n.node { + return nil, n, true + } + return s.node, n, true + } + s, last = s.outer, s + } + return nil, entry{}, false +} + +func (s *scope) lookup(name string) (p *scope, obj ast.Node, node entry) { + // TODO(#152): consider returning nil for obj if it is a reference to root. + // last := s + for s != nil { + if n, ok := s.index[name]; ok { + if _, ok := n.node.(*ast.ImportSpec); ok { + return s, nil, n + } + return s, s.node, n + } + // s, last = s.outer, s + s = s.outer + } + return nil, nil, entry{} +} + +func (s *scope) After(n ast.Node) {} +func (s *scope) Before(n ast.Node) (w visitor) { + switch x := n.(type) { + case *ast.File: + s := newScope(x, s, x, x.Decls) + // Support imports. + for _, d := range x.Decls { + walk(s, d) + } + return nil + + case *ast.StructLit: + return newScope(s.file, s, x, x.Elts) + + case *ast.Comprehension: + s = scopeClauses(s, x.Clauses) + + case *ast.ListComprehension: + s = scopeClauses(s, x.Clauses) + + case *ast.Field: + var n ast.Node = x.Label + alias, ok := x.Label.(*ast.Alias) + if ok { + n = alias.Expr + } + + switch label := n.(type) { + case *ast.Interpolation: + walk(s, label) + + case *ast.ListLit: + if len(label.Elts) != 1 { + break + } + s = newScope(s.file, s, x, nil) + if alias != nil { + if name, _, _ := ast.LabelName(alias.Ident); name != "" { + s.insert(name, x, alias) + } + } + + expr := label.Elts[0] + + if a, ok := expr.(*ast.Alias); ok { + expr = a.Expr + + // Add to current scope, instead of the value's, and allow + // references to bind to these illegally. 
+ // We need this kind of administration anyway to detect + // illegal name clashes, and it allows giving better error + // messages. This puts the burdon on clients of this library + // to detect illegal usage, though. + name, err := ast.ParseIdent(a.Ident) + if err == nil { + s.insert(name, a.Expr, a) + } + } + + ast.Walk(expr, nil, func(n ast.Node) { + if x, ok := n.(*ast.Ident); ok { + for s := s; s != nil && !s.inField; s = s.outer { + if _, ok := s.index[x.Name]; ok { + s.errFn(n.Pos(), + "reference %q in label expression refers to field against which it would be matched", x.Name) + } + } + } + }) + walk(s, expr) + + case *ast.TemplateLabel: + s = newScope(s.file, s, x, nil) + name, err := ast.ParseIdent(label.Ident) + if err == nil { + s.insert(name, x.Label, x) // Field used for entire lambda. + } + } + + if x.Value != nil { + s.inField = true + walk(s, x.Value) + s.inField = false + } + + return nil + + case *ast.LetClause: + // Disallow referring to the current LHS name. + name := x.Ident.Name + saved := s.index[name] + delete(s.index, name) // The same name may still appear in another scope + + if x.Expr != nil { + walk(s, x.Expr) + } + s.index[name] = saved + return nil + + case *ast.Alias: + // Disallow referring to the current LHS name. + name := x.Ident.Name + saved := s.index[name] + delete(s.index, name) // The same name may still appear in another scope + + if x.Expr != nil { + walk(s, x.Expr) + } + s.index[name] = saved + return nil + + case *ast.ImportSpec: + return nil + + case *ast.Attribute: + // TODO: tokenize attributes, resolve identifiers and store the ones + // that resolve in a list. 
+ + case *ast.SelectorExpr: + walk(s, x.X) + return nil + + case *ast.Ident: + if s.identFn(s, x) { + return nil + } + } + return s +} + +func resolveIdent(s *scope, x *ast.Ident) bool { + name, ok, _ := ast.LabelName(x) + if !ok { + // TODO: generate error + return false + } + if _, obj, node := s.lookup(name); node.node != nil { + switch { + case x.Node == nil: + x.Node = node.node + x.Scope = obj + + case x.Node == node.node: + x.Scope = obj + + default: // x.Node != node + scope, _, ok := s.resolveScope(name, x.Node) + if !ok { + s.file.Unresolved = append(s.file.Unresolved, x) + } + x.Scope = scope + } + } else { + s.file.Unresolved = append(s.file.Unresolved, x) + } + return true +} + +func scopeClauses(s *scope, clauses []ast.Clause) *scope { + for _, c := range clauses { + switch x := c.(type) { + case *ast.ForClause: + walk(s, x.Source) + s = newScope(s.file, s, x, nil) + if x.Key != nil { + name, err := ast.ParseIdent(x.Key) + if err == nil { + s.insert(name, x.Key, x) + } + } + name, err := ast.ParseIdent(x.Value) + if err == nil { + s.insert(name, x.Value, x) + } + + case *ast.LetClause: + walk(s, x.Expr) + s = newScope(s.file, s, x, nil) + name, err := ast.ParseIdent(x.Ident) + if err == nil { + s.insert(name, x.Ident, x) + } + + default: + walk(s, c) + } + } + return s +} + +// Debugging support +func (s *scope) String() string { + var buf bytes.Buffer + fmt.Fprintf(&buf, "scope %p {", s) + if s != nil && len(s.index) > 0 { + fmt.Fprintln(&buf) + for name := range s.index { + fmt.Fprintf(&buf, "\t%v\n", name) + } + } + fmt.Fprintf(&buf, "}\n") + return buf.String() +} diff --git a/vendor/cuelang.org/go/cue/ast/astutil/sanitize.go b/vendor/cuelang.org/go/cue/ast/astutil/sanitize.go new file mode 100644 index 000000000..061a46b6f --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/astutil/sanitize.go @@ -0,0 +1,354 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except 
in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package astutil + +import ( + "fmt" + "math/rand" + "strings" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +// TODO: +// - handle comprehensions +// - change field from foo to "foo" if it isn't referenced, rather than +// relying on introducing a unique alias. +// - change a predeclared identifier reference to use the __ident form, +// instead of introducing an alias. + +// Sanitize rewrites File f in place to be well formed after automated +// construction of an AST. +// +// Rewrites: +// - auto inserts imports associated with Idents +// - unshadows imports associated with idents +// - unshadows references for identifiers that were already resolved. +// +func Sanitize(f *ast.File) error { + z := &sanitizer{ + file: f, + rand: rand.New(rand.NewSource(808)), + + names: map[string]bool{}, + importMap: map[string]*ast.ImportSpec{}, + referenced: map[ast.Node]bool{}, + altMap: map[ast.Node]string{}, + } + + // Gather all names. + walk(&scope{ + errFn: z.errf, + nameFn: z.addName, + identFn: z.markUsed, + }, f) + if z.errs != nil { + return z.errs + } + + // Add imports and unshadow. + s := &scope{ + file: f, + errFn: z.errf, + identFn: z.handleIdent, + index: make(map[string]entry), + } + z.fileScope = s + walk(s, f) + if z.errs != nil { + return z.errs + } + + z.cleanImports() + + return z.errs +} + +type sanitizer struct { + file *ast.File + fileScope *scope + + rand *rand.Rand + + // names is all used names. Can be used to determine a new unique name. 
+ names map[string]bool + referenced map[ast.Node]bool + + // altMap defines an alternative name for an existing entry link (a field, + // alias or let clause). As new names are globally unique, they can be + // safely reused for any unshadowing. + altMap map[ast.Node]string + importMap map[string]*ast.ImportSpec + + errs errors.Error +} + +func (z *sanitizer) errf(p token.Pos, msg string, args ...interface{}) { + z.errs = errors.Append(z.errs, errors.Newf(p, msg, args...)) +} + +func (z *sanitizer) addName(name string) { + z.names[name] = true +} + +func (z *sanitizer) addRename(base string, n ast.Node) (alt string, new bool) { + if name, ok := z.altMap[n]; ok { + return name, false + } + + name := z.uniqueName(base, false) + z.altMap[n] = name + return name, true +} + +func (z *sanitizer) unshadow(parent ast.Node, base string, link ast.Node) string { + name, ok := z.altMap[link] + if !ok { + name = z.uniqueName(base, false) + z.altMap[link] = name + + // Insert new let clause at top to refer to a declaration in possible + // other files. + let := &ast.LetClause{ + Ident: ast.NewIdent(name), + Expr: ast.NewIdent(base), + } + + var decls *[]ast.Decl + + switch x := parent.(type) { + case *ast.File: + decls = &x.Decls + case *ast.StructLit: + decls = &x.Elts + default: + panic(fmt.Sprintf("impossible scope type %T", parent)) + } + + i := 0 + for ; i < len(*decls); i++ { + if (*decls)[i] == link { + break + } + if f, ok := (*decls)[i].(*ast.Field); ok && f.Label == link { + break + } + } + + if i > 0 { + ast.SetRelPos(let, token.NewSection) + } + + a := append((*decls)[:i:i], let) + *decls = append(a, (*decls)[i:]...) 
+ } + return name +} + +func (z *sanitizer) markUsed(s *scope, n *ast.Ident) bool { + if n.Node != nil { + return false + } + _, _, entry := s.lookup(n.String()) + z.referenced[entry.link] = true + return true +} + +func (z *sanitizer) cleanImports() { + z.file.VisitImports(func(d *ast.ImportDecl) { + k := 0 + for _, s := range d.Specs { + if _, ok := z.referenced[s]; ok { + d.Specs[k] = s + k++ + } + } + d.Specs = d.Specs[:k] + }) +} + +func (z *sanitizer) handleIdent(s *scope, n *ast.Ident) bool { + if n.Node == nil { + return true + } + + _, _, node := s.lookup(n.Name) + if node.node == nil { + spec, ok := n.Node.(*ast.ImportSpec) + if !ok { + // Clear node. A reference may have been moved to a different + // file. If not, it should be an error. + n.Node = nil + n.Scope = nil + return false + } + + _ = z.addImport(spec) + info, _ := ParseImportSpec(spec) + z.fileScope.insert(info.Ident, spec, spec) + return true + } + + if x, ok := n.Node.(*ast.ImportSpec); ok { + xi, _ := ParseImportSpec(x) + + if y, ok := node.node.(*ast.ImportSpec); ok { + yi, _ := ParseImportSpec(y) + if xi.ID == yi.ID { // name must be identical as a result of lookup. + z.referenced[y] = true + n.Node = x + n.Scope = nil + return false + } + } + + // Either: + // - the import is shadowed + // - an incorrect import is matched + // In all cases we need to create a new import with a unique name or + // use a previously created one. + spec := z.importMap[xi.ID] + if spec == nil { + name := z.uniqueName(xi.Ident, false) + spec = z.addImport(&ast.ImportSpec{ + Name: ast.NewIdent(name), + Path: x.Path, + }) + z.importMap[xi.ID] = spec + z.fileScope.insert(name, spec, spec) + } + + info, _ := ParseImportSpec(spec) + // TODO(apply): replace n itself directly + n.Name = info.Ident + n.Node = spec + n.Scope = nil + return false + } + + if node.node == n.Node { + return true + } + + // n.Node != node and are both not nil and n.Node is not an ImportSpec. 
+ // This means that either n.Node is illegal or shadowed. + // Look for the scope in which n.Node is defined and add an alias or let. + + parent, e, ok := s.resolveScope(n.Name, n.Node) + if !ok { + // The node isn't within a legal scope within this file. It may only + // possibly shadow a value of another file. We add a top-level let + // clause to refer to this value. + + // TODO(apply): better would be to have resolve use Apply so that we can replace + // the entire ast.Ident, rather than modifying it. + // TODO: resolve to new node or rely on another pass of Resolve? + n.Name = z.unshadow(z.file, n.Name, n) + n.Node = nil + n.Scope = nil + + return false + } + + var name string + // var isNew bool + switch x := e.link.(type) { + case *ast.Field: // referring to regular field. + name, ok = z.altMap[x] + if ok { + break + } + // If this field has not alias, introduce one with a unique name. + // If this has an alias, also introduce a new name. There is a + // possibility that the alias can be used, but it is easier to just + // assign a new name, assuming this case is rather rare. + switch y := x.Label.(type) { + case *ast.Alias: + name = z.unshadow(parent, y.Ident.Name, y) + + case *ast.Ident: + var isNew bool + name, isNew = z.addRename(y.Name, x) + if isNew { + ident := ast.NewIdent(name) + // Move formatting and comments from original label to alias + // identifier. + CopyMeta(ident, y) + ast.SetRelPos(y, token.NoRelPos) + ast.SetComments(y, nil) + x.Label = &ast.Alias{Ident: ident, Expr: y} + } + + default: + // This is an illegal reference. + return false + } + + case *ast.LetClause: + name = z.unshadow(parent, x.Ident.Name, x) + + case *ast.Alias: + name = z.unshadow(parent, x.Ident.Name, x) + + default: + panic(fmt.Sprintf("unexpected link type %T", e.link)) + } + + // TODO(apply): better would be to have resolve use Apply so that we can replace + // the entire ast.Ident, rather than modifying it. 
+ n.Name = name + n.Node = nil + n.Scope = nil + + return true +} + +// uniqueName returns a new name globally unique name of the form +// base_XX ... base_XXXXXXXXXXXXXX or _base or the same pattern with a '_' +// prefix if hidden is true. +// +// It prefers short extensions over large ones, while ensuring the likelihood of +// fast termination is high. There are at least two digits to make it visually +// clearer this concerns a generated number. +// +func (z *sanitizer) uniqueName(base string, hidden bool) string { + if hidden && !strings.HasPrefix(base, "_") { + base = "_" + base + if !z.names[base] { + z.names[base] = true + return base + } + } + + // TODO(go1.13): const mask = 0xff_ffff_ffff_ffff + const mask = 0xffffffffffffff // max bits; stay clear of int64 overflow + const shift = 4 // rate of growth + for n := int64(0x10); ; n = int64(mask&((n< 0 { + name = name[p+1:] + } + return name +} + +// ImportInfo describes the information contained in an ImportSpec. +type ImportInfo struct { + Ident string // identifier used to refer to the import + PkgName string // name of the package + ID string // full import path, including the name + Dir string // import path, excluding the name +} + +// ParseImportSpec returns the name and full path of an ImportSpec. +func ParseImportSpec(spec *ast.ImportSpec) (info ImportInfo, err error) { + str, err := strconv.Unquote(spec.Path.Value) + if err != nil { + return info, err + } + + info.ID = str + + if p := strings.LastIndexByte(str, ':'); p > 0 { + info.Dir = str[:p] + info.PkgName = str[p+1:] + } else { + info.Dir = str + info.PkgName = path.Base(str) + } + + if spec.Name != nil { + info.Ident = spec.Name.Name + } else { + info.Ident = info.PkgName + } + + return info, nil +} + +// CopyComments associates comments of one node with another. +// It may change the relative position of comments. 
+func CopyComments(to, from ast.Node) { + if from == nil { + return + } + ast.SetComments(to, from.Comments()) +} + +// CopyPosition sets the position of one node to another. +func CopyPosition(to, from ast.Node) { + if from == nil { + return + } + ast.SetPos(to, from.Pos()) +} + +// CopyMeta copies comments and position information from one node to another. +// It returns the destination node. +func CopyMeta(to, from ast.Node) ast.Node { + if from == nil { + return to + } + ast.SetComments(to, from.Comments()) + ast.SetPos(to, from.Pos()) + return to +} + +// insertImport looks up an existing import with the given name and path or will +// add spec if it doesn't exist. It returns a spec in decls matching spec. +func insertImport(decls *[]ast.Decl, spec *ast.ImportSpec) *ast.ImportSpec { + x, _ := ParseImportSpec(spec) + + a := *decls + + var imports *ast.ImportDecl + var orig *ast.ImportSpec + + p := 0 +outer: + for i := 0; i < len(a); i++ { + d := a[i] + switch t := d.(type) { + default: + break outer + + case *ast.Package: + p = i + 1 + case *ast.CommentGroup: + p = i + 1 + case *ast.Attribute: + continue + case *ast.ImportDecl: + p = i + 1 + imports = t + for _, s := range t.Specs { + y, _ := ParseImportSpec(s) + if y.ID != x.ID { + continue + } + orig = s + if x.Ident == "" || y.Ident == x.Ident { + return s + } + } + } + } + + // Import not found, add one. + if imports == nil { + imports = &ast.ImportDecl{} + preamble := append(a[:p:p], imports) + a = append(preamble, a[p:]...) 
+ *decls = a + } + + if orig != nil { + CopyComments(spec, orig) + } + imports.Specs = append(imports.Specs, spec) + ast.SetRelPos(imports.Specs[0], token.NoRelPos) + + return spec +} diff --git a/vendor/cuelang.org/go/cue/ast/astutil/walk.go b/vendor/cuelang.org/go/cue/ast/astutil/walk.go new file mode 100644 index 000000000..269e37146 --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/astutil/walk.go @@ -0,0 +1,205 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package astutil + +import ( + "fmt" + + "cuelang.org/go/cue/ast" +) + +// TODO: use ast.Walk or adopt that version to allow visitors. + +// A visitor's before method is invoked for each node encountered by Walk. +// If the result visitor w is not nil, Walk visits each of the children +// of node with the visitor w, followed by a call of w.After. +type visitor interface { + Before(node ast.Node) (w visitor) + After(node ast.Node) +} + +// Helper functions for common node lists. They may be empty. + +func walkExprList(v visitor, list []ast.Expr) { + for _, x := range list { + walk(v, x) + } +} + +func walkDeclList(v visitor, list []ast.Decl) { + for _, x := range list { + walk(v, x) + } +} + +// walk traverses an AST in depth-first order: It starts by calling +// v.Visit(node); node must not be nil. 
If the visitor w returned by +// v.Visit(node) is not nil, walk is invoked recursively with visitor +// w for each of the non-nil children of node, followed by a call of +// w.Visit(nil). +// +func walk(v visitor, node ast.Node) { + if v = v.Before(node); v == nil { + return + } + + // TODO: record the comment groups and interleave with the values like for + // parsing and printing? + for _, c := range node.Comments() { + walk(v, c) + } + + // walk children + // (the order of the cases matches the order + // of the corresponding node types in go) + switch n := node.(type) { + // Comments and fields + case *ast.Comment: + // nothing to do + + case *ast.CommentGroup: + for _, c := range n.List { + walk(v, c) + } + + case *ast.Attribute: + // nothing to do + + case *ast.Field: + walk(v, n.Label) + if n.Value != nil { + walk(v, n.Value) + } + for _, a := range n.Attrs { + walk(v, a) + } + + case *ast.StructLit: + for _, f := range n.Elts { + walk(v, f) + } + + // Expressions + case *ast.BottomLit, *ast.BadExpr, *ast.Ident, *ast.BasicLit: + // nothing to do + + case *ast.TemplateLabel: + walk(v, n.Ident) + + case *ast.Interpolation: + for _, e := range n.Elts { + walk(v, e) + } + + case *ast.ListLit: + walkExprList(v, n.Elts) + + case *ast.Ellipsis: + if n.Type != nil { + walk(v, n.Type) + } + + case *ast.ParenExpr: + walk(v, n.X) + + case *ast.SelectorExpr: + walk(v, n.X) + walk(v, n.Sel) + + case *ast.IndexExpr: + walk(v, n.X) + walk(v, n.Index) + + case *ast.SliceExpr: + walk(v, n.X) + if n.Low != nil { + walk(v, n.Low) + } + if n.High != nil { + walk(v, n.High) + } + + case *ast.CallExpr: + walk(v, n.Fun) + walkExprList(v, n.Args) + + case *ast.UnaryExpr: + walk(v, n.X) + + case *ast.BinaryExpr: + walk(v, n.X) + walk(v, n.Y) + + // Declarations + case *ast.ImportSpec: + if n.Name != nil { + walk(v, n.Name) + } + walk(v, n.Path) + + case *ast.BadDecl: + // nothing to do + + case *ast.ImportDecl: + for _, s := range n.Specs { + walk(v, s) + } + + case *ast.EmbedDecl: 
+ walk(v, n.Expr) + + case *ast.Alias: + walk(v, n.Ident) + walk(v, n.Expr) + + case *ast.Comprehension: + for _, c := range n.Clauses { + walk(v, c) + } + walk(v, n.Value) + + // Files and packages + case *ast.File: + walkDeclList(v, n.Decls) + + case *ast.Package: + // The package identifier isn't really an identifier. Skip it. + + case *ast.ListComprehension: + walk(v, n.Expr) + for _, c := range n.Clauses { + walk(v, c) + } + + case *ast.LetClause: + walk(v, n.Ident) + walk(v, n.Expr) + + case *ast.ForClause: + if n.Key != nil { + walk(v, n.Key) + } + walk(v, n.Value) + walk(v, n.Source) + + case *ast.IfClause: + walk(v, n.Condition) + + default: + panic(fmt.Sprintf("Walk: unexpected node type %T", n)) + } + + v.After(node) +} diff --git a/vendor/cuelang.org/go/cue/ast/comments.go b/vendor/cuelang.org/go/cue/ast/comments.go new file mode 100644 index 000000000..09d5402c8 --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/comments.go @@ -0,0 +1,46 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +// Comments returns all comments associated with a given node. +func Comments(n Node) []*CommentGroup { + c := n.commentInfo() + if c == nil { + return nil + } + return c.Comments() +} + +// AddComment adds the given comment to the node if it supports it. +// If a node does not support comments, such as for CommentGroup or Comment, +// this call has no effect. 
+func AddComment(n Node, cg *CommentGroup) { + c := n.commentInfo() + if c == nil { + return + } + c.AddComment(cg) +} + +// SetComments replaces all comments of n with the given set of comments. +// If a node does not support comments, such as for CommentGroup or Comment, +// this call has no effect. +func SetComments(n Node, cgs []*CommentGroup) { + c := n.commentInfo() + if c == nil { + return + } + c.SetComments(cgs) +} diff --git a/vendor/cuelang.org/go/cue/ast/ident.go b/vendor/cuelang.org/go/cue/ast/ident.go new file mode 100644 index 000000000..c86da16c4 --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/ident.go @@ -0,0 +1,227 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +func isLetter(ch rune) bool { + return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch >= utf8.RuneSelf && unicode.IsLetter(ch) +} + +func isDigit(ch rune) bool { + // TODO(mpvl): Is this correct? + return '0' <= ch && ch <= '9' || ch >= utf8.RuneSelf && unicode.IsDigit(ch) +} + +// IsValidIdent reports whether str is a valid identifier. +func IsValidIdent(ident string) bool { + if ident == "" { + return false + } + + // TODO: use consumed again to allow #0. 
+ // consumed := false + if strings.HasPrefix(ident, "_") { + ident = ident[1:] + // consumed = true + if len(ident) == 0 { + return true + } + } + if strings.HasPrefix(ident, "#") { + ident = ident[1:] + // consumed = true + } + + // if !consumed { + if r, _ := utf8.DecodeRuneInString(ident); isDigit(r) { + return false + } + // } + + for _, r := range ident { + if isLetter(r) || isDigit(r) || r == '_' || r == '$' { + continue + } + return false + } + return true +} + +// QuoteIdent quotes an identifier, if needed, and reports +// an error if the identifier is invalid. +// +// Deprecated: quoted identifiers are deprecated. Use aliases. +func QuoteIdent(ident string) (string, error) { + if ident != "" && ident[0] == '`' { + if _, err := strconv.Unquote(ident); err != nil { + return "", errors.Newf(token.NoPos, "invalid quoted identifier %q", ident) + } + return ident, nil + } + + // TODO: consider quoting keywords + // switch ident { + // case "for", "in", "if", "let", "true", "false", "null": + // goto escape + // } + + for _, r := range ident { + if isLetter(r) || isDigit(r) || r == '_' || r == '$' { + continue + } + if r == '-' { + return "`" + ident + "`", nil + } + return "", errors.Newf(token.NoPos, "invalid character '%s' in identifier", string(r)) + } + + _, err := parseIdent(token.NoPos, ident) + return ident, err +} + +// ParseIdent unquotes a possibly quoted identifier and validates +// if the result is valid. +// +// Deprecated: quoted identifiers are deprecated. Use aliases. 
+func ParseIdent(n *Ident) (string, error) { + return parseIdent(n.NamePos, n.Name) +} + +func parseIdent(pos token.Pos, ident string) (string, error) { + if ident == "" { + return "", errors.Newf(pos, "empty identifier") + } + quoted := false + if ident[0] == '`' { + u, err := strconv.Unquote(ident) + if err != nil { + return "", errors.Newf(pos, "invalid quoted identifier") + } + ident = u + quoted = true + } + + p := 0 + if strings.HasPrefix(ident, "_") { + p++ + if len(ident) == 1 { + return ident, nil + } + } + if strings.HasPrefix(ident[p:], "#") { + p++ + // if len(ident) == p { + // return "", errors.Newf(pos, "invalid identifier '_#'") + // } + } + + if p == 0 || ident[p-1] == '#' { + if r, _ := utf8.DecodeRuneInString(ident[p:]); isDigit(r) { + return "", errors.Newf(pos, "invalid character '%s' in identifier", string(r)) + } + } + + for _, r := range ident[p:] { + if isLetter(r) || isDigit(r) || r == '_' || r == '$' { + continue + } + if r == '-' && quoted { + continue + } + return "", errors.Newf(pos, "invalid character '%s' in identifier", string(r)) + } + + return ident, nil +} + +// LabelName reports the name of a label, whether it is an identifier +// (it binds a value to a scope), and whether it is valid. +// Keywords that are allowed in label positions are interpreted accordingly. +// +// Examples: +// +// Label Result +// foo "foo" true nil +// true "true" true nil +// "foo" "foo" false nil +// "x-y" "x-y" false nil +// "foo "" false invalid string +// "\(x)" "" false errors.Is(err, ErrIsExpression) +// X=foo "foo" true nil +// +func LabelName(l Label) (name string, isIdent bool, err error) { + if a, ok := l.(*Alias); ok { + l, _ = a.Expr.(Label) + } + switch n := l.(type) { + case *ListLit: + // An expression, but not one that can evaluated. 
+ return "", false, errors.Newf(l.Pos(), + "cannot reference fields with square brackets labels outside the field value") + + case *Ident: + // TODO(legacy): use name = n.Name + name, err = ParseIdent(n) + if err != nil { + return "", false, err + } + isIdent = true + // TODO(legacy): remove this return once quoted identifiers are removed. + return name, isIdent, err + + case *BasicLit: + switch n.Kind { + case token.STRING: + // Use strconv to only allow double-quoted, single-line strings. + name, err = strconv.Unquote(n.Value) + if err != nil { + err = errors.Newf(l.Pos(), "invalid") + } + + case token.NULL, token.TRUE, token.FALSE: + name = n.Value + isIdent = true + + default: + // TODO: allow numbers to be fields + // This includes interpolation and template labels. + return "", false, errors.Wrapf(ErrIsExpression, l.Pos(), + "cannot use numbers as fields") + } + + default: + // This includes interpolation and template labels. + return "", false, errors.Wrapf(ErrIsExpression, l.Pos(), + "label is an expression") + } + if !IsValidIdent(name) { + isIdent = false + } + return name, isIdent, err + +} + +// ErrIsExpression reports whether a label is an expression. +// This error is never returned directly. Use errors.Is or xerrors.Is. +var ErrIsExpression = errors.New("not a concrete label") diff --git a/vendor/cuelang.org/go/cue/ast/walk.go b/vendor/cuelang.org/go/cue/ast/walk.go new file mode 100644 index 000000000..659d6c414 --- /dev/null +++ b/vendor/cuelang.org/go/cue/ast/walk.go @@ -0,0 +1,274 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + + "cuelang.org/go/cue/token" +) + +// Walk traverses an AST in depth-first order: It starts by calling f(node); +// node must not be nil. If before returns true, Walk invokes f recursively for +// each of the non-nil children of node, followed by a call of after. Both +// functions may be nil. If before is nil, it is assumed to always return true. +// +func Walk(node Node, before func(Node) bool, after func(Node)) { + walk(&inspector{before: before, after: after}, node) +} + +// A visitor's before method is invoked for each node encountered by Walk. +// If the result visitor w is true, Walk visits each of the children +// of node with the visitor w, followed by a call of w.After. +type visitor interface { + Before(node Node) (w visitor) + After(node Node) +} + +// Helper functions for common node lists. They may be empty. + +func walkExprList(v visitor, list []Expr) { + for _, x := range list { + walk(v, x) + } +} + +func walkDeclList(v visitor, list []Decl) { + for _, x := range list { + walk(v, x) + } +} + +// walk traverses an AST in depth-first order: It starts by calling +// v.Visit(node); node must not be nil. If the visitor w returned by +// v.Visit(node) is not nil, walk is invoked recursively with visitor +// w for each of the non-nil children of node, followed by a call of +// w.Visit(nil). +// +func walk(v visitor, node Node) { + if v = v.Before(node); v == nil { + return + } + + // TODO: record the comment groups and interleave with the values like for + // parsing and printing? 
+ for _, c := range Comments(node) { + walk(v, c) + } + + // walk children + // (the order of the cases matches the order + // of the corresponding node types in go) + switch n := node.(type) { + // Comments and fields + case *Comment: + // nothing to do + + case *CommentGroup: + for _, c := range n.List { + walk(v, c) + } + + case *Attribute: + // nothing to do + + case *Field: + walk(v, n.Label) + if n.Value != nil { + walk(v, n.Value) + } + for _, a := range n.Attrs { + walk(v, a) + } + + case *StructLit: + walkDeclList(v, n.Elts) + + // Expressions + case *BottomLit, *BadExpr, *Ident, *BasicLit: + // nothing to do + + case *TemplateLabel: + walk(v, n.Ident) + + case *Interpolation: + for _, e := range n.Elts { + walk(v, e) + } + + case *ListLit: + walkExprList(v, n.Elts) + + case *Ellipsis: + if n.Type != nil { + walk(v, n.Type) + } + + case *ParenExpr: + walk(v, n.X) + + case *SelectorExpr: + walk(v, n.X) + walk(v, n.Sel) + + case *IndexExpr: + walk(v, n.X) + walk(v, n.Index) + + case *SliceExpr: + walk(v, n.X) + if n.Low != nil { + walk(v, n.Low) + } + if n.High != nil { + walk(v, n.High) + } + + case *CallExpr: + walk(v, n.Fun) + walkExprList(v, n.Args) + + case *UnaryExpr: + walk(v, n.X) + + case *BinaryExpr: + walk(v, n.X) + walk(v, n.Y) + + // Declarations + case *ImportSpec: + if n.Name != nil { + walk(v, n.Name) + } + walk(v, n.Path) + + case *BadDecl: + // nothing to do + + case *ImportDecl: + for _, s := range n.Specs { + walk(v, s) + } + + case *EmbedDecl: + walk(v, n.Expr) + + case *LetClause: + walk(v, n.Ident) + walk(v, n.Expr) + + case *Alias: + walk(v, n.Ident) + walk(v, n.Expr) + + case *Comprehension: + for _, c := range n.Clauses { + walk(v, c) + } + walk(v, n.Value) + + // Files and packages + case *File: + walkDeclList(v, n.Decls) + + case *Package: + walk(v, n.Name) + + case *ListComprehension: + walk(v, n.Expr) + for _, c := range n.Clauses { + walk(v, c) + } + + case *ForClause: + if n.Key != nil { + walk(v, n.Key) + } + walk(v, n.Value) 
+ walk(v, n.Source) + + case *IfClause: + walk(v, n.Condition) + + default: + panic(fmt.Sprintf("Walk: unexpected node type %T", n)) + } + + v.After(node) +} + +type inspector struct { + before func(Node) bool + after func(Node) + + commentStack []commentFrame + current commentFrame +} + +type commentFrame struct { + cg []*CommentGroup + pos int8 +} + +func (f *inspector) Before(node Node) visitor { + if f.before == nil || f.before(node) { + f.commentStack = append(f.commentStack, f.current) + f.current = commentFrame{cg: Comments(node)} + f.visitComments(f.current.pos) + return f + } + return nil +} + +func (f *inspector) After(node Node) { + f.visitComments(127) + p := len(f.commentStack) - 1 + f.current = f.commentStack[p] + f.commentStack = f.commentStack[:p] + f.current.pos++ + if f.after != nil { + f.after(node) + } +} + +func (f *inspector) Token(t token.Token) { + f.current.pos++ +} + +func (f *inspector) setPos(i int8) { + f.current.pos = i +} + +func (f *inspector) visitComments(pos int8) { + c := &f.current + for ; len(c.cg) > 0; c.cg = c.cg[1:] { + cg := c.cg[0] + if cg.Position == pos { + continue + } + if f.before == nil || f.before(cg) { + for _, c := range cg.List { + if f.before == nil || f.before(c) { + if f.after != nil { + f.after(c) + } + } + } + if f.after != nil { + f.after(cg) + } + } + } +} diff --git a/vendor/cuelang.org/go/cue/build.go b/vendor/cuelang.org/go/cue/build.go new file mode 100644 index 000000000..b69490618 --- /dev/null +++ b/vendor/cuelang.org/go/cue/build.go @@ -0,0 +1,220 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +import ( + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/ast/astutil" + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + "cuelang.org/go/internal/core/runtime" +) + +// A Runtime is used for creating CUE interpretations. +// +// Any operation that involves two Values or Instances should originate from +// the same Runtime. +// +// The zero value of a Runtime is ready to use. +type Runtime struct { + idx *index +} + +func init() { + internal.GetRuntime = func(instance interface{}) interface{} { + switch x := instance.(type) { + case Value: + return &Runtime{idx: x.idx} + + case *Instance: + return &Runtime{idx: x.index} + + default: + panic("argument must be Value or *Instance") + } + } + + internal.CheckAndForkRuntime = func(runtime, value interface{}) interface{} { + r := runtime.(*Runtime) + idx := value.(Value).ctx().index + if idx != r.idx { + panic("value not from same runtime") + } + return &Runtime{idx: newIndex()} + } + + internal.CoreValue = func(value interface{}) (runtime, vertex interface{}) { + if v, ok := value.(Value); ok && v.v != nil { + return v.idx.Runtime, v.v + } + return nil, nil + } +} + +func dummyLoad(token.Pos, string) *build.Instance { return nil } + +func (r *Runtime) index() *index { + if r.idx == nil { + r.idx = newIndex() + } + return r.idx +} + +func (r *Runtime) complete(p *build.Instance) (*Instance, error) { + idx := r.index() + if err := p.Complete(); err != nil { + return nil, err + } + inst := idx.loadInstance(p) + 
inst.ImportPath = p.ImportPath + if inst.Err != nil { + return nil, inst.Err + } + return inst, nil +} + +// Compile compiles the given source into an Instance. The source code may be +// provided as a string, byte slice, io.Reader. The name is used as the file +// name in position information. The source may import builtin packages. Use +// Build to allow importing non-builtin packages. +func (r *Runtime) Compile(filename string, source interface{}) (*Instance, error) { + ctx := build.NewContext() + p := ctx.NewInstance(filename, dummyLoad) + if err := p.AddFile(filename, source); err != nil { + return nil, p.Err + } + return r.complete(p) +} + +// CompileFile compiles the given source file into an Instance. The source may +// import builtin packages. Use Build to allow importing non-builtin packages. +func (r *Runtime) CompileFile(file *ast.File) (*Instance, error) { + ctx := build.NewContext() + p := ctx.NewInstance(file.Filename, dummyLoad) + err := p.AddSyntax(file) + if err != nil { + return nil, err + } + _, p.PkgName, _ = internal.PackageInfo(file) + return r.complete(p) +} + +// CompileExpr compiles the given source expression into an Instance. The source +// may import builtin packages. Use Build to allow importing non-builtin +// packages. +func (r *Runtime) CompileExpr(expr ast.Expr) (*Instance, error) { + f, err := astutil.ToFile(expr) + if err != nil { + return nil, err + } + return r.CompileFile(f) +} + +// Parse parses a CUE source value into a CUE Instance. The source code may +// be provided as a string, byte slice, or io.Reader. The name is used as the +// file name in position information. The source may import builtin packages. +// +// Deprecated: use Compile +func (r *Runtime) Parse(name string, source interface{}) (*Instance, error) { + return r.Compile(name, source) +} + +// Build creates an Instance from the given build.Instance. A returned Instance +// may be incomplete, in which case its Err field is set. 
+func (r *Runtime) Build(instance *build.Instance) (*Instance, error) { + return r.complete(instance) +} + +// Build creates one Instance for each build.Instance. A returned Instance +// may be incomplete, in which case its Err field is set. +// +// Example: +// inst := cue.Build(load.Instances(args)) +// +func Build(instances []*build.Instance) []*Instance { + if len(instances) == 0 { + panic("cue: list of instances must not be empty") + } + var r Runtime + a, _ := r.build(instances) + return a +} + +func (r *Runtime) build(instances []*build.Instance) ([]*Instance, error) { + index := r.index() + + loaded := []*Instance{} + + var errs errors.Error + + for _, p := range instances { + _ = p.Complete() + errs = errors.Append(errs, p.Err) + + i := index.loadInstance(p) + errs = errors.Append(errs, i.Err) + loaded = append(loaded, i) + } + + // TODO: insert imports + return loaded, errs +} + +// FromExpr creates an instance from an expression. +// Any references must be resolved beforehand. +// +// Deprecated: use CompileExpr +func (r *Runtime) FromExpr(expr ast.Expr) (*Instance, error) { + return r.CompileFile(&ast.File{ + Decls: []ast.Decl{&ast.EmbedDecl{Expr: expr}}, + }) +} + +// index maps conversions from label names to internal codes. +// +// All instances belonging to the same package should share this index. +type index struct { + *runtime.Runtime + loaded map[*build.Instance]*Instance +} + +// NewRuntime creates a *runtime.Runtime with builtins preloaded. +func NewRuntime() *runtime.Runtime { + i := newIndex() + i.Runtime.Data = i + return i.Runtime +} + +// newIndex creates a new index. 
+func newIndex() *index { + r := runtime.New() + i := &index{ + Runtime: r, + loaded: map[*build.Instance]*Instance{}, + } + r.Data = i + return i +} + +func isBuiltin(s string) bool { + return runtime.SharedRuntime.IsBuiltinPackage(s) +} + +func (idx *index) loadInstance(p *build.Instance) *Instance { + v, _ := idx.Runtime.Build(p) + return idx.getImportFromBuild(p, v) +} diff --git a/vendor/cuelang.org/go/cue/build/context.go b/vendor/cuelang.org/go/cue/build/context.go new file mode 100644 index 000000000..664326eee --- /dev/null +++ b/vendor/cuelang.org/go/cue/build/context.go @@ -0,0 +1,128 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package build defines data types and utilities for defining CUE configuration +// instances. +// +// This package enforces the rules regarding packages and instances as defined +// in the spec, but it leaves any other details, as well as handling of modules, +// up to the implementation. +// +// A full implementation of instance loading can be found in the loader package. +// +// WARNING: this packages may change. It is fine to use load and cue, who both +// use this package. +package build + +import ( + "context" + + "cuelang.org/go/cue/ast" +) + +// A Context keeps track of state of building instances and caches work. 
+type Context struct {
+	ctxt context.Context
+
+	loader    LoadFunc
+	parseFunc func(str string, src interface{}) (*ast.File, error)
+
+	initialized bool
+
+	imports map[string]*Instance
+}
+
+// NewInstance creates an instance for this Context.
+func (c *Context) NewInstance(dir string, f LoadFunc) *Instance {
+	if c == nil {
+		c = &Context{}
+	}
+	if f == nil {
+		f = c.loader
+	}
+	return &Instance{
+		ctxt:     c,
+		loadFunc: f,
+		Dir:      dir,
+	}
+}
+
+// Complete finishes the initialization of an instance. All files must have
+// been added with AddFile before this call.
+func (inst *Instance) Complete() error {
+	if inst.done {
+		return inst.Err
+	}
+	inst.done = true
+
+	err := inst.complete()
+	if err != nil {
+		inst.ReportError(err)
+	}
+	if inst.Err != nil {
+		inst.Incomplete = true
+		return inst.Err
+	}
+	return nil
+}
+
+func (c *Context) init() {
+	if !c.initialized {
+		c.initialized = true
+		c.ctxt = context.Background()
+		c.imports = map[string]*Instance{}
+	}
+}
+
+// Options:
+// - certain parse modes
+// - parallelism
+// - error handler (allows cancelling the context)
+// - file set.
+
+// NewContext creates a new build context.
+//
+// All instances must be created with a context.
+func NewContext(opts ...Option) *Context {
+	c := &Context{}
+	for _, o := range opts {
+		o(c)
+	}
+	c.init()
+	return c
+}
+
+// Option defines build options.
+type Option func(c *Context)
+
+// Loader sets parsing options.
+func Loader(f LoadFunc) Option {
+	return func(c *Context) { c.loader = f }
+}
+
+// ParseFile is called to read and parse each file
+// when building syntax tree.
+// It must be safe to call ParseFile simultaneously from multiple goroutines.
+// If ParseFile is nil, the loader will use parser.ParseFile.
+//
+// ParseFile should parse the source from src and use filename only for
+// recording position information.
+// +// An application may supply a custom implementation of ParseFile +// to change the effective file contents or the behavior of the parser, +// or to modify the syntax tree. For example, changing the backwards +// compatibility. +func ParseFile(f func(filename string, src interface{}) (*ast.File, error)) Option { + return func(c *Context) { c.parseFunc = f } +} diff --git a/vendor/cuelang.org/go/cue/build/doc.go b/vendor/cuelang.org/go/cue/build/doc.go new file mode 100644 index 000000000..52421c65d --- /dev/null +++ b/vendor/cuelang.org/go/cue/build/doc.go @@ -0,0 +1,16 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package build defines collections of CUE files to build an instance. +package build // import "cuelang.org/go/cue/build" diff --git a/vendor/cuelang.org/go/cue/build/file.go b/vendor/cuelang.org/go/cue/build/file.go new file mode 100644 index 000000000..2aef149a5 --- /dev/null +++ b/vendor/cuelang.org/go/cue/build/file.go @@ -0,0 +1,81 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package build
+
+// A File represents a file that is part of the build process.
+type File struct {
+	Filename string `json:"filename"`
+
+	Encoding       Encoding          `json:"encoding,omitempty"`
+	Interpretation Interpretation    `json:"interpretation,omitempty"`
+	Form           Form              `json:"form,omitempty"`
+	Tags           map[string]string `json:"tags,omitempty"` // code=go
+
+	Source interface{} `json:"-"` // TODO: swap out with concrete type.
+}
+
+// An Encoding indicates a file format for representing a program.
+type Encoding string
+
+const (
+	CUE      Encoding = "cue"
+	JSON     Encoding = "json"
+	YAML     Encoding = "yaml"
+	JSONL    Encoding = "jsonl"
+	Text     Encoding = "text"
+	Protobuf Encoding = "proto"
+
+	// TODO:
+	// TOML
+	// TextProto
+	// BinProto
+
+	Code Encoding = "code" // Programming languages
+)
+
+// An Interpretation determines how a certain program should be interpreted.
+// For instance, data may be interpreted as describing a schema, which itself
+// can be converted to a CUE schema.
+type Interpretation string
+
+const (
+	// Auto interprets the underlying data file as data, JSON Schema or OpenAPI,
+	// depending on the existence of certain marker fields.
+	//
+	// JSON Schema is identified by a top-level "$schema" field with a URL
+	// of the form "https?://json-schema.org/.*schema#?".
+	//
+	// OpenAPI is identified by the existence of a top-level field "openapi"
+	// with a major semantic version of 3, as well as the existence of
+	// the info.title and info.version fields.
+	//
+	// In all other cases, the underlying data is interpreted as is.
+ Auto Interpretation = "auto" + JSONSchema Interpretation = "jsonschema" + OpenAPI Interpretation = "openapi" +) + +// A Form specifies the form in which a program should be represented. +type Form string + +const ( + Full Form = "full" + Schema Form = "schema" + Struct Form = "struct" + Final Form = "final" // picking default values, may be non-concrete + Graph Form = "graph" // Data only, but allow references + DAG Form = "dag" // Like graph, but don't allow cycles + Data Form = "data" // always final +) diff --git a/vendor/cuelang.org/go/cue/build/import.go b/vendor/cuelang.org/go/cue/build/import.go new file mode 100644 index 000000000..996edb0af --- /dev/null +++ b/vendor/cuelang.org/go/cue/build/import.go @@ -0,0 +1,170 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package build + +import ( + "sort" + "strconv" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +type LoadFunc func(pos token.Pos, path string) *Instance + +type cueError = errors.Error + +type buildError struct { + cueError + inputs []token.Pos +} + +func (e *buildError) InputPositions() []token.Pos { + return e.inputs +} + +func (inst *Instance) complete() errors.Error { + // TODO: handle case-insensitive collisions. 
+ // dir := inst.Dir + // names := []string{} + // for _, src := range sources { + // names = append(names, src.path) + // } + // f1, f2 := str.FoldDup(names) + // if f1 != "" { + // return nil, fmt.Errorf("case-insensitive file name collision: %q and %q", f1, f2) + // } + + var ( + c = inst.ctxt + imported = map[string][]token.Pos{} + ) + + for _, f := range inst.Files { + for _, decl := range f.Decls { + d, ok := decl.(*ast.ImportDecl) + if !ok { + continue + } + for _, spec := range d.Specs { + quoted := spec.Path.Value + path, err := strconv.Unquote(quoted) + if err != nil { + inst.Err = errors.Append(inst.Err, + errors.Newf( + spec.Path.Pos(), + "%s: parser returned invalid quoted string: <%s>", + f.Filename, quoted)) + } + imported[path] = append(imported[path], spec.Pos()) + } + } + } + + paths := make([]string, 0, len(imported)) + for path := range imported { + paths = append(paths, path) + if path == "" { + return &buildError{ + errors.Newf(token.NoPos, "empty import path"), + imported[path], + } + } + } + + sort.Strings(paths) + + if inst.loadFunc != nil { + for i, path := range paths { + isLocal := IsLocalImport(path) + if isLocal { + // path = dirToImportPath(filepath.Join(dir, path)) + } + + imp := c.imports[path] + if imp == nil { + pos := token.NoPos + if len(imported[path]) > 0 { + pos = imported[path][0] + } + imp = inst.loadFunc(pos, path) + if imp == nil { + continue + } + if imp.Err != nil { + return errors.Wrapf(imp.Err, pos, "import failed") + } + imp.ImportPath = path + // imp.parent = inst + c.imports[path] = imp + // imp.parent = nil + } else if imp.parent != nil { + // TODO: report a standard cycle message. 
+ // cycle is now handled explicitly in loader + } + paths[i] = imp.ImportPath + + inst.addImport(imp) + if imp.Incomplete { + inst.Incomplete = true + } + } + } + + inst.ImportPaths = paths + inst.ImportPos = imported + + // Build full dependencies + deps := make(map[string]*Instance) + var q []*Instance + q = append(q, inst.Imports...) + for i := 0; i < len(q); i++ { + p1 := q[i] + path := p1.ImportPath + // The same import path could produce an error or not, + // depending on what tries to import it. + // Prefer to record entries with errors, so we can report them. + // p0 := deps[path] + // if err0, err1 := lastError(p0), lastError(p1); p0 == nil || err1 != nil && (err0 == nil || len(err0.ImportStack) > len(err1.ImportStack)) { + // deps[path] = p1 + // for _, p2 := range p1.Imports { + // if deps[p2.ImportPath] != p2 { + // q = append(q, p2) + // } + // } + // } + if _, ok := deps[path]; !ok { + deps[path] = p1 + } + } + inst.Deps = make([]string, 0, len(deps)) + for dep := range deps { + inst.Deps = append(inst.Deps, dep) + } + sort.Strings(inst.Deps) + + for _, dep := range inst.Deps { + p1 := deps[dep] + if p1 == nil { + panic("impossible: missing entry in package cache for " + dep + " imported by " + inst.ImportPath) + } + if p1.Err != nil { + inst.DepsErrors = append(inst.DepsErrors, p1.Err) + } + } + + return nil +} diff --git a/vendor/cuelang.org/go/cue/build/instance.go b/vendor/cuelang.org/go/cue/build/instance.go new file mode 100644 index 000000000..90eb64d83 --- /dev/null +++ b/vendor/cuelang.org/go/cue/build/instance.go @@ -0,0 +1,286 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package build
+
+import (
+	"fmt"
+	pathpkg "path"
+	"path/filepath"
+	"strings"
+	"unicode"
+
+	"cuelang.org/go/cue/ast"
+	"cuelang.org/go/cue/ast/astutil"
+	"cuelang.org/go/cue/errors"
+	"cuelang.org/go/cue/parser"
+	"cuelang.org/go/cue/token"
+	"cuelang.org/go/internal"
+)
+
+// An Instance describes the collection of files, and its imports, necessary
+// to build a CUE instance.
+//
+// A typical way to create an Instance is to use the cue/load package.
+type Instance struct {
+	ctxt *Context
+
+	BuildFiles    []*File // files to be included in the build
+	IgnoredFiles  []*File // files excluded for this build
+	OrphanedFiles []*File // recognized file formats not part of any build
+	InvalidFiles  []*File // could not parse these files
+	UnknownFiles  []*File // unknown file types
+
+	// Files contains the AST for all files part of this instance.
+	// TODO: the intent is to deprecate this in favor of BuildFiles.
+	Files []*ast.File
+
+	loadFunc LoadFunc
+	done     bool
+
+	// Scope is another instance that may be used to resolve any unresolved
+	// reference of this instance. For instance, tool and test instances
+	// may refer to top-level fields in their package scope.
+	Scope *Instance
+
+	// PkgName is the name specified in the package clause.
+	PkgName string
+	hasName bool
+
+	// ImportPath returns the unique path to identify an imported instance.
+	//
+	// Instances created with NewInstance do not have an import path.
+	ImportPath string
+
+	// Imports lists the instances of all direct imports of this instance.
+ Imports []*Instance + + // The Err for loading this package or nil on success. This does not + // include any errors of dependencies. Incomplete will be set if there + // were any errors in dependencies. + Err errors.Error + + // Incomplete reports whether any dependencies had an error. + Incomplete bool + + parent *Instance // TODO: for cycle detection + + // The following fields are for informative purposes and are not used by + // the cue package to create an instance. + + // ImportComment is the path in the import comment on the package statement. + ImportComment string + + // DisplayPath is a user-friendly version of the package or import path. + DisplayPath string + + // Dir is the package directory. Note that a package may also include files + // from ancestor directories, up to the module file. + Dir string + + // Module defines the module name of a package. It must be defined if + // the packages within the directory structure of the module are to be + // imported by other packages, including those within the module. + Module string + + // Root is the root of the directory hierarchy, it may be "" if this an + // instance has no imports. + // If Module != "", this corresponds to the module root. + // Root/pkg is the directory that holds third-party packages. + Root string // root directory of hierarchy ("" if unknown) + + // AllTags are the build tags that can influence file selection in this + // directory. + AllTags []string + + Standard bool // Is a builtin package + User bool // True if package was created from individual files. + + // Deprecated: use BuildFiles + CUEFiles []string // .cue source files + // Deprecated: use BuildFiles and OrphanedFiles + DataFiles []string // recognized data files (.json, .yaml, etc.) + + // The intent is to also deprecate the following fields in favor of + // IgnoredFiles and UnknownFiles. 
+ TestCUEFiles []string // .cue test files (_test.cue) + ToolCUEFiles []string // .cue tool files (_tool.cue) + IgnoredCUEFiles []string // .cue source files ignored for this build + InvalidCUEFiles []string // .cue source files with detected problems (parse error, wrong package name, and so on) + + // Dependencies + ImportPaths []string + ImportPos map[string][]token.Pos // line information for Imports + + Deps []string + DepsErrors []error + Match []string +} + +// ID returns the package ID unique for this module. +func (inst *Instance) ID() string { + if inst.PkgName == "" { + return "" + } + s := fmt.Sprintf("%s:%s", inst.Module, inst.PkgName) + return s +} + +// Dependencies reports all Instances on which this instance depends. +func (inst *Instance) Dependencies() []*Instance { + // TODO: as cyclic dependencies are not allowed, we could just not check. + // Do for safety now and remove later if needed. + return appendDependencies(nil, inst, map[*Instance]bool{}) +} + +func appendDependencies(a []*Instance, inst *Instance, done map[*Instance]bool) []*Instance { + for _, d := range inst.Imports { + if done[d] { + continue + } + a = append(a, d) + done[d] = true + a = appendDependencies(a, d, done) + } + return a +} + +// Abs converts relative path used in the one of the file fields to an +// absolute one. +func (inst *Instance) Abs(path string) string { + if filepath.IsAbs(path) { + return path + } + return filepath.Join(inst.Root, path) +} + +func (inst *Instance) setPkg(pkg string) bool { + if !inst.hasName { + inst.hasName = true + inst.PkgName = pkg + return true + } + return false +} + +// ReportError reports an error processing this instance. +func (inst *Instance) ReportError(err errors.Error) { + inst.Err = errors.Append(inst.Err, err) +} + +// Context defines the build context for this instance. All files defined +// in Syntax as well as all imported instances must be created using the +// same build context. 
+func (inst *Instance) Context() *Context { + return inst.ctxt +} + +func (inst *Instance) parse(name string, src interface{}) (*ast.File, error) { + if inst.ctxt != nil && inst.ctxt.parseFunc != nil { + return inst.ctxt.parseFunc(name, src) + } + return parser.ParseFile(name, src, parser.ParseComments) +} + +// LookupImport defines a mapping from an ImportSpec's ImportPath to Instance. +func (inst *Instance) LookupImport(path string) *Instance { + path = inst.expandPath(path) + for _, inst := range inst.Imports { + if inst.ImportPath == path { + return inst + } + } + return nil +} + +func (inst *Instance) addImport(imp *Instance) { + for _, inst := range inst.Imports { + if inst.ImportPath == imp.ImportPath { + if inst != imp { + panic("import added multiple times with different instances") + } + return + } + } + inst.Imports = append(inst.Imports, imp) +} + +// AddFile adds the file with the given name to the list of files for this +// instance. The file may be loaded from the cache of the instance's context. +// It does not process the file's imports. The package name of the file must +// match the package name of the instance. +func (inst *Instance) AddFile(filename string, src interface{}) error { + file, err := inst.parse(filename, src) + if err != nil { + // should always be an errors.List, but just in case. + err := errors.Promote(err, "error adding file") + inst.ReportError(err) + return err + } + + return inst.AddSyntax(file) +} + +// AddSyntax adds the given file to list of files for this instance. The package +// name of the file must match the package name of the instance. 
+func (inst *Instance) AddSyntax(file *ast.File) errors.Error { + astutil.Resolve(file, func(pos token.Pos, msg string, args ...interface{}) { + inst.Err = errors.Append(inst.Err, errors.Newf(pos, msg, args...)) + }) + _, pkg, pos := internal.PackageInfo(file) + if pkg != "" && pkg != "_" && !inst.setPkg(pkg) && pkg != inst.PkgName { + err := errors.Newf(pos, + "package name %q conflicts with previous package name %q", + pkg, inst.PkgName) + inst.ReportError(err) + return err + } + inst.Files = append(inst.Files, file) + return nil +} + +func (inst *Instance) expandPath(path string) string { + isLocal := IsLocalImport(path) + if isLocal { + path = dirToImportPath(filepath.Join(inst.Dir, path)) + } + return path +} + +// dirToImportPath returns the pseudo-import path we use for a package +// outside the CUE path. It begins with _/ and then contains the full path +// to the directory. If the package lives in c:\home\gopher\my\pkg then +// the pseudo-import path is _/c_/home/gopher/my/pkg. +// Using a pseudo-import path like this makes the ./ imports no longer +// a special case, so that all the code to deal with ordinary imports works +// automatically. +func dirToImportPath(dir string) string { + return pathpkg.Join("_", strings.Map(makeImportValid, filepath.ToSlash(dir))) +} + +func makeImportValid(r rune) rune { + // Should match Go spec, compilers, and ../../go/parser/parser.go:/isValidImport. + const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD" + if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) { + return '_' + } + return r +} + +// IsLocalImport reports whether the import path is +// a local import path, like ".", "..", "./foo", or "../foo". +func IsLocalImport(path string) bool { + return path == "." || path == ".." 
|| + strings.HasPrefix(path, "./") || strings.HasPrefix(path, "../") +} diff --git a/vendor/cuelang.org/go/cue/builtin.go b/vendor/cuelang.org/go/cue/builtin.go new file mode 100644 index 000000000..ef2754bf0 --- /dev/null +++ b/vendor/cuelang.org/go/cue/builtin.go @@ -0,0 +1,56 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +import ( + "strings" + + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + "cuelang.org/go/internal/core/adt" + "cuelang.org/go/internal/core/runtime" +) + +func pos(n adt.Node) (p token.Pos) { + if n == nil { + return + } + src := n.Source() + if src == nil { + return + } + return src.Pos() +} + +func init() { + // TODO: unroll this function. Should no longer be necessary to be internal. 
+ internal.UnifyBuiltin = func(val interface{}, kind string) interface{} { + v := val.(Value) + ctx := v.ctx() + + p := strings.Split(kind, ".") + pkg, name := p[0], p[1] + s, _ := runtime.SharedRuntime.LoadImport(pkg) + if s == nil { + return v + } + a := s.Lookup(ctx.Label(name, false)) + if a == nil { + return v + } + + return v.Unify(makeValue(v.idx, a)) + } +} diff --git a/vendor/cuelang.org/go/cue/builtinutil.go b/vendor/cuelang.org/go/cue/builtinutil.go new file mode 100644 index 000000000..2bfd8adf7 --- /dev/null +++ b/vendor/cuelang.org/go/cue/builtinutil.go @@ -0,0 +1,45 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +// TODO: this code could be generated, but currently isn't. + +type valueSorter struct { + a []Value + cmp Value + err error +} + +func (s *valueSorter) ret() ([]Value, error) { + if s.err != nil { + return nil, s.err + } + // The input slice is already a copy and that we can modify it safely. 
+ return s.a, nil +} + +func (s *valueSorter) Len() int { return len(s.a) } +func (s *valueSorter) Swap(i, j int) { s.a[i], s.a[j] = s.a[j], s.a[i] } +func (s *valueSorter) Less(i, j int) bool { + v := s.cmp.Fill(s.a[i], "x") + v = v.Fill(s.a[j], "y") + + isLess, err := v.Lookup("less").Bool() + if err != nil && s.err == nil { + s.err = err + return true + } + return isLess +} diff --git a/vendor/cuelang.org/go/cue/context.go b/vendor/cuelang.org/go/cue/context.go new file mode 100644 index 000000000..33d213d5d --- /dev/null +++ b/vendor/cuelang.org/go/cue/context.go @@ -0,0 +1,81 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +import ( + "cuelang.org/go/internal/core/adt" + "cuelang.org/go/internal/core/debug" + "cuelang.org/go/internal/core/eval" +) + +// context manages evaluation state. +type context struct { + opCtx *adt.OpContext + *index +} + +// newContext returns a new evaluation context. +func (idx *index) newContext() *context { + c := &context{ + index: idx, + } + if idx != nil { + c.opCtx = eval.NewContext(idx.Runtime, nil) + } + return c +} + +func debugStr(ctx *context, v adt.Node) string { + return debug.NodeString(ctx.opCtx, v, nil) +} + +func (c *context) str(v adt.Node) string { + return debugStr(c, v) +} + +func (c *context) mkErr(src adt.Node, args ...interface{}) *adt.Bottom { + return c.index.mkErr(src, args...) 
+} + +func (c *context) vertex(v *adt.Vertex) *adt.Vertex { + return v +} + +// vertex returns the evaluated vertex of v. +func (v Value) vertex(ctx *context) *adt.Vertex { + return ctx.vertex(v.v) +} + +// eval returns the evaluated value. This may not be the vertex. +// +// Deprecated: use ctx.value +func (v Value) eval(ctx *context) adt.Value { + if v.v == nil { + panic("undefined value") + } + x := ctx.manifest(v.v) + return x.Value() +} + +// func (v Value) evalFull(u value) (Value, adt.Value) { +// ctx := v.ctx() +// x := ctx.manifest(u) +// } + +// TODO: change from Vertex to Vertex. +func (c *context) manifest(v *adt.Vertex) *adt.Vertex { + v.Finalize(c.opCtx) + return v +} diff --git a/vendor/cuelang.org/go/cue/cue.go b/vendor/cuelang.org/go/cue/cue.go new file mode 100644 index 000000000..2e0a03188 --- /dev/null +++ b/vendor/cuelang.org/go/cue/cue.go @@ -0,0 +1,17 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package cue is a transition package for supporting the cue.Value API. +// It aims to be plugin compatible with the old API. 
+package cue diff --git a/vendor/cuelang.org/go/cue/errors.go b/vendor/cuelang.org/go/cue/errors.go new file mode 100644 index 000000000..3ce30f0d5 --- /dev/null +++ b/vendor/cuelang.org/go/cue/errors.go @@ -0,0 +1,139 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +import ( + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal/core/adt" +) + +func (v Value) toErr(b *adt.Bottom) (err errors.Error) { + errs := errors.Errors(b.Err) + if len(errs) > 1 { + for _, e := range errs { + bb := *b + bb.Err = e + err = errors.Append(err, &valueError{v: v, err: &bb}) + } + return err + } + return &valueError{v: v, err: b} +} + +var _ errors.Error = &valueError{} + +// A valueError is returned as a result of evaluating a value. 
+type valueError struct { + v Value + err *adt.Bottom +} + +func (e *valueError) Unwrap() error { + if e.err.Err == nil { + return nil + } + return errors.Unwrap(e.err.Err) +} + +func (e *valueError) Bottom() *adt.Bottom { return e.err } + +func (e *valueError) Error() string { + return errors.String(e) +} + +func (e *valueError) Position() token.Pos { + if e.err.Err != nil { + return e.err.Err.Position() + } + src := e.err.Source() + if src == nil { + return token.NoPos + } + return src.Pos() +} + +func (e *valueError) InputPositions() []token.Pos { + if e.err.Err == nil { + return nil + } + return e.err.Err.InputPositions() +} + +func (e *valueError) Msg() (string, []interface{}) { + if e.err.Err == nil { + return "", nil + } + return e.err.Err.Msg() +} + +func (e *valueError) Path() (a []string) { + if e.err.Err != nil { + a = e.err.Err.Path() + if a != nil { + return a + } + } + return e.v.appendPath(nil) +} + +type errCode = adt.ErrorCode + +const ( + codeNotExist = adt.NotExistError + codeIncomplete = adt.IncompleteError +) + +var errNotExists = &adt.Bottom{ + Code: codeNotExist, + Err: errors.Newf(token.NoPos, "undefined value"), +} + +func (idx *index) mkErr(src adt.Node, args ...interface{}) *adt.Bottom { + var e *adt.Bottom + var code errCode = -1 +outer: + for i, a := range args { + switch x := a.(type) { + case errCode: + code = x + case *adt.Bottom: + e = adt.CombineErrors(nil, e, x) + case []*adt.Bottom: + for _, b := range x { + e = adt.CombineErrors(nil, e, b) + } + case errors.Error: + e = adt.CombineErrors(nil, e, &adt.Bottom{Err: x}) + case adt.Expr: + case string: + args := args[i+1:] + // Do not expand message so that errors can be localized. 
+ pos := pos(src) + if code < 0 { + code = 0 + } + e = adt.CombineErrors(nil, e, &adt.Bottom{ + Code: code, + Err: errors.Newf(pos, x, args...), + }) + break outer + } + } + if code >= 0 { + e.Code = code + } + return e +} diff --git a/vendor/cuelang.org/go/cue/errors/errors.go b/vendor/cuelang.org/go/cue/errors/errors.go new file mode 100644 index 000000000..20b5bd0fd --- /dev/null +++ b/vendor/cuelang.org/go/cue/errors/errors.go @@ -0,0 +1,608 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package errors defines shared types for handling CUE errors. +// +// The pivotal error type in CUE packages is the interface type Error. +// The information available in such errors can be most easily retrieved using +// the Path, Positions, and Print functions. +package errors // import "cuelang.org/go/cue/errors" + +import ( + "bytes" + "errors" + "fmt" + "io" + "path/filepath" + "sort" + "strings" + + "github.com/mpvl/unique" + "golang.org/x/xerrors" + + "cuelang.org/go/cue/token" +) + +// New is a convenience wrapper for errors.New in the core library. +// It does not return a CUE error. +func New(msg string) error { + return errors.New(msg) +} + +// Unwrap returns the result of calling the Unwrap method on err, if err +// implements Unwrap. Otherwise, Unwrap returns nil. +func Unwrap(err error) error { + return xerrors.Unwrap(err) +} + +// Is reports whether any error in err's chain matches target. 
+// +// An error is considered to match a target if it is equal to that target or if +// it implements a method Is(error) bool such that Is(target) returns true. +func Is(err, target error) bool { + return xerrors.Is(err, target) +} + +// As finds the first error in err's chain that matches the type to which target +// points, and if so, sets the target to its value and returns true. An error +// matches a type if it is assignable to the target type, or if it has a method +// As(interface{}) bool such that As(target) returns true. As will panic if +// target is not a non-nil pointer to a type which implements error or is of +// interface type. +// +// The As method should set the target to its value and return true if err +// matches the type to which target points. +func As(err error, target interface{}) bool { + return xerrors.As(err, target) +} + +// A Message implements the error interface as well as Message to allow +// internationalized messages. A Message is typically used as an embedding +// in a CUE message. +type Message struct { + format string + args []interface{} +} + +// NewMessage creates an error message for human consumption. The arguments +// are for later consumption, allowing the message to be localized at a later +// time. The passed argument list should not be modified. +func NewMessage(format string, args []interface{}) Message { + return Message{format: format, args: args} +} + +// Msg returns a printf-style format string and its arguments for human +// consumption. +func (m *Message) Msg() (format string, args []interface{}) { + return m.format, m.args +} + +func (m *Message) Error() string { + return fmt.Sprintf(m.format, m.args...) +} + +// Error is the common error message. +type Error interface { + // Position returns the primary position of an error. If multiple positions + // contribute equally, this reflects one of them. 
+ Position() token.Pos + + // InputPositions reports positions that contributed to an error, including + // the expressions resulting in the conflict, as well as values that were + // the input to this expression. + InputPositions() []token.Pos + + // Error reports the error message without position information. + Error() string + + // Path returns the path into the data tree where the error occurred. + // This path may be nil if the error is not associated with such a location. + Path() []string + + // Msg returns the unformatted error message and its arguments for human + // consumption. + Msg() (format string, args []interface{}) +} + +// Positions returns all positions returned by an error, sorted +// by relevance when possible and with duplicates removed. +func Positions(err error) []token.Pos { + e := Error(nil) + if !xerrors.As(err, &e) { + return nil + } + + a := make([]token.Pos, 0, 3) + + sortOffset := 0 + pos := e.Position() + if pos.IsValid() { + a = append(a, pos) + sortOffset = 1 + } + + for _, p := range e.InputPositions() { + if p.IsValid() && p != pos { + a = append(a, p) + } + } + + byPos := byPos(a[sortOffset:]) + sort.Sort(byPos) + k := unique.ToFront(byPos) + return a[:k+sortOffset] +} + +type byPos []token.Pos + +func (s *byPos) Truncate(n int) { (*s) = (*s)[:n] } +func (s byPos) Len() int { return len(s) } +func (s byPos) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byPos) Less(i, j int) bool { return comparePos(s[i], s[j]) == -1 } + +// Path returns the path of an Error if err is of that type. +func Path(err error) []string { + if e := Error(nil); xerrors.As(err, &e) { + return e.Path() + } + return nil +} + +// Newf creates an Error with the associated position and message. +func Newf(p token.Pos, format string, args ...interface{}) Error { + return &posError{ + pos: p, + Message: NewMessage(format, args), + } +} + +// Wrapf creates an Error with the associated position and message. 
The provided +// error is added for inspection context. +func Wrapf(err error, p token.Pos, format string, args ...interface{}) Error { + a, ok := err.(list) + if !ok { + return &posError{ + pos: p, + Message: NewMessage(format, args), + err: err, + } + } + b := make([]Error, len(a)) + for i, err := range a { + b[i] = &posError{ + pos: p, + Message: NewMessage(format, args), + err: err, + } + } + return list(b) +} + +// Promote converts a regular Go error to an Error if it isn't already one. +func Promote(err error, msg string) Error { + switch x := err.(type) { + case Error: + return x + default: + return Wrapf(err, token.NoPos, msg) + } +} + +var _ Error = &posError{} + +// In an List, an error is represented by an *posError. +// The position Pos, if valid, points to the beginning of +// the offending token, and the error condition is described +// by Msg. +type posError struct { + pos token.Pos + inputs []token.Pos + Message + + // The underlying error that triggered this one, if any. + err error +} + +func (e *posError) Path() []string { return Path(e.err) } +func (e *posError) InputPositions() []token.Pos { return e.inputs } +func (e *posError) Position() token.Pos { return e.pos } +func (e *posError) Unwrap() error { return e.err } +func (e *posError) Cause() error { return e.err } + +// Error implements the error interface. +func (e *posError) Error() string { + if e.err == nil { + return e.Message.Error() + } + if e.Message.format == "" { + return e.err.Error() + } + return fmt.Sprintf("%s: %s", e.Message.Error(), e.err) +} + +// Append combines two errors, flattening Lists as necessary. +func Append(a, b Error) Error { + switch x := a.(type) { + case nil: + return b + case list: + return appendToList(x, b) + } + // Preserve order of errors. 
+ list := appendToList(nil, a) + list = appendToList(list, b) + return list +} + +// Errors reports the individual errors associated with an error, which is +// the error itself if there is only one or, if the underlying type is List, +// its individual elements. If the given error is not an Error, it will be +// promoted to one. +func Errors(err error) []Error { + switch x := err.(type) { + case nil: + return nil + case list: + return []Error(x) + case Error: + return []Error{x} + default: + return []Error{Promote(err, "")} + } +} + +func appendToList(a list, err Error) list { + switch x := err.(type) { + case nil: + return a + case list: + if a == nil { + return x + } + return append(a, x...) + default: + return append(a, err) + } +} + +// list is a list of Errors. +// The zero value for an list is an empty list ready to use. +type list []Error + +func (p list) Is(err, target error) bool { + for _, e := range p { + if xerrors.Is(e, target) { + return true + } + } + return false +} + +func (p list) As(err error, target interface{}) bool { + for _, e := range p { + if xerrors.As(e, target) { + return true + } + } + return false +} + +// AddNewf adds an Error with given position and error message to an List. +func (p *list) AddNewf(pos token.Pos, msg string, args ...interface{}) { + err := &posError{pos: pos, Message: Message{format: msg, args: args}} + *p = append(*p, err) +} + +// Add adds an Error with given position and error message to an List. +func (p *list) Add(err Error) { + *p = appendToList(*p, err) +} + +// Reset resets an List to no errors. +func (p *list) Reset() { *p = (*p)[:0] } + +// List implements the sort Interface. 
+func (p list) Len() int { return len(p) } +func (p list) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +func (p list) Less(i, j int) bool { + if c := comparePos(p[i].Position(), p[j].Position()); c != 0 { + return c == -1 + } + // Note that it is not sufficient to simply compare file offsets because + // the offsets do not reflect modified line information (through //line + // comments). + + if !equalPath(p[i].Path(), p[j].Path()) { + return lessPath(p[i].Path(), p[j].Path()) + } + return p[i].Error() < p[j].Error() +} + +func lessOrMore(isLess bool) int { + if isLess { + return -1 + } + return 1 +} + +func comparePos(a, b token.Pos) int { + if a.Filename() != b.Filename() { + return lessOrMore(a.Filename() < b.Filename()) + } + if a.Line() != b.Line() { + return lessOrMore(a.Line() < b.Line()) + } + if a.Column() != b.Column() { + return lessOrMore(a.Column() < b.Column()) + } + return 0 +} + +func lessPath(a, b []string) bool { + for i, x := range a { + if i >= len(b) { + return false + } + if x != b[i] { + return x < b[i] + } + } + return len(a) < len(b) +} + +func equalPath(a, b []string) bool { + if len(a) != len(b) { + return false + } + for i, x := range a { + if x != b[i] { + return false + } + } + return true +} + +// Sanitize sorts multiple errors and removes duplicates on a best effort basis. +// If err represents a single or no error, it returns the error as is. +func Sanitize(err Error) Error { + if l, ok := err.(list); ok && err != nil { + a := make(list, len(l)) + copy(a, l) + a.Sort() + a.RemoveMultiples() + return a + } + return err +} + +// Sort sorts an List. *posError entries are sorted by position, +// other errors are sorted by error message, and before any *posError +// entry. +// +func (p list) Sort() { + sort.Sort(p) +} + +// RemoveMultiples sorts an List and removes all but the first error per line. 
+func (p *list) RemoveMultiples() { + p.Sort() + var last Error + i := 0 + for _, e := range *p { + if last == nil || !approximateEqual(last, e) { + last = e + (*p)[i] = e + i++ + } + } + (*p) = (*p)[0:i] +} + +func approximateEqual(a, b Error) bool { + aPos := a.Position() + bPos := b.Position() + if aPos == token.NoPos || bPos == token.NoPos { + return a.Error() == b.Error() + } + return aPos.Filename() == bPos.Filename() && + aPos.Line() == bPos.Line() && + equalPath(a.Path(), b.Path()) +} + +// An List implements the error interface. +func (p list) Error() string { + format, args := p.Msg() + return fmt.Sprintf(format, args...) +} + +// Msg reports the unformatted error message for the first error, if any. +func (p list) Msg() (format string, args []interface{}) { + switch len(p) { + case 0: + return "no errors", nil + case 1: + return p[0].Msg() + } + return "%s (and %d more errors)", []interface{}{p[0], len(p) - 1} +} + +// Position reports the primary position for the first error, if any. +func (p list) Position() token.Pos { + if len(p) == 0 { + return token.NoPos + } + return p[0].Position() +} + +// InputPositions reports the input positions for the first error, if any. +func (p list) InputPositions() []token.Pos { + if len(p) == 0 { + return nil + } + return p[0].InputPositions() +} + +// Path reports the path location of the first error, if any. +func (p list) Path() []string { + if len(p) == 0 { + return nil + } + return p[0].Path() +} + +// Err returns an error equivalent to this error list. +// If the list is empty, Err returns nil. +func (p list) Err() error { + if len(p) == 0 { + return nil + } + return p +} + +// A Config defines parameters for printing. +type Config struct { + // Format formats the given string and arguments and writes it to w. + // It is used for all printing. + Format func(w io.Writer, format string, args ...interface{}) + + // Cwd is the current working directory. Filename positions are taken + // relative to this path. 
+ Cwd string + + // ToSlash sets whether to use Unix paths. Mostly used for testing. + ToSlash bool +} + +// Print is a utility function that prints a list of errors to w, +// one error per line, if the err parameter is an List. Otherwise +// it prints the err string. +// +func Print(w io.Writer, err error, cfg *Config) { + if cfg == nil { + cfg = &Config{} + } + if e, ok := err.(Error); ok { + err = Sanitize(e) + } + for _, e := range Errors(err) { + printError(w, e, cfg) + } +} + +// Details is a convenience wrapper for Print to return the error text as a +// string. +func Details(err error, cfg *Config) string { + w := &bytes.Buffer{} + Print(w, err, cfg) + return w.String() +} + +// String generates a short message from a given Error. +func String(err Error) string { + w := &strings.Builder{} + writeErr(w, err) + return w.String() +} + +func writeErr(w io.Writer, err Error) { + if path := strings.Join(err.Path(), "."); path != "" { + _, _ = io.WriteString(w, path) + _, _ = io.WriteString(w, ": ") + } + + for { + u := xerrors.Unwrap(err) + + printed := false + msg, args := err.Msg() + if msg != "" || u == nil { // print at least something + fmt.Fprintf(w, msg, args...) + printed = true + } + + if u == nil { + break + } + + if printed { + _, _ = io.WriteString(w, ": ") + } + err, _ = u.(Error) + if err == nil { + fmt.Fprint(w, u) + break + } + } +} + +func defaultFprintf(w io.Writer, format string, args ...interface{}) { + fmt.Fprintf(w, format, args...) +} + +func printError(w io.Writer, err error, cfg *Config) { + if err == nil { + return + } + fprintf := cfg.Format + if fprintf == nil { + fprintf = defaultFprintf + } + + positions := []string{} + for _, p := range Positions(err) { + pos := p.Position() + s := pos.Filename + if cfg.Cwd != "" { + if p, err := filepath.Rel(cfg.Cwd, s); err == nil { + s = p + // Some IDEs (e.g. VSCode) only recognize a path if it start + // with a dot. This also helps to distinguish between local + // files and builtin packages. 
+ if !strings.HasPrefix(s, ".") { + s = fmt.Sprintf(".%s%s", string(filepath.Separator), s) + } + } + } + if cfg.ToSlash { + s = filepath.ToSlash(s) + } + if pos.IsValid() { + if s != "" { + s += ":" + } + s += fmt.Sprintf("%d:%d", pos.Line, pos.Column) + } + if s == "" { + s = "-" + } + positions = append(positions, s) + } + + if e, ok := err.(Error); ok { + writeErr(w, e) + } else { + fprintf(w, "%v", err) + } + + if len(positions) == 0 { + fprintf(w, "\n") + return + } + + fprintf(w, ":\n") + for _, pos := range positions { + fprintf(w, " %s\n", pos) + } +} diff --git a/vendor/cuelang.org/go/cue/format/format.go b/vendor/cuelang.org/go/cue/format/format.go new file mode 100644 index 000000000..b3c3c695e --- /dev/null +++ b/vendor/cuelang.org/go/cue/format/format.go @@ -0,0 +1,344 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package format implements standard formatting of CUE configurations. +package format // import "cuelang.org/go/cue/format" + +// TODO: this package is in need of a rewrite. When doing so, the API should +// allow for reformatting an AST, without actually writing bytes. +// +// In essence, formatting determines the relative spacing to tokens. It should +// be possible to have an abstract implementation providing such information +// that can be used to either format or update an AST in a single walk. 
+ +import ( + "bytes" + "fmt" + "strings" + "text/tabwriter" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" +) + +// An Option sets behavior of the formatter. +type Option func(c *config) + +// Simplify allows the formatter to simplify output, such as removing +// unnecessary quotes. +func Simplify() Option { + return func(c *config) { c.simplify = true } +} + +// UseSpaces specifies that tabs should be converted to spaces and sets the +// default tab width. +func UseSpaces(tabwidth int) Option { + return func(c *config) { + c.UseSpaces = true + c.Tabwidth = tabwidth + } +} + +// TabIndent specifies whether to use tabs for indentation independent of +// UseSpaces. +func TabIndent(indent bool) Option { + return func(c *config) { c.TabIndent = indent } +} + +// TODO: make public +// sortImportsOption causes import declarations to be sorted. +func sortImportsOption() Option { + return func(c *config) { c.sortImports = true } +} + +// TODO: other options: +// +// const ( +// RawFormat Mode = 1 << iota // do not use a tabwriter; if set, UseSpaces is ignored +// TabIndent // use tabs for indentation independent of UseSpaces +// UseSpaces // use spaces instead of tabs for alignment +// SourcePos // emit //line comments to preserve original source positions +// ) + +// Node formats node in canonical cue fmt style and writes the result to dst. +// +// The node type must be *ast.File, []syntax.Decl, syntax.Expr, syntax.Decl, or +// syntax.Spec. Node does not modify node. Imports are not sorted for nodes +// representing partial source files (for instance, if the node is not an +// *ast.File). +// +// The function may return early (before the entire result is written) and +// return a formatting error, for instance due to an incorrect AST. 
+// +func Node(node ast.Node, opt ...Option) ([]byte, error) { + cfg := newConfig(opt) + return cfg.fprint(node) +} + +// Source formats src in canonical cue fmt style and returns the result or an +// (I/O or syntax) error. src is expected to be a syntactically correct CUE +// source file, or a list of CUE declarations or statements. +// +// If src is a partial source file, the leading and trailing space of src is +// applied to the result (such that it has the same leading and trailing space +// as src), and the result is indented by the same amount as the first line of +// src containing code. Imports are not sorted for partial source files. +// +// Caution: Tools relying on consistent formatting based on the installed +// version of cue (for instance, such as for presubmit checks) should execute +// that cue binary instead of calling Source. +// +func Source(b []byte, opt ...Option) ([]byte, error) { + cfg := newConfig(opt) + + f, err := parser.ParseFile("", b, parser.ParseComments) + if err != nil { + return nil, fmt.Errorf("parse: %s", err) + } + + // print AST + return cfg.fprint(f) +} + +type config struct { + UseSpaces bool + TabIndent bool + Tabwidth int // default: 4 + Indent int // default: 0 (all code is indented at least by this much) + + simplify bool + sortImports bool +} + +func newConfig(opt []Option) *config { + cfg := &config{ + Tabwidth: 8, + TabIndent: true, + UseSpaces: true, + } + for _, o := range opt { + o(cfg) + } + return cfg +} + +// Config defines the output of Fprint. 
+func (cfg *config) fprint(node interface{}) (out []byte, err error) { + var p printer + p.init(cfg) + if err = printNode(node, &p); err != nil { + return p.output, err + } + + padchar := byte('\t') + if cfg.UseSpaces { + padchar = byte(' ') + } + + twmode := tabwriter.StripEscape | tabwriter.TabIndent | tabwriter.DiscardEmptyColumns + if cfg.TabIndent { + twmode |= tabwriter.TabIndent + } + + buf := &bytes.Buffer{} + tw := tabwriter.NewWriter(buf, 0, cfg.Tabwidth, 1, padchar, twmode) + + // write printer result via tabwriter/trimmer to output + if _, err = tw.Write(p.output); err != nil { + return + } + + err = tw.Flush() + if err != nil { + return buf.Bytes(), err + } + + b := buf.Bytes() + if !cfg.TabIndent { + b = bytes.ReplaceAll(b, []byte{'\t'}, bytes.Repeat([]byte{' '}, cfg.Tabwidth)) + } + return b, nil +} + +// A formatter walks a syntax.Node, interspersed with comments and spacing +// directives, in the order that they would occur in printed form. +type formatter struct { + *printer + + stack []frame + current frame + nestExpr int +} + +func newFormatter(p *printer) *formatter { + f := &formatter{ + printer: p, + current: frame{ + settings: settings{ + nodeSep: newline, + parentSep: newline, + }, + }, + } + return f +} + +type whiteSpace int + +const ( + ignore whiteSpace = 0 + + // write a space, or disallow it + blank whiteSpace = 1 << iota + vtab // column marker + noblank + + nooverride + + comma // print a comma, unless trailcomma overrides it + trailcomma // print a trailing comma unless closed on same line + declcomma // write a comma when not at the end of line + + newline // write a line in a table + formfeed // next line is not part of the table + newsection // add two newlines + + indent // request indent an extra level after the next newline + unindent // unindent a level after the next newline + indented // element was indented. 
+) + +type frame struct { + cg []*ast.CommentGroup + pos int8 + + settings +} + +type settings struct { + // separator is blank if the current node spans a single line and newline + // otherwise. + nodeSep whiteSpace + parentSep whiteSpace + override whiteSpace +} + +// suppress spurious linter warning: field is actually used. +func init() { + s := settings{} + _ = s.override +} + +func (f *formatter) print(a ...interface{}) { + for _, x := range a { + f.Print(x) + switch x.(type) { + case string, token.Token: // , *syntax.BasicLit, *syntax.Ident: + f.current.pos++ + } + } + f.visitComments(f.current.pos) +} + +func (f *formatter) formfeed() whiteSpace { + if f.current.nodeSep == blank { + return blank + } + return formfeed +} + +func (f *formatter) wsOverride(def whiteSpace) whiteSpace { + if f.current.override == ignore { + return def + } + return f.current.override +} + +func (f *formatter) onOneLine(node ast.Node) bool { + a := node.Pos() + b := node.End() + if a.IsValid() && b.IsValid() { + return f.lineFor(a) == f.lineFor(b) + } + // TODO: walk and look at relative positions to determine the same? 
+ return false +} + +func (f *formatter) before(node ast.Node) bool { + f.stack = append(f.stack, f.current) + f.current = frame{settings: f.current.settings} + f.current.parentSep = f.current.nodeSep + + if node != nil { + s, ok := node.(*ast.StructLit) + if ok && len(s.Elts) <= 1 && f.current.nodeSep != blank && f.onOneLine(node) { + f.current.nodeSep = blank + } + f.current.cg = node.Comments() + f.visitComments(f.current.pos) + return true + } + return false +} + +func (f *formatter) after(node ast.Node) { + f.visitComments(127) + p := len(f.stack) - 1 + f.current = f.stack[p] + f.stack = f.stack[:p] + f.current.pos++ + f.visitComments(f.current.pos) +} + +func (f *formatter) visitComments(until int8) { + c := &f.current + + printed := false + for ; len(c.cg) > 0 && c.cg[0].Position <= until; c.cg = c.cg[1:] { + if printed { + f.Print(newsection) + } + printed = true + f.printComment(c.cg[0]) + } +} + +func (f *formatter) printComment(cg *ast.CommentGroup) { + f.Print(cg) + + printBlank := false + if cg.Doc && len(f.output) > 0 { + f.Print(newline) + printBlank = true + } + for _, c := range cg.List { + isEnd := strings.HasPrefix(c.Text, "//") + if !printBlank { + if isEnd { + f.Print(vtab) + } else { + f.Print(blank) + } + } + f.Print(c.Slash) + f.Print(c) + if isEnd { + f.Print(newline) + if cg.Doc { + f.Print(nooverride) + } + } + } +} diff --git a/vendor/cuelang.org/go/cue/format/import.go b/vendor/cuelang.org/go/cue/format/import.go new file mode 100644 index 000000000..873de2c7f --- /dev/null +++ b/vendor/cuelang.org/go/cue/format/import.go @@ -0,0 +1,167 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package format + +import ( + "sort" + "strconv" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/token" +) + +// sortImports sorts runs of consecutive import lines in import blocks in f. +// It also removes duplicate imports when it is possible to do so without data +// loss. +func sortImports(d *ast.ImportDecl) { + if !d.Lparen.IsValid() || len(d.Specs) == 0 { + // Not a block: sorted by default. + return + } + + // Identify and sort runs of specs on successive lines. + i := 0 + specs := d.Specs[:0] + for j, s := range d.Specs { + if j > i && (s.Pos().RelPos() >= token.NewSection || hasDoc(s)) { + setRelativePos(s, token.Newline) + // j begins a new run. End this one. + block := sortSpecs(d.Specs[i:j]) + specs = append(specs, block...) + i = j + } + } + specs = append(specs, sortSpecs(d.Specs[i:])...) 
+ setRelativePos(specs[0], token.Newline) + d.Specs = specs +} + +func setRelativePos(s *ast.ImportSpec, r token.RelPos) { + if hasDoc(s) { + return + } + pos := s.Pos().WithRel(r) + if s.Name != nil { + s.Name.NamePos = pos + } else { + s.Path.ValuePos = pos + } +} + +func hasDoc(s *ast.ImportSpec) bool { + for _, doc := range s.Comments() { + if doc.Doc { + return true + } + } + return false +} + +func importPath(s *ast.ImportSpec) string { + t, err := strconv.Unquote(s.Path.Value) + if err == nil { + return t + } + return "" +} + +func importName(s *ast.ImportSpec) string { + n := s.Name + if n == nil { + return "" + } + return n.Name +} + +func importComment(s *ast.ImportSpec) string { + for _, c := range s.Comments() { + if c.Line { + return c.Text() + } + } + return "" +} + +// collapse indicates whether prev may be removed, leaving only next. +func collapse(prev, next *ast.ImportSpec) bool { + if importPath(next) != importPath(prev) || importName(next) != importName(prev) { + return false + } + for _, c := range prev.Comments() { + if !c.Doc { + return false + } + } + return true +} + +type posSpan struct { + Start token.Pos + End token.Pos +} + +func sortSpecs(specs []*ast.ImportSpec) []*ast.ImportSpec { + // Can't short-circuit here even if specs are already sorted, + // since they might yet need deduplication. + // A lone import, however, may be safely ignored. + if len(specs) <= 1 { + setRelativePos(specs[0], token.NewSection) + return specs + } + + // Record positions for specs. + pos := make([]posSpan, len(specs)) + for i, s := range specs { + pos[i] = posSpan{s.Pos(), s.End()} + } + + // Sort the import specs by import path. + // Remove duplicates, when possible without data loss. + // Reassign the import paths to have the same position sequence. + // Reassign each comment to abut the end of its spec. + // Sort the comments by new position. + sort.Sort(byImportSpec(specs)) + + // Dedup. 
Thanks to our sorting, we can just consider + // adjacent pairs of imports. + deduped := specs[:0] + for i, s := range specs { + if i == len(specs)-1 || !collapse(s, specs[i+1]) { + deduped = append(deduped, s) + } + } + specs = deduped + + setRelativePos(specs[0], token.NewSection) + return specs +} + +type byImportSpec []*ast.ImportSpec + +func (x byImportSpec) Len() int { return len(x) } +func (x byImportSpec) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x byImportSpec) Less(i, j int) bool { + ipath := importPath(x[i]) + jpath := importPath(x[j]) + if ipath != jpath { + return ipath < jpath + } + iname := importName(x[i]) + jname := importName(x[j]) + if iname != jname { + return iname < jname + } + return importComment(x[i]) < importComment(x[j]) +} diff --git a/vendor/cuelang.org/go/cue/format/node.go b/vendor/cuelang.org/go/cue/format/node.go new file mode 100644 index 000000000..1156c9bff --- /dev/null +++ b/vendor/cuelang.org/go/cue/format/node.go @@ -0,0 +1,932 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package format + +import ( + "fmt" + "strings" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/scanner" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" +) + +func printNode(node interface{}, f *printer) error { + s := newFormatter(f) + + ls := labelSimplifier{scope: map[string]bool{}} + + // format node + f.allowed = nooverride // gobble initial whitespace. 
+ switch x := node.(type) { + case *ast.File: + if f.cfg.simplify { + ls.markReferences(x) + } + s.file(x) + case ast.Expr: + if f.cfg.simplify { + ls.markReferences(x) + } + s.expr(x) + case ast.Decl: + if f.cfg.simplify { + ls.markReferences(x) + } + s.decl(x) + // case ast.Node: // TODO: do we need this? + // s.walk(x) + case []ast.Decl: + if f.cfg.simplify { + ls.processDecls(x) + } + s.walkDeclList(x) + default: + goto unsupported + } + + return s.errs + +unsupported: + return fmt.Errorf("cue/format: unsupported node type %T", node) +} + +func isRegularField(tok token.Token) bool { + return tok == token.ILLEGAL || tok == token.COLON +} + +// Helper functions for common node lists. They may be empty. + +func nestDepth(f *ast.Field) int { + d := 1 + if s, ok := f.Value.(*ast.StructLit); ok { + switch { + case len(s.Elts) != 1: + d = 0 + default: + if f, ok := s.Elts[0].(*ast.Field); ok { + d += nestDepth(f) + } + } + } + return d +} + +// TODO: be more accurate and move to astutil +func hasDocComments(d ast.Decl) bool { + if len(d.Comments()) > 0 { + return true + } + switch x := d.(type) { + case *ast.Field: + return len(x.Label.Comments()) > 0 + case *ast.Alias: + return len(x.Ident.Comments()) > 0 + case *ast.LetClause: + return len(x.Ident.Comments()) > 0 + } + return false +} + +func (f *formatter) walkDeclList(list []ast.Decl) { + f.before(nil) + d := 0 + hasEllipsis := false + for i, x := range list { + if i > 0 { + f.print(declcomma) + nd := 0 + if f, ok := x.(*ast.Field); ok { + nd = nestDepth(f) + } + if f.current.parentSep == newline && (d == 0 || nd != d) { + f.print(f.formfeed()) + } + if hasDocComments(x) { + switch x := list[i-1].(type) { + case *ast.Field: + if x.Token == token.ISA || internal.IsDefinition(x.Label) { + f.print(newsection) + } + + default: + f.print(newsection) + } + } + } + if f.printer.cfg.simplify && internal.IsEllipsis(x) { + hasEllipsis = true + continue + } + f.decl(x) + d = 0 + if f, ok := x.(*ast.Field); ok { + d = 
nestDepth(f) + } + if j := i + 1; j < len(list) { + switch x := list[j].(type) { + case *ast.Field: + switch x := x.Value.(type) { + case *ast.StructLit: + // TODO: not entirely correct: could have multiple elements, + // not have a valid Lbrace, and be marked multiline. This + // cannot occur for ASTs resulting from a parse, though. + if x.Lbrace.IsValid() || len(x.Elts) != 1 { + f.print(f.formfeed()) + continue + } + case *ast.ListLit: + f.print(f.formfeed()) + continue + } + } + } + f.print(f.current.parentSep) + } + if hasEllipsis { + f.decl(&ast.Ellipsis{}) + f.print(f.current.parentSep) + } + f.after(nil) +} + +func (f *formatter) walkSpecList(list []*ast.ImportSpec) { + f.before(nil) + for _, x := range list { + f.before(x) + f.importSpec(x) + f.after(x) + } + f.after(nil) +} + +func (f *formatter) walkClauseList(list []ast.Clause, ws whiteSpace) { + f.before(nil) + for _, x := range list { + f.before(x) + f.print(ws) + f.clause(x) + f.after(x) + } + f.after(nil) +} + +func (f *formatter) walkListElems(list []ast.Expr) { + f.before(nil) + for _, x := range list { + f.before(x) + switch n := x.(type) { + case *ast.Comprehension: + f.walkClauseList(n.Clauses, blank) + f.print(blank, nooverride) + f.expr(n.Value) + + case *ast.Ellipsis: + f.ellipsis(n) + + case *ast.Alias: + f.expr(n.Ident) + f.print(n.Equal, token.BIND) + f.expr(n.Expr) + + // TODO: ast.CommentGroup: allows comment groups in ListLits. 
+ + case ast.Expr: + f.exprRaw(n, token.LowestPrec, 1) + } + f.print(comma, blank) + f.after(x) + } + f.after(nil) +} + +func (f *formatter) walkArgsList(list []ast.Expr, depth int) { + f.before(nil) + for _, x := range list { + f.before(x) + f.exprRaw(x, token.LowestPrec, depth) + f.print(comma, blank) + f.after(x) + } + f.after(nil) +} + +func (f *formatter) file(file *ast.File) { + f.before(file) + f.walkDeclList(file.Decls) + f.after(file) + f.print(token.EOF) +} + +func (f *formatter) inlineField(n *ast.Field) *ast.Field { + regular := internal.IsRegularField(n) + // shortcut single-element structs. + // If the label has a valid position, we assume that an unspecified + // Lbrace signals the intend to collapse fields. + if !n.Label.Pos().IsValid() && !(f.printer.cfg.simplify && regular) { + return nil + } + + obj, ok := n.Value.(*ast.StructLit) + if !ok || len(obj.Elts) != 1 || + (obj.Lbrace.IsValid() && !f.printer.cfg.simplify) || + (obj.Lbrace.IsValid() && hasDocComments(n)) || + len(n.Attrs) > 0 { + return nil + } + + mem, ok := obj.Elts[0].(*ast.Field) + if !ok || len(mem.Attrs) > 0 { + return nil + } + + if hasDocComments(mem) { + // TODO: this inserts curly braces even in spaces where this + // may not be desirable, such as: + // a: + // // foo + // b: 3 + return nil + } + return mem +} + +func (f *formatter) decl(decl ast.Decl) { + if decl == nil { + return + } + defer f.after(decl) + if !f.before(decl) { + return + } + + switch n := decl.(type) { + case *ast.Field: + f.label(n.Label, n.Optional != token.NoPos) + + regular := isRegularField(n.Token) + if regular { + f.print(noblank, nooverride, n.TokenPos, token.COLON) + } else { + f.print(blank, nooverride, n.Token) + } + + if mem := f.inlineField(n); mem != nil { + switch { + default: + fallthrough + + case regular && f.cfg.simplify: + f.print(blank, nooverride) + f.decl(mem) + + case mem.Label.Pos().IsNewline(): + f.print(indent, formfeed) + f.decl(mem) + f.indent-- + } + return + } + + nextFF := 
f.nextNeedsFormfeed(n.Value) + tab := vtab + if nextFF { + tab = blank + } + + f.print(tab) + + if n.Value != nil { + switch n.Value.(type) { + case *ast.ListComprehension, *ast.ListLit, *ast.StructLit: + f.expr(n.Value) + default: + f.print(indent) + f.expr(n.Value) + f.markUnindentLine() + } + } else { + f.current.pos++ + f.visitComments(f.current.pos) + } + + space := tab + for _, a := range n.Attrs { + if f.before(a) { + f.print(space, a.At, a) + } + f.after(a) + space = blank + } + + if nextFF { + f.print(formfeed) + } + + case *ast.BadDecl: + f.print(n.From, "*bad decl*", declcomma) + + case *ast.Package: + f.print(n.PackagePos, "package") + f.print(blank, n.Name, newsection, nooverride) + + case *ast.ImportDecl: + f.print(n.Import, "import") + if len(n.Specs) == 0 { + f.print(blank, n.Lparen, token.LPAREN, n.Rparen, token.RPAREN, newline) + break + } + switch { + case len(n.Specs) == 1 && len(n.Specs[0].Comments()) == 0: + if !n.Lparen.IsValid() { + f.print(blank) + f.walkSpecList(n.Specs) + break + } + fallthrough + default: + f.print(blank, n.Lparen, token.LPAREN, newline, indent) + f.walkSpecList(n.Specs) + f.print(unindent, newline, n.Rparen, token.RPAREN, newline) + } + f.print(newsection, nooverride) + + case *ast.LetClause: + if !decl.Pos().HasRelPos() || decl.Pos().RelPos() >= token.Newline { + f.print(formfeed) + } + f.print(n.Let, token.LET, blank, nooverride) + f.expr(n.Ident) + f.print(blank, nooverride, n.Equal, token.BIND, blank) + f.expr(n.Expr) + f.print(declcomma) // implied + + case *ast.EmbedDecl: + if !n.Pos().HasRelPos() || n.Pos().RelPos() >= token.Newline { + f.print(formfeed) + } + f.expr(n.Expr) + f.print(newline, noblank) + + case *ast.Attribute: + f.print(n.At, n) + + case *ast.CommentGroup: + f.printComment(n) + f.print(newsection) + + case ast.Expr: + f.embedding(n) + } +} + +func (f *formatter) embedding(decl ast.Expr) { + switch n := decl.(type) { + case *ast.Comprehension: + if !n.Pos().HasRelPos() || n.Pos().RelPos() >= 
token.Newline { + f.print(formfeed) + } + f.walkClauseList(n.Clauses, blank) + f.print(blank, nooverride) + f.expr(n.Value) + + case *ast.Ellipsis: + f.ellipsis(n) + + case *ast.Alias: + if !decl.Pos().HasRelPos() || decl.Pos().RelPos() >= token.Newline { + f.print(formfeed) + } + f.expr(n.Ident) + f.print(blank, n.Equal, token.BIND, blank) + f.expr(n.Expr) + f.print(declcomma) // implied + + // TODO: ast.CommentGroup: allows comment groups in ListLits. + + case ast.Expr: + f.exprRaw(n, token.LowestPrec, 1) + } +} + +func (f *formatter) nextNeedsFormfeed(n ast.Expr) bool { + switch x := n.(type) { + case *ast.StructLit: + return true + case *ast.BasicLit: + return strings.IndexByte(x.Value, '\n') >= 0 + case *ast.ListLit: + return true + } + return false +} + +func (f *formatter) importSpec(x *ast.ImportSpec) { + if x.Name != nil { + f.label(x.Name, false) + f.print(blank) + } else { + f.current.pos++ + f.visitComments(f.current.pos) + } + f.expr(x.Path) + f.print(newline) +} + +func isValidIdent(ident string) bool { + var scan scanner.Scanner + scan.Init(token.NewFile("check", -1, len(ident)), []byte(ident), nil, 0) + + _, tok, lit := scan.Scan() + if tok == token.IDENT || tok.IsKeyword() { + return lit == ident + } + return false +} + +func (f *formatter) label(l ast.Label, optional bool) { + f.before(l) + defer f.after(l) + switch n := l.(type) { + case *ast.Alias: + f.expr(n) + + case *ast.Ident: + // Escape an identifier that has invalid characters. This may happen, + // if the AST is not generated by the parser. + name := n.Name + if !ast.IsValidIdent(name) { + name = literal.String.Quote(n.Name) + } + f.print(n.NamePos, name) + + case *ast.BasicLit: + str := n.Value + // Allow any CUE string in the AST, but ensure it is formatted + // according to spec. 
+ if strings.HasPrefix(str, `"""`) || strings.HasPrefix(str, "#") { + if u, err := literal.Unquote(str); err == nil { + str = literal.String.Quote(u) + } + } + f.print(n.ValuePos, str) + + case *ast.TemplateLabel: + f.print(n.Langle, token.LSS, indent) + f.label(n.Ident, false) + f.print(unindent, n.Rangle, token.GTR) + + case *ast.ListLit: + f.expr(n) + + case *ast.Interpolation: + f.expr(n) + + default: + panic(fmt.Sprintf("unknown label type %T", n)) + } + if optional { + f.print(token.OPTION) + } +} + +func (f *formatter) ellipsis(x *ast.Ellipsis) { + f.print(x.Ellipsis, token.ELLIPSIS) + if x.Type != nil && !isTop(x.Type) { + f.expr(x.Type) + } +} + +func (f *formatter) expr(x ast.Expr) { + const depth = 1 + f.expr1(x, token.LowestPrec, depth) +} + +func (f *formatter) expr0(x ast.Expr, depth int) { + f.expr1(x, token.LowestPrec, depth) +} + +func (f *formatter) expr1(expr ast.Expr, prec1, depth int) { + if f.before(expr) { + f.exprRaw(expr, prec1, depth) + } + f.after(expr) +} + +func (f *formatter) exprRaw(expr ast.Expr, prec1, depth int) { + + switch x := expr.(type) { + case *ast.BadExpr: + f.print(x.From, "_|_") + + case *ast.BottomLit: + f.print(x.Bottom, token.BOTTOM) + + case *ast.Alias: + // Aliases in expression positions are printed in short form. 
+ f.label(x.Ident, false) + f.print(x.Equal, token.BIND) + f.expr(x.Expr) + + case *ast.Ident: + f.print(x.NamePos, x) + + case *ast.BinaryExpr: + if depth < 1 { + f.internalError("depth < 1:", depth) + depth = 1 + } + f.binaryExpr(x, prec1, cutoff(x, depth), depth) + + case *ast.UnaryExpr: + const prec = token.UnaryPrec + if prec < prec1 { + // parenthesis needed + f.print(token.LPAREN, nooverride) + f.expr(x) + f.print(token.RPAREN) + } else { + // no parenthesis needed + f.print(x.OpPos, x.Op, nooverride) + f.expr1(x.X, prec, depth) + } + + case *ast.BasicLit: + f.print(x.ValuePos, x) + + case *ast.Interpolation: + f.before(nil) + for _, x := range x.Elts { + f.expr0(x, depth+1) + } + f.after(nil) + + case *ast.ParenExpr: + if _, hasParens := x.X.(*ast.ParenExpr); hasParens { + // don't print parentheses around an already parenthesized expression + // TODO: consider making this more general and incorporate precedence levels + f.expr0(x.X, depth) + } else { + f.print(x.Lparen, token.LPAREN) + f.expr0(x.X, reduceDepth(depth)) // parentheses undo one level of depth + f.print(x.Rparen, token.RPAREN) + } + + case *ast.SelectorExpr: + f.selectorExpr(x, depth) + + case *ast.IndexExpr: + f.expr1(x.X, token.HighestPrec, 1) + f.print(x.Lbrack, token.LBRACK) + f.expr0(x.Index, depth+1) + f.print(x.Rbrack, token.RBRACK) + + case *ast.SliceExpr: + f.expr1(x.X, token.HighestPrec, 1) + f.print(x.Lbrack, token.LBRACK) + indices := []ast.Expr{x.Low, x.High} + for i, y := range indices { + if i > 0 { + // blanks around ":" if both sides exist and either side is a binary expression + x := indices[i-1] + if depth <= 1 && x != nil && y != nil && (isBinary(x) || isBinary(y)) { + f.print(blank, token.COLON, blank) + } else { + f.print(token.COLON) + } + } + if y != nil { + f.expr0(y, depth+1) + } + } + f.print(x.Rbrack, token.RBRACK) + + case *ast.CallExpr: + if len(x.Args) > 1 { + depth++ + } + wasIndented := f.possibleSelectorExpr(x.Fun, token.HighestPrec, depth) + f.print(x.Lparen, 
token.LPAREN) + f.walkArgsList(x.Args, depth) + f.print(trailcomma, noblank, x.Rparen, token.RPAREN) + if wasIndented { + f.print(unindent) + } + + case *ast.StructLit: + var l line + ws := noblank + ff := f.formfeed() + + switch { + case len(x.Elts) == 0: + if !x.Rbrace.HasRelPos() { + // collapse curly braces if the body is empty. + ffAlt := blank | nooverride + for _, c := range x.Comments() { + if c.Position == 1 { + ffAlt = ff + } + } + ff = ffAlt + } + case !x.Rbrace.HasRelPos() || !x.Elts[0].Pos().HasRelPos(): + ws |= newline | nooverride + } + f.print(x.Lbrace, token.LBRACE, &l, ws, ff, indent) + + f.walkDeclList(x.Elts) + f.matchUnindent() + + ws = noblank + if f.lineout != l { + ws |= newline + if f.lastTok != token.RBRACE && f.lastTok != token.RBRACK { + ws |= nooverride + } + } + f.print(ws, x.Rbrace, token.RBRACE) + + case *ast.ListLit: + f.print(x.Lbrack, token.LBRACK, indent) + f.walkListElems(x.Elts) + f.print(trailcomma, noblank) + f.visitComments(f.current.pos) + f.matchUnindent() + f.print(noblank, x.Rbrack, token.RBRACK) + + case *ast.Ellipsis: + f.ellipsis(x) + + case *ast.ListComprehension: + f.print(x.Lbrack, token.LBRACK, blank, indent) + f.print(blank) + f.walkClauseList(x.Clauses, blank) + f.print(blank, nooverride) + if _, ok := x.Expr.(*ast.StructLit); ok { + f.expr(x.Expr) + } else { + f.print(token.LBRACE, blank) + f.expr(x.Expr) + f.print(blank, token.RBRACE) + } + f.print(unindent, f.wsOverride(blank), x.Rbrack, token.RBRACK) + + default: + panic(fmt.Sprintf("unimplemented type %T", x)) + } +} + +func (f *formatter) clause(clause ast.Clause) { + switch n := clause.(type) { + case *ast.ForClause: + f.print(n.For, "for", blank) + f.print(indent) + if n.Key != nil { + f.label(n.Key, false) + f.print(n.Colon, token.COMMA, blank) + } else { + f.current.pos++ + f.visitComments(f.current.pos) + } + f.label(n.Value, false) + f.print(blank, n.In, "in", blank) + f.expr(n.Source) + f.markUnindentLine() + + case *ast.IfClause: + f.print(n.If, 
"if", blank) + f.print(indent) + f.expr(n.Condition) + f.markUnindentLine() + + case *ast.LetClause: + f.print(n.Let, token.LET, blank, nooverride) + f.print(indent) + f.expr(n.Ident) + f.print(blank, nooverride, n.Equal, token.BIND, blank) + f.expr(n.Expr) + f.markUnindentLine() + + default: + panic("unknown clause type") + } +} + +func walkBinary(e *ast.BinaryExpr) (has6, has7, has8 bool, maxProblem int) { + switch e.Op.Precedence() { + case 6: + has6 = true + case 7: + has7 = true + case 8: + has8 = true + } + + switch l := e.X.(type) { + case *ast.BinaryExpr: + if l.Op.Precedence() < e.Op.Precedence() { + // parens will be inserted. + // pretend this is an *syntax.ParenExpr and do nothing. + break + } + h6, h7, h8, mp := walkBinary(l) + has6 = has6 || h6 + has7 = has7 || h7 + has8 = has8 || h8 + if maxProblem < mp { + maxProblem = mp + } + } + + switch r := e.Y.(type) { + case *ast.BinaryExpr: + if r.Op.Precedence() <= e.Op.Precedence() { + // parens will be inserted. + // pretend this is an *syntax.ParenExpr and do nothing. 
+ break + } + h6, h7, h8, mp := walkBinary(r) + has6 = has6 || h6 + has7 = has7 || h7 + has8 = has8 || h8 + if maxProblem < mp { + maxProblem = mp + } + + case *ast.UnaryExpr: + switch e.Op.String() + r.Op.String() { + case "/*": + maxProblem = 8 + case "++", "--": + if maxProblem < 6 { + maxProblem = 6 + } + } + } + return +} + +func cutoff(e *ast.BinaryExpr, depth int) int { + has6, has7, has8, maxProblem := walkBinary(e) + if maxProblem > 0 { + return maxProblem + 1 + } + if (has6 || has7) && has8 { + if depth == 1 { + return 8 + } + if has7 { + return 7 + } + return 6 + } + if has6 && has7 { + if depth == 1 { + return 7 + } + return 6 + } + if depth == 1 { + return 8 + } + return 6 +} + +func diffPrec(expr ast.Expr, prec int) int { + x, ok := expr.(*ast.BinaryExpr) + if !ok || prec != x.Op.Precedence() { + return 1 + } + return 0 +} + +func reduceDepth(depth int) int { + depth-- + if depth < 1 { + depth = 1 + } + return depth +} + +// Format the binary expression: decide the cutoff and then format. +// Let's call depth == 1 Normal mode, and depth > 1 Compact mode. +// (Algorithm suggestion by Russ Cox.) +// +// The precedences are: +// 7 * / % quo rem div mod +// 6 + - +// 5 == != < <= > >= +// 4 && +// 3 || +// 2 & +// 1 | +// +// The only decision is whether there will be spaces around levels 6 and 7. +// There are never spaces at level 8 (unary), and always spaces at levels 5 and below. +// +// To choose the cutoff, look at the whole expression but excluding primary +// expressions (function calls, parenthesized exprs), and apply these rules: +// +// 1) If there is a binary operator with a right side unary operand +// that would clash without a space, the cutoff must be (in order): +// +// /* 8 +// ++ 7 // not necessary, but to avoid confusion +// -- 7 +// +// (Comparison operators always have spaces around them.) 
+// +// 2) If there is a mix of level 7 and level 6 operators, then the cutoff +// is 7 (use spaces to distinguish precedence) in Normal mode +// and 6 (never use spaces) in Compact mode. +// +// 3) If there are no level 6 operators or no level 7 operators, then the +// cutoff is 8 (always use spaces) in Normal mode +// and 6 (never use spaces) in Compact mode. +// +func (f *formatter) binaryExpr(x *ast.BinaryExpr, prec1, cutoff, depth int) { + f.nestExpr++ + defer func() { f.nestExpr-- }() + + prec := x.Op.Precedence() + if prec < prec1 { + // parenthesis needed + // Note: The parser inserts an syntax.ParenExpr node; thus this case + // can only occur if the AST is created in a different way. + // defer p.pushComment(nil).pop() + f.print(token.LPAREN, nooverride) + f.expr0(x, reduceDepth(depth)) // parentheses undo one level of depth + f.print(token.RPAREN) + return + } + + printBlank := prec < cutoff + + f.expr1(x.X, prec, depth+diffPrec(x.X, prec)) + f.print(nooverride) + if printBlank { + f.print(blank) + } + f.print(x.OpPos, x.Op) + if x.Y.Pos().IsNewline() { + // at least one line break, but respect an extra empty line + // in the source + f.print(formfeed) + printBlank = false // no blank after line break + } else { + f.print(nooverride) + } + if printBlank { + f.print(blank) + } + f.expr1(x.Y, prec+1, depth+1) +} + +func isBinary(expr ast.Expr) bool { + _, ok := expr.(*ast.BinaryExpr) + return ok +} + +func (f *formatter) possibleSelectorExpr(expr ast.Expr, prec1, depth int) bool { + if x, ok := expr.(*ast.SelectorExpr); ok { + return f.selectorExpr(x, depth) + } + f.expr1(expr, prec1, depth) + return false +} + +// selectorExpr handles an *syntax.SelectorExpr node and returns whether x spans +// multiple lines. 
+func (f *formatter) selectorExpr(x *ast.SelectorExpr, depth int) bool { + f.expr1(x.X, token.HighestPrec, depth) + f.print(token.PERIOD) + if x.Sel.Pos().IsNewline() { + f.print(indent, formfeed) + f.expr(x.Sel.(ast.Expr)) + f.print(unindent) + return true + } + f.print(noblank) + f.expr(x.Sel.(ast.Expr)) + return false +} + +func isTop(e ast.Expr) bool { + ident, ok := e.(*ast.Ident) + return ok && ident.Name == "_" +} diff --git a/vendor/cuelang.org/go/cue/format/printer.go b/vendor/cuelang.org/go/cue/format/printer.go new file mode 100644 index 000000000..b39e016b6 --- /dev/null +++ b/vendor/cuelang.org/go/cue/format/printer.go @@ -0,0 +1,424 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package format + +import ( + "fmt" + "os" + "strings" + "text/tabwriter" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/token" +) + +// A printer takes the stream of formatting tokens and spacing directives +// produced by the formatter and adjusts the spacing based on the original +// source code. 
// NOTE(review): vendored code from cuelang.org/go v0.3.0-beta.6. Comments
// below were added during review only; the executable tokens are unchanged.
// Any code change here diverges from the upstream module and will break
// `go mod vendor` verification.
type printer struct {
	cfg *config

	// Pending whitespace directives accumulated between tokens; flushed by
	// writeWhitespace before the next token is emitted.
	allowed     whiteSpace
	requested   whiteSpace
	indentStack []whiteSpace

	pos     token.Position // current pos in AST
	lineout line

	lastTok token.Token // last token printed (syntax.ILLEGAL if it's whitespace)

	output      []byte // accumulated formatted output (may contain tabwriter.Escape bytes)
	indent      int    // current indentation level, in tabs
	spaceBefore bool   // true if the last byte written was a space-like separator

	errs errors.Error
}

// line is an output line counter; Print(*line) snapshots the current value.
type line int

// init prepares the printer with cfg and a 1-based start position.
func (p *printer) init(cfg *config) {
	p.cfg = cfg
	p.pos = token.Position{Line: 1, Column: 1}
}

// errf records a formatting error positioned at node n.
func (p *printer) errf(n ast.Node, format string, args ...interface{}) {
	p.errs = errors.Append(p.errs, errors.Newf(n.Pos(), format, args...))
}

const debug = false

// internalError reports an internal inconsistency; it only panics when the
// package-level debug flag is enabled, otherwise it is a no-op.
func (p *printer) internalError(msg ...interface{}) {
	if debug {
		fmt.Print(p.pos.String() + ": ")
		fmt.Println(msg...)
		panic("go/printer")
	}
}

// lineFor returns the source line of pos.
func (p *printer) lineFor(pos token.Pos) int {
	return pos.Line()
}

// Print consumes one item from the formatter's token stream: a token, AST
// leaf, whitespace directive, position hint, or *line snapshot request.
// Tokens/literals are rendered into p.output after flushing any pending
// whitespace; whitespace and position arguments only mutate p.allowed.
func (p *printer) Print(v interface{}) {
	var (
		impliedComma = false
		isLit        bool
		data         string
		nextWS       whiteSpace
	)
	switch x := v.(type) {
	case *line:
		*x = p.lineout

	case token.Token:
		s := x.String()
		before, after := mayCombine(p.lastTok, x)
		if before && !p.spaceBefore {
			// the previous and the current token must be
			// separated by a blank otherwise they combine
			// into a different incorrect token sequence
			// (except for syntax.INT followed by a '.' this
			// should never happen because it is taken care
			// of via binary expression formatting)
			if p.allowed&blank != 0 {
				p.internalError("whitespace buffer not empty")
			}
			p.allowed |= blank
		}
		if after {
			nextWS = blank
		}
		data = s
		switch x {
		case token.EOF:
			data = ""
			p.allowed = newline
			p.allowed &^= newsection
		case token.LPAREN, token.LBRACK, token.LBRACE:
		case token.RPAREN, token.RBRACK, token.RBRACE:
			impliedComma = true
		}
		p.lastTok = x

	case *ast.BasicLit:
		data = x.Value
		switch x.Kind {
		case token.STRING:
			// TODO: only do this when simplifying. Right now this does not
			// give the right result, but it should be better if:
			// 1) simplification is done as a separate step
			// 2) simplified structs are explicitly referenced separately
			// in the AST.
			if p.indent < 6 {
				data = literal.IndentTabs(data, p.indent+1)
			}

		case token.INT:
			// Rewrite legacy leading-zero octals ("0755") to Go/CUE "0o" form.
			if len(data) > 1 &&
				data[0] == '0' &&
				data[1] >= '0' && data[1] <= '9' {
				data = "0o" + data[1:]
			}
			// Pad trailing dot before multiplier.
			if p := strings.IndexByte(data, '.'); p >= 0 && data[p+1] > '9' {
				data = data[:p+1] + "0" + data[p+1:]
			}
			// Lowercase E, but only if it is not the last character: in the
			// future we may use E for Exa.
			if p := strings.IndexByte(data, 'E'); p != -1 && p < len(data)-1 {
				data = strings.ToLower(data)
			}

		case token.FLOAT:
			// Pad leading or trailing dots.
			switch p := strings.IndexByte(data, '.'); {
			case p < 0:
			case p == 0:
				data = "0" + data
			case p == len(data)-1:
				data += "0"
			case data[p+1] > '9':
				data = data[:p+1] + "0" + data[p+1:]
			}
			if strings.IndexByte(data, 'E') != -1 {
				data = strings.ToLower(data)
			}
		}

		isLit = true
		impliedComma = true
		p.lastTok = x.Kind

	case *ast.Ident:
		data = x.Name
		if !ast.IsValidIdent(data) {
			p.errf(x, "invalid identifier %q", x.Name)
			data = "*bad identifier*"
		}
		impliedComma = true
		p.lastTok = token.IDENT

	case string:
		data = x
		impliedComma = true
		p.lastTok = token.STRING

	case *ast.CommentGroup:
		rel := x.Pos().RelPos()
		if x.Line { // TODO: we probably don't need this.
			rel = token.Blank
		}
		switch rel {
		case token.NoRelPos:
		case token.Newline, token.NewSection:
		case token.Blank, token.Elided:
			p.allowed |= blank
			fallthrough
		case token.NoSpace:
			p.allowed &^= newline | newsection | formfeed | declcomma
		}
		return

	case *ast.Attribute:
		data = x.Text
		impliedComma = true
		p.lastTok = token.ATTRIBUTE

	case *ast.Comment:
		// TODO: if implied comma, postpone comment
		data = x.Text
		p.lastTok = token.COMMENT

	case whiteSpace:
		p.allowed |= x
		return

	case token.Pos:
		// TODO: should we use a known file position to synchronize? Go does,
		// but we don't really have to.
		// pos := x
		if x.HasRelPos() {
			if p.allowed&nooverride == 0 {
				requested := p.allowed
				switch x.RelPos() {
				case token.NoSpace:
					requested &^= newline | newsection | formfeed
				case token.Blank:
					requested |= blank
					requested &^= newline | newsection | formfeed
				case token.Newline:
					requested |= newline
				case token.NewSection:
					requested |= newsection
				}
				p.writeWhitespace(requested)
				p.allowed = 0
				p.requested = 0
			}
			// p.pos = pos
		}
		return

	default:
		fmt.Fprintf(os.Stderr, "print: unsupported argument %v (%T)\n", x, x)
		panic("go/printer type")
	}

	p.writeWhitespace(p.allowed)
	p.allowed = 0
	p.requested = 0
	p.writeString(data, isLit)
	p.allowed = nextWS
	_ = impliedComma // TODO: delay comment printings
}

// writeWhitespace flushes a pending whitespace mask: an optional comma, then
// exactly one vertical/horizontal separator chosen by priority (newsection >
// formfeed > newline > declcomma > noblank > vtab > blank).
func (p *printer) writeWhitespace(ws whiteSpace) {
	if ws&comma != 0 {
		switch {
		case ws&(newsection|newline|formfeed) != 0,
			ws&trailcomma == 0:
			p.writeByte(',', 1)
		}
	}
	if ws&indent != 0 {
		p.markLineIndent(ws)
	}
	if ws&unindent != 0 {
		p.markUnindentLine()
	}
	switch {
	case ws&newsection != 0:
		p.maybeIndentLine(ws)
		p.writeByte('\f', 2)
		p.lineout += 2
		p.spaceBefore = true
	case ws&formfeed != 0:
		p.maybeIndentLine(ws)
		p.writeByte('\f', 1)
		p.lineout++
		p.spaceBefore = true
	case ws&newline != 0:
		p.maybeIndentLine(ws)
		p.writeByte('\n', 1)
		p.lineout++
		p.spaceBefore = true
	case ws&declcomma != 0:
		p.writeByte(',', 1)
		p.writeByte(' ', 1)
		p.spaceBefore = true
	case ws&noblank != 0:
	case ws&vtab != 0:
		p.writeByte('\v', 1)
		p.spaceBefore = true
	case ws&blank != 0:
		p.writeByte(' ', 1)
		p.spaceBefore = true
	}
}

// markLineIndent pushes a pending indent request; the actual p.indent bump is
// deferred until the first line break (see maybeIndentLine).
func (p *printer) markLineIndent(ws whiteSpace) {
	p.indentStack = append(p.indentStack, ws)
}

// markUnindentLine pops the most recent indent request, decrementing p.indent
// only if that request was actually applied (flagged `indented`).
func (p *printer) markUnindentLine() (wasUnindented bool) {
	last := len(p.indentStack) - 1
	if ws := p.indentStack[last]; ws&indented != 0 {
		p.indent--
		wasUnindented = true
	}
	p.indentStack = p.indentStack[:last]
	return wasUnindented
}

// maybeIndentLine applies the top pending indent request (at most once) when a
// line break is about to be written.
func (p *printer) maybeIndentLine(ws whiteSpace) {
	if ws&unindent == 0 && len(p.indentStack) > 0 {
		last := len(p.indentStack) - 1
		if ws := p.indentStack[last]; ws&indented != 0 || ws&indent == 0 {
			return
		}
		p.indentStack[last] |= indented
		p.indent++
	}
}

// matchUnindent requests an unindent for a closing bracket; the commented-out
// logic for matching the opening bracket's whitespace is an upstream TODO.
func (f *formatter) matchUnindent() whiteSpace {
	f.allowed |= unindent
	// TODO: make this work. Whitespace from closing bracket should match that
	// of opening if there is no position information.
	// f.allowed &^= nooverride | newline | newsection | formfeed | blank | noblank
	// ws := f.indentStack[len(f.indentStack)-1]
	// mask := blank | noblank | vtab
	// f.allowed |= unindent | blank | noblank
	// if ws&newline != 0 || ws*indented != 0 {
	// 	f.allowed |= newline
	// }
	return 0
}

// writeString writes the string s to p.output and updates p.pos, p.out,
// and p.last. If isLit is set, s is escaped w/ tabwriter.Escape characters
// to protect s from being interpreted by the tabwriter.
//
// Note: writeString is only used to write Go tokens, literals, and
// comments, all of which must be written literally. Thus, it is correct
// to always set isLit = true. However, setting it explicitly only when
// needed (i.e., when we don't know that s contains no tabs or line breaks)
// avoids processing extra escape characters and reduces run time of the
// printer benchmark by up to 10%.
//
func (p *printer) writeString(s string, isLit bool) {
	if s != "" {
		p.spaceBefore = false
	}

	if isLit {
		// Protect s such that is passes through the tabwriter
		// unchanged. Note that valid Go programs cannot contain
		// tabwriter.Escape bytes since they do not appear in legal
		// UTF-8 sequences.
		p.output = append(p.output, tabwriter.Escape)
	}

	p.output = append(p.output, s...)

	if isLit {
		p.output = append(p.output, tabwriter.Escape)
	}
	// update positions
	nLines := 0
	var li int // index of last newline; valid if nLines > 0
	for i := 0; i < len(s); i++ {
		// CUE tokens cannot contain '\f' - no need to look for it
		if s[i] == '\n' {
			nLines++
			li = i
		}
	}
	p.pos.Offset += len(s)
	if nLines > 0 {
		p.pos.Line += nLines
		c := len(s) - li
		p.pos.Column = c
	} else {
		p.pos.Column += len(s)
	}
}

// writeByte appends n copies of ch and, after a line break ('\n' or '\f'),
// also emits the current indentation as tabs.
func (p *printer) writeByte(ch byte, n int) {
	for i := 0; i < n; i++ {
		p.output = append(p.output, ch)
	}

	// update positions
	p.pos.Offset += n
	if ch == '\n' || ch == '\f' {
		p.pos.Line += n
		p.pos.Column = 1

		n := p.cfg.Indent + p.indent // include base indentation
		for i := 0; i < n; i++ {
			p.output = append(p.output, '\t')
		}

		// update positions
		p.pos.Offset += n
		p.pos.Column += n

		return
	}
	p.pos.Column += n
}

// mayCombine reports whether prev and next would merge into a different token
// if written adjacently (before), and whether next always needs a trailing
// blank (after).
// NOTE(review): the range check `s[0] < 'z'` excludes 'z' itself; this matches
// upstream cuelang.org/go exactly — do not "fix" it in vendored code.
func mayCombine(prev, next token.Token) (before, after bool) {
	s := next.String()
	if 'a' <= s[0] && s[0] < 'z' {
		return true, true
	}
	switch prev {
	case token.IQUO, token.IREM, token.IDIV, token.IMOD:
		return false, false
	case token.INT:
		before = next == token.PERIOD // 1.
	case token.ADD:
		before = s[0] == '+' // ++
	case token.SUB:
		before = s[0] == '-' // --
	case token.QUO:
		before = s[0] == '*' // /*
	}
	return before, false
}
diff --git a/vendor/cuelang.org/go/cue/format/simplify.go b/vendor/cuelang.org/go/cue/format/simplify.go
new file mode 100644
index 000000000..f4981978c
--- /dev/null
+++ b/vendor/cuelang.org/go/cue/format/simplify.go
// Copyright 2019 CUE Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package format

import (
	"strconv"

	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/ast/astutil"
	"cuelang.org/go/internal"
)

// labelSimplifier rewrites string labels to identifiers if
// no identifiers will subsequently bind to the exposed label.
// In other words, string labels are only replaced if this does
// not change the semantics of the CUE code.
// NOTE(review): vendored code from cuelang.org/go v0.3.0-beta.6. Comments
// added for review only; tokens unchanged.
// labelSimplifier tracks, per lexical scope, which quoted labels may safely
// be rewritten to plain identifiers (scope[name] stays true only while no
// identifier reference binds to that name).
type labelSimplifier struct {
	parent *labelSimplifier
	scope  map[string]bool
}

// processDecls runs the three passes over one struct scope: collect candidate
// label strings, veto any that are referenced as identifiers, then rewrite
// the survivors.
func (s *labelSimplifier) processDecls(decls []ast.Decl) {
	sc := labelSimplifier{parent: s, scope: map[string]bool{}}
	for _, d := range decls {
		switch x := d.(type) {
		case *ast.Field:
			ast.Walk(x.Label, sc.markStrings, nil)
		}
	}

	for _, d := range decls {
		switch x := d.(type) {
		case *ast.Field:
			ast.Walk(x.Value, sc.markReferences, nil)
		default:
			ast.Walk(x, sc.markReferences, nil)
		}
	}

	for _, d := range decls {
		switch x := d.(type) {
		case *ast.Field:
			x.Label = astutil.Apply(x.Label, sc.replace, nil).(ast.Label)
		}
	}
}

// markReferences walks expressions and clears (sets false) any scope entry
// whose name is used as an identifier, walking up the parent chain to the
// nearest scope that declared it; such labels must stay quoted.
func (s *labelSimplifier) markReferences(n ast.Node) bool {
	// Record strings at this level.
	switch x := n.(type) {
	case *ast.File:
		s.processDecls(x.Decls)
		return false

	case *ast.StructLit:
		s.processDecls(x.Elts)
		return false

	case *ast.SelectorExpr:
		// Only the head of a selector chain is a real reference.
		ast.Walk(x.X, s.markReferences, nil)
		return false

	case *ast.Ident:
		for c := s; c != nil; c = c.parent {
			if _, ok := c.scope[x.Name]; ok {
				c.scope[x.Name] = false
				break
			}
		}
	}
	return true
}

// markStrings records label names declared in this scope: quoted strings are
// candidates only if they unquote to a valid, non-definition/non-hidden
// identifier; list and interpolation labels are never simplified.
func (s *labelSimplifier) markStrings(n ast.Node) bool {
	switch x := n.(type) {
	case *ast.BasicLit:
		str, err := strconv.Unquote(x.Value)
		if err != nil || !ast.IsValidIdent(str) || internal.IsDefOrHidden(str) {
			return false
		}
		s.scope[str] = true

	case *ast.Ident:
		s.scope[x.Name] = true

	case *ast.ListLit, *ast.Interpolation:
		return false
	}
	return true
}

// replace rewrites a quoted label to a bare identifier when it survived the
// reference pass (scope entry still true).
func (s *labelSimplifier) replace(c astutil.Cursor) bool {
	switch x := c.Node().(type) {
	case *ast.BasicLit:
		str, err := strconv.Unquote(x.Value)
		if err == nil && s.scope[str] && !internal.IsDefOrHidden(str) {
			c.Replace(ast.NewIdent(str))
		}
	}
	return true
}
diff --git a/vendor/cuelang.org/go/cue/go.go b/vendor/cuelang.org/go/cue/go.go
new file mode 100644
index 000000000..1d6717c8c
--- /dev/null
+++ b/vendor/cuelang.org/go/cue/go.go
@@ -0,0 +1,46 @@
// Copyright 2020 CUE Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cue

import (
	"cuelang.org/go/internal"
	"cuelang.org/go/internal/core/adt"
	"cuelang.org/go/internal/core/convert"
	"cuelang.org/go/internal/core/eval"
)

// NOTE(review): vendored from cuelang.org/go v0.3.0-beta.6; comments added
// for review only, tokens unchanged.
// init wires Go-value conversion hooks into the internal package so that
// lower layers can construct cue.Value without importing this package.
func init() {
	// FromGoValue converts an arbitrary Go value to a Value rooted in runtime.
	internal.FromGoValue = func(runtime, x interface{}, nilIsTop bool) interface{} {
		r := runtime.(*Runtime)
		ctx := eval.NewContext(r.index().Runtime, nil)
		v := convert.GoValueToValue(ctx, x, nilIsTop)
		n := adt.ToVertex(v)
		return Value{r.idx, n}
	}

	// FromGoType converts a Go type to the corresponding CUE schema Value;
	// conversion errors are surfaced as a bottom value rather than panicking.
	internal.FromGoType = func(runtime, x interface{}) interface{} {
		r := runtime.(*Runtime)
		ctx := eval.NewContext(r.index().Runtime, nil)
		expr, err := convert.GoTypeToExpr(ctx, x)
		if err != nil {
			expr = &adt.Bottom{Err: err}
		}
		n := &adt.Vertex{}
		n.AddConjunct(adt.MakeRootConjunct(nil, expr))
		return Value{r.idx, n}

		// return convertType(runtime.(*Runtime), x)
	}
}
diff --git a/vendor/cuelang.org/go/cue/instance.go b/vendor/cuelang.org/go/cue/instance.go
new file mode 100644
index 000000000..53bc5d6d0
--- /dev/null
+++ b/vendor/cuelang.org/go/cue/instance.go
@@ -0,0 +1,424 @@
// Copyright 2018 The CUE Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cue

import (
	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/build"
	"cuelang.org/go/cue/errors"
	"cuelang.org/go/internal"
	"cuelang.org/go/internal/core/adt"
	"cuelang.org/go/internal/core/compile"
	"cuelang.org/go/internal/core/convert"
	"cuelang.org/go/internal/core/eval"
)

// An Instance defines a single configuration based on a collection of
// underlying CUE files.
type Instance struct {
	*index

	root *adt.Vertex

	ImportPath  string
	Dir         string
	PkgName     string
	DisplayName string

	// Incomplete is set to true whenever an error is recorded via
	// setListOrError/setError, i.e. this instance or a dependency had errors.
	Incomplete bool
	Err        errors.Error // non-nil if the package had errors

	inst *build.Instance

	// complete bool // for cycle detection
}

// addInst registers p in the index, synthesizing a minimal build.Instance
// when none was provided, and returns p for chaining.
func (x *index) addInst(p *Instance) *Instance {
	if p.inst == nil {
		p.inst = &build.Instance{
			ImportPath: p.ImportPath,
			PkgName:    p.PkgName,
		}
	}
	// fmt.Println(p.ImportPath, "XXX")
	x.AddInst(p.ImportPath, p.root, p.inst)
	x.loaded[p.inst] = p
	p.index = x
	return p
}

// getImportFromBuild returns the cached Instance for build instance p, or
// creates, caches, and returns a new one rooted at v.
func (x *index) getImportFromBuild(p *build.Instance, v *adt.Vertex) *Instance {
	inst := x.loaded[p]

	if inst != nil {
		return inst
	}

	inst = &Instance{
		ImportPath:  p.ImportPath,
		Dir:         p.Dir,
		PkgName:     p.PkgName,
		DisplayName: p.ImportPath,
		root:        v,
		inst:        p,
		index:       x,
	}
	if p.Err != nil {
		inst.setListOrError(p.Err)
	}

	x.loaded[p] = inst

	return inst
}

// getImportFromNode resolves the Instance that owns vertex v, or nil if the
// vertex is not associated with a build instance.
func (x *index) getImportFromNode(v *adt.Vertex) *Instance {
	p := x.GetInstanceFromNode(v)
	if p == nil {
		return nil
	}

	return x.getImportFromBuild(p, v)
}

// getImportFromPath loads the import with the given id and returns its
// Instance; returns nil if the import cannot be loaded.
// NOTE(review): unlike getImportFromBuild, a newly created Instance here is
// not stored back into x.loaded — presumably intentional upstream; confirm.
func (x *index) getImportFromPath(id string) *Instance {
	node, _ := x.LoadImport(id)
	if node == nil {
		return nil
	}
	b := x.GetInstanceFromNode(node)
	inst := x.loaded[b]
	if inst == nil {
		inst = &Instance{
			ImportPath: b.ImportPath,
			PkgName:    b.PkgName,
			root:       node,
			inst:       b,
			index:      x,
		}
	}
	return inst
}

// init wires the MakeInstance hook: wrap an arbitrary Value in an Instance,
// coercing non-struct values into a single-conjunct vertex.
func init() {
	internal.MakeInstance = func(value interface{}) interface{} {
		v := value.(Value)
		x := v.eval(v.ctx())
		st, ok := x.(*adt.Vertex)
		if !ok {
			st = &adt.Vertex{}
			st.AddConjunct(adt.MakeRootConjunct(nil, x))
		}
		return v.ctx().index.addInst(&Instance{
			root: st,
		})
	}
}

// newInstance creates a new instance. Use Insert to populate the instance.
func newInstance(x *index, p *build.Instance, v *adt.Vertex) *Instance {
	// TODO: associate root source with structLit.
	inst := &Instance{
		root: v,
		inst: p,
	}
	if p != nil {
		inst.ImportPath = p.ImportPath
		inst.Dir = p.Dir
		inst.PkgName = p.PkgName
		inst.DisplayName = p.ImportPath
		if p.Err != nil {
			inst.setListOrError(p.Err)
		}
	}

	// NOTE(review): p is dereferenced unconditionally below even though the
	// preceding block guards against p == nil — callers apparently always
	// pass a non-nil p; confirm before relying on a nil argument.
	x.AddInst(p.ImportPath, v, p)
	x.loaded[p] = inst
	inst.index = x
	return inst
}

// setListOrError records err and marks the instance incomplete.
func (inst *Instance) setListOrError(err errors.Error) {
	inst.Incomplete = true
	inst.Err = errors.Append(inst.Err, err)
}

// setError records err and marks the instance incomplete.
func (inst *Instance) setError(err errors.Error) {
	inst.Incomplete = true
	inst.Err = errors.Append(inst.Err, err)
}

// eval returns the manifested value of the instance root.
func (inst *Instance) eval(ctx *context) adt.Value {
	// TODO: remove manifest here?
	v := ctx.manifest(inst.root)
	return v
}

// init wires the EvalExpr hook: evaluate an ast.Expr in the scope of a Value.
func init() {
	internal.EvalExpr = func(value, expr interface{}) interface{} {
		v := value.(Value)
		e := expr.(ast.Expr)
		ctx := v.idx.newContext()
		return newValueRoot(ctx, evalExpr(ctx, v.vertex(ctx), e))
	}
}

// pkgID reports a package path that can never resolve to a valid package.
func pkgID() string {
	return "_"
}

// evalExpr evaluates expr within scope.
// evalExpr compiles expr against scope (resolving only builtin-package
// imports) and resolves it to a value; compile errors are returned as a
// bottom value rather than an error. (Vendored cuelang.org/go code —
// comments added for review only, tokens unchanged.)
func evalExpr(ctx *context, scope *adt.Vertex, expr ast.Expr) adt.Value {
	cfg := &compile.Config{
		Scope: scope,
		// Only builtin package names are resolvable from an ad-hoc expression.
		Imports: func(x *ast.Ident) (pkgPath string) {
			if !isBuiltin(x.Name) {
				return ""
			}
			return x.Name
		},
	}

	c, err := compile.Expr(cfg, ctx.opCtx, pkgID(), expr)
	if err != nil {
		return &adt.Bottom{Err: err}
	}
	return adt.Resolve(ctx.opCtx, c)

	// scope.Finalize(ctx.opCtx) // TODO: not appropriate here.
	// switch s := scope.Value.(type) {
	// case *bottom:
	// 	return s
	// case *adt.StructMarker:
	// default:
	// 	return ctx.mkErr(scope, "instance is not a struct, found %s", scope.Kind())
	// }

	// c := ctx.opCtx

	// x, err := compile.Expr(&compile.Config{Scope: scope}, c.Runtime, expr)
	// if err != nil {
	// 	return c.NewErrf("could not evaluate %s: %v", c.Str(x), err)
	// }

	// env := &adt.Environment{Vertex: scope}

	// switch v := x.(type) {
	// case adt.Value:
	// 	return v
	// case adt.Resolver:
	// 	r, err := c.Resolve(env, v)
	// 	if err != nil {
	// 		return err
	// 	}
	// 	return r

	// case adt.Evaluator:
	// 	e, _ := c.Evaluate(env, x)
	// 	return e

	// }

	// return c.NewErrf("could not evaluate %s", c.Str(x))
}

// ID returns the package identifier that uniquely qualifies module and
// package name.
func (inst *Instance) ID() string {
	if inst == nil || inst.inst == nil {
		return ""
	}
	return inst.inst.ID()
}

// Doc returns the package comments for this instance.
func (inst *Instance) Doc() []*ast.CommentGroup {
	var docs []*ast.CommentGroup
	if inst.inst == nil {
		return nil
	}
	for _, f := range inst.inst.Files {
		if c := internal.FileComment(f); c != nil {
			docs = append(docs, c)
		}
	}
	return docs
}

// Value returns the root value of the configuration. If the configuration
// defines an emit value, it will be that value. Otherwise it will be all
// top-level values.
+func (inst *Instance) Value() Value { + ctx := inst.newContext() + inst.root.Finalize(ctx.opCtx) + return newVertexRoot(ctx, inst.root) +} + +// Eval evaluates an expression within an existing instance. +// +// Expressions may refer to builtin packages if they can be uniquely identified. +func (inst *Instance) Eval(expr ast.Expr) Value { + ctx := inst.newContext() + v := inst.root + v.Finalize(ctx.opCtx) + result := evalExpr(ctx, v, expr) + return newValueRoot(ctx, result) +} + +// Merge unifies the given instances into a single one. +// +// Deprecated: do not use. +func Merge(inst ...*Instance) *Instance { + v := &adt.Vertex{} + + i := inst[0] + ctx := i.index.newContext().opCtx + + // TODO: interesting test: use actual unification and then on K8s corpus. + + for _, i := range inst { + w := i.Value() + v.AddConjunct(adt.MakeRootConjunct(nil, w.v.ToDataAll())) + } + v.Finalize(ctx) + + p := i.index.addInst(&Instance{ + root: v, + // complete: true, + }) + return p +} + +// Build creates a new instance from the build instances, allowing unbound +// identifier to bind to the top-level field in inst. The top-level fields in +// inst take precedence over predeclared identifier and builtin functions. +func (inst *Instance) Build(p *build.Instance) *Instance { + p.Complete() + + idx := inst.index + r := inst.index.Runtime + + rErr := r.ResolveFiles(p) + + cfg := &compile.Config{Scope: inst.root} + v, err := compile.Files(cfg, r, p.ID(), p.Files...) + + v.AddConjunct(adt.MakeRootConjunct(nil, inst.root)) + + i := newInstance(idx, p, v) + if rErr != nil { + i.setListOrError(rErr) + } + if i.Err != nil { + i.setListOrError(i.Err) + } + + if err != nil { + i.setListOrError(err) + } + + // i.complete = true + + return i +} + +func (inst *Instance) value() Value { + return newVertexRoot(inst.newContext(), inst.root) +} + +// Lookup reports the value at a path starting from the top level struct. 
The +// Exists method of the returned value will report false if the path did not +// exist. The Err method reports if any error occurred during evaluation. The +// empty path returns the top-level configuration struct. Use LookupDef for definitions or LookupField for +// any kind of field. +func (inst *Instance) Lookup(path ...string) Value { + return inst.value().Lookup(path...) +} + +// LookupDef reports the definition with the given name within struct v. The +// Exists method of the returned value will report false if the definition did +// not exist. The Err method reports if any error occurred during evaluation. +func (inst *Instance) LookupDef(path string) Value { + return inst.value().LookupDef(path) +} + +// LookupField reports a Field at a path starting from v, or an error if the +// path is not. The empty path returns v itself. +// +// It cannot look up hidden or unexported fields. +// +// Deprecated: this API does not work with new-style definitions. Use +// FieldByName defined on inst.Value(). +func (inst *Instance) LookupField(path ...string) (f FieldInfo, err error) { + v := inst.value() + for _, k := range path { + s, err := v.Struct() + if err != nil { + return f, err + } + + f, err = s.FieldByName(k, true) + if err != nil { + return f, err + } + if f.IsHidden { + return f, errNotFound + } + v = f.Value + } + return f, err +} + +// Fill creates a new instance with the values of the old instance unified with +// the given value. It is not possible to update the emit value. +// +// Values may be any Go value that can be converted to CUE, an ast.Expr or +// a Value. In the latter case, it will panic if the Value is not from the same +// Runtime. 
+func (inst *Instance) Fill(x interface{}, path ...string) (*Instance, error) { + for i := len(path) - 1; i >= 0; i-- { + x = map[string]interface{}{path[i]: x} + } + a := make([]adt.Conjunct, len(inst.root.Conjuncts)) + copy(a, inst.root.Conjuncts) + u := &adt.Vertex{Conjuncts: a} + + if v, ok := x.(Value); ok { + if inst.index != v.idx { + panic("value of type Value is not created with same Runtime as Instance") + } + for _, c := range v.v.Conjuncts { + u.AddConjunct(c) + } + } else { + ctx := eval.NewContext(inst.index.Runtime, nil) + expr := convert.GoValueToExpr(ctx, true, x) + u.AddConjunct(adt.MakeRootConjunct(nil, expr)) + u.Finalize(ctx) + } + inst = inst.index.addInst(&Instance{ + root: u, + inst: nil, + + // Omit ImportPath to indicate this is not an importable package. + Dir: inst.Dir, + PkgName: inst.PkgName, + Incomplete: inst.Incomplete, + + // complete: true, + }) + return inst, nil +} diff --git a/vendor/cuelang.org/go/cue/literal/doc.go b/vendor/cuelang.org/go/cue/literal/doc.go new file mode 100644 index 000000000..3d3095c6c --- /dev/null +++ b/vendor/cuelang.org/go/cue/literal/doc.go @@ -0,0 +1,17 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package literal implements conversions to and from string representations of +// basic data types. 
+package literal diff --git a/vendor/cuelang.org/go/cue/literal/indent.go b/vendor/cuelang.org/go/cue/literal/indent.go new file mode 100644 index 000000000..193ca3b44 --- /dev/null +++ b/vendor/cuelang.org/go/cue/literal/indent.go @@ -0,0 +1,33 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package literal + +import "strings" + +// IndentTabs takes a quoted string and reindents it for the given indentation. +// If a string is not a multiline string it will return the string as is. +func IndentTabs(s string, n int) string { + indent := tabs(n) + + qi, _, _, err := ParseQuotes(s, s) + if err != nil || !qi.multiline || qi.whitespace == indent { + return s + } + + search := "\n" + qi.whitespace + replace := "\n" + indent + + return strings.ReplaceAll(s, search, replace) +} diff --git a/vendor/cuelang.org/go/cue/literal/num.go b/vendor/cuelang.org/go/cue/literal/num.go new file mode 100644 index 000000000..bb77d5b2f --- /dev/null +++ b/vendor/cuelang.org/go/cue/literal/num.go @@ -0,0 +1,357 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package literal + +import ( + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "github.com/cockroachdb/apd/v2" +) + +var baseContext apd.Context + +func init() { + baseContext = apd.BaseContext + baseContext.Precision = 24 +} + +// NumInfo contains information about a parsed numbers. +// +// Reusing a NumInfo across parses may avoid memory allocations. +type NumInfo struct { + pos token.Pos + src string + p int + ch byte + buf []byte + + mul Multiplier + base byte + neg bool + UseSep bool + isFloat bool + err error +} + +// String returns a canonical string representation of the number so that +// it can be parsed with math.Float.Parse. +func (p *NumInfo) String() string { + if len(p.buf) > 0 && p.base == 10 && p.mul == 0 { + return string(p.buf) + } + var d apd.Decimal + _ = p.decimal(&d) + return d.String() +} + +type decimal = apd.Decimal + +// Decimal is for internal use. +func (p *NumInfo) Decimal(v *decimal) error { + return p.decimal(v) +} + +func (p *NumInfo) decimal(v *apd.Decimal) error { + if p.base != 10 { + _, _, _ = v.SetString("0") + b := p.buf + if p.buf[0] == '-' { + v.Negative = p.neg + b = p.buf[1:] + } + v.Coeff.SetString(string(b), int(p.base)) + return nil + } + _ = v.UnmarshalText(p.buf) + if p.mul != 0 { + _, _ = baseContext.Mul(v, v, mulToRat[p.mul]) + cond, _ := baseContext.RoundToIntegralExact(v, v) + if cond.Inexact() { + return p.errorf("number cannot be represented as int") + } + } + return nil +} + +// Multiplier reports which multiplier was used in an integral number. 
+func (p *NumInfo) Multiplier() Multiplier { + return p.mul +} + +// IsInt reports whether the number is an integral number. +func (p *NumInfo) IsInt() bool { + return !p.isFloat +} + +// ParseNum parses s and populates NumInfo with the result. +func ParseNum(s string, n *NumInfo) error { + *n = NumInfo{pos: n.pos, src: s, buf: n.buf[:0]} + if !n.next() { + return n.errorf("invalid number %q", s) + } + if n.ch == '-' { + n.neg = true + n.buf = append(n.buf, '-') + n.next() + } + seenDecimalPoint := false + if n.ch == '.' { + n.next() + seenDecimalPoint = true + } + err := n.scanNumber(seenDecimalPoint) + if err != nil { + return err + } + if n.err != nil { + return n.err + } + if n.p < len(n.src) { + return n.errorf("invalid number %q", s) + } + if len(n.buf) == 0 { + n.buf = append(n.buf, '0') + } + return nil +} + +func (p *NumInfo) errorf(format string, args ...interface{}) error { + return errors.Newf(p.pos, format, args...) +} + +// A Multiplier indicates a multiplier indicator used in the literal. +type Multiplier byte + +const ( + mul1 Multiplier = 1 + iota + mul2 + mul3 + mul4 + mul5 + mul6 + mul7 + mul8 + + mulBin = 0x10 + mulDec = 0x20 + + K = mulDec | mul1 + M = mulDec | mul2 + G = mulDec | mul3 + T = mulDec | mul4 + P = mulDec | mul5 + E = mulDec | mul6 + Z = mulDec | mul7 + Y = mulDec | mul8 + + Ki = mulBin | mul1 + Mi = mulBin | mul2 + Gi = mulBin | mul3 + Ti = mulBin | mul4 + Pi = mulBin | mul5 + Ei = mulBin | mul6 + Zi = mulBin | mul7 + Yi = mulBin | mul8 +) + +func (p *NumInfo) next() bool { + if p.p >= len(p.src) { + p.ch = 0 + return false + } + p.ch = p.src[p.p] + p.p++ + if p.ch == '.' 
{ + if len(p.buf) == 0 { + p.buf = append(p.buf, '0') + } + p.buf = append(p.buf, '.') + } + return true +} + +func (p *NumInfo) digitVal(ch byte) (d int) { + switch { + case '0' <= ch && ch <= '9': + d = int(ch - '0') + case ch == '_': + p.UseSep = true + return 0 + case 'a' <= ch && ch <= 'f': + d = int(ch - 'a' + 10) + case 'A' <= ch && ch <= 'F': + d = int(ch - 'A' + 10) + default: + return 16 // larger than any legal digit val + } + return d +} + +func (p *NumInfo) scanMantissa(base int) bool { + hasDigit := false + var last byte + for p.digitVal(p.ch) < base { + if p.ch != '_' { + p.buf = append(p.buf, p.ch) + hasDigit = true + } + last = p.ch + p.next() + } + if last == '_' { + p.err = p.errorf("illegal '_' in number") + } + return hasDigit +} + +func (p *NumInfo) scanNumber(seenDecimalPoint bool) error { + p.base = 10 + + if seenDecimalPoint { + p.isFloat = true + if !p.scanMantissa(10) { + return p.errorf("illegal fraction %q", p.src) + } + goto exponent + } + + if p.ch == '0' { + // int or float + p.next() + switch p.ch { + case 'x', 'X': + p.base = 16 + // hexadecimal int + p.next() + if !p.scanMantissa(16) { + // only scanned "0x" or "0X" + return p.errorf("illegal hexadecimal number %q", p.src) + } + case 'b': + p.base = 2 + // binary int + p.next() + if !p.scanMantissa(2) { + // only scanned "0b" + return p.errorf("illegal binary number %q", p.src) + } + case 'o': + p.base = 8 + // octal int + p.next() + if !p.scanMantissa(8) { + // only scanned "0o" + return p.errorf("illegal octal number %q", p.src) + } + default: + // int (base 8 or 10) or float + p.scanMantissa(8) + if p.ch == '8' || p.ch == '9' { + p.scanMantissa(10) + if p.ch != '.' 
&& p.ch != 'e' && p.ch != 'E' { + return p.errorf("illegal integer number %q", p.src) + } + } + switch p.ch { + case 'e', 'E': + if len(p.buf) == 0 { + p.buf = append(p.buf, '0') + } + fallthrough + case '.': + goto fraction + } + if len(p.buf) > 0 { + p.base = 8 + } + } + goto exit + } + + // decimal int or float + if !p.scanMantissa(10) { + return p.errorf("illegal number start %q", p.src) + } + +fraction: + if p.ch == '.' { + p.isFloat = true + p.next() + p.scanMantissa(10) + } + +exponent: + switch p.ch { + case 'K', 'M', 'G', 'T', 'P': + p.mul = charToMul[p.ch] + p.next() + if p.ch == 'i' { + p.mul |= mulBin + p.next() + } else { + p.mul |= mulDec + } + var v apd.Decimal + p.isFloat = false + return p.decimal(&v) + + case 'e', 'E': + p.isFloat = true + p.next() + p.buf = append(p.buf, 'e') + if p.ch == '-' || p.ch == '+' { + p.buf = append(p.buf, p.ch) + p.next() + } + if !p.scanMantissa(10) { + return p.errorf("illegal exponent %q", p.src) + } + } + +exit: + return nil +} + +var charToMul = map[byte]Multiplier{ + 'K': mul1, + 'M': mul2, + 'G': mul3, + 'T': mul4, + 'P': mul5, + 'E': mul6, + 'Z': mul7, + 'Y': mul8, +} + +var mulToRat = map[Multiplier]*apd.Decimal{} + +func init() { + d := apd.New(1, 0) + b := apd.New(1, 0) + dm := apd.New(1000, 0) + bm := apd.New(1024, 0) + + c := apd.BaseContext + for i := Multiplier(1); int(i) < len(charToMul); i++ { + // TODO: may we write to one of the sources? + var bn, dn apd.Decimal + _, _ = c.Mul(&dn, d, dm) + d = &dn + _, _ = c.Mul(&bn, b, bm) + b = &bn + mulToRat[mulDec|i] = d + mulToRat[mulBin|i] = b + } +} diff --git a/vendor/cuelang.org/go/cue/literal/quote.go b/vendor/cuelang.org/go/cue/literal/quote.go new file mode 100644 index 000000000..3d5ac2a0b --- /dev/null +++ b/vendor/cuelang.org/go/cue/literal/quote.go @@ -0,0 +1,370 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package literal + +import ( + "strconv" + "strings" + "unicode/utf8" +) + +// Form defines how to quote a string or bytes literal. +type Form struct { + hashCount int + quote byte + multiline bool + auto bool + exact bool + asciiOnly bool + graphicOnly bool + indent string + tripleQuote string +} + +// TODO: +// - Fixed or max level of escape modifiers (#""#). +// - Option to fall back to bytes if value cannot be represented as string. +// E.g. ExactString. +// - QuoteExact that fails with an error if a string cannot be represented +// without loss. +// - Handle auto-breaking for long lines (Swift-style, \-terminated lines). +// This is not supported yet in CUE, but may, and should be considred as +// a possibility in API design. +// - Other possible convenience forms: Blob (auto-break bytes), String (bytes +// or string), Label. + +// WithTabIndent returns a new Form with indentation set to the given number +// of tabs. The result will be a multiline string. +func (f Form) WithTabIndent(n int) Form { + f.indent = tabs(n) + f.multiline = true + return f +} + +const tabIndent = "\t\t\t\t\t\t\t\t\t\t\t\t" + +func tabs(n int) string { + if n < len(tabIndent) { + return tabIndent[:n] + } + return strings.Repeat("\t", n) +} + +// WithOptionalIndent is like WithTabIndent, but only returns a multiline +// strings if it doesn't contain any newline characters. 
+func (f Form) WithOptionalTabIndent(tabs int) Form { + if tabs < len(tabIndent) { + f.indent = tabIndent[:tabs] + } else { + f.indent = strings.Repeat("\t", tabs) + } + f.auto = true + return f +} + +// WithASCIIOnly ensures the quoted strings consists solely of valid ASCII +// characters. +func (f Form) WithASCIIOnly() Form { + f.asciiOnly = true + return f +} + +// WithGraphicOnly ensures the quoted strings consists solely of printable +// characters. +func (f Form) WithGraphicOnly() Form { + f.graphicOnly = true + return f +} + +var ( + // String defines the format of a CUE string. Conversions may be lossy. + String Form = stringForm + + // TODO: ExactString: quotes to bytes type if the string cannot be + // represented without loss of accuracy. + + // Label is like Text, but optimized for labels. + Label Form = stringForm + + // Bytes defines the format of bytes literal. + Bytes Form = bytesForm + + stringForm = Form{ + quote: '"', + tripleQuote: `"""`, + } + bytesForm = Form{ + quote: '\'', + tripleQuote: `'''`, + exact: true, + } +) + +// Quote returns CUE string literal representing s. The returned string uses CUE +// escape sequences (\t, \n, \u00FF, \u0100) for control characters and +// non-printable characters as defined by strconv.IsPrint. +// +// It reports an error if the string cannot be converted to the desired form. +func (f Form) Quote(s string) string { + return string(f.Append(make([]byte, 0, 3*len(s)/2), s)) +} + +const ( + lowerhex = "0123456789abcdef" +) + +// Append appends a CUE string literal representing s, as generated by Quote, to +// buf and returns the extended buffer. +func (f Form) Append(buf []byte, s string) []byte { + if f.auto && strings.ContainsRune(s, '\n') { + f.multiline = true + } + if f.multiline { + f.hashCount = f.requiredHashCount(s) + } + + // Often called with big strings, so preallocate. If there's quoting, + // this is conservative but still helps a lot. 
+ if cap(buf)-len(buf) < len(s) { + nBuf := make([]byte, len(buf), len(buf)+1+len(s)+1) + copy(nBuf, buf) + buf = nBuf + } + for i := 0; i < f.hashCount; i++ { + buf = append(buf, '#') + } + if f.multiline { + buf = append(buf, f.quote, f.quote, f.quote, '\n') + if s == "" { + buf = append(buf, f.indent...) + buf = append(buf, f.quote, f.quote, f.quote) + return buf + } + if len(s) > 0 && s[0] != '\n' { + buf = append(buf, f.indent...) + } + } else { + buf = append(buf, f.quote) + } + + buf = f.appendEscaped(buf, s) + + if f.multiline { + buf = append(buf, '\n') + buf = append(buf, f.indent...) + buf = append(buf, f.quote, f.quote, f.quote) + } else { + buf = append(buf, f.quote) + } + for i := 0; i < f.hashCount; i++ { + buf = append(buf, '#') + } + + return buf +} + +// AppendEscaped appends a CUE string literal representing s, as generated by +// Quote but without the quotes, to buf and returns the extended buffer. +// +// It does not include the last indentation. +func (f Form) AppendEscaped(buf []byte, s string) []byte { + if f.auto && strings.ContainsRune(s, '\n') { + f.multiline = true + } + + // Often called with big strings, so preallocate. If there's quoting, + // this is conservative but still helps a lot. + if cap(buf)-len(buf) < len(s) { + nBuf := make([]byte, len(buf), len(buf)+1+len(s)+1) + copy(nBuf, buf) + buf = nBuf + } + + buf = f.appendEscaped(buf, s) + + return buf +} + +func (f Form) appendEscaped(buf []byte, s string) []byte { + for width := 0; len(s) > 0; s = s[width:] { + r := rune(s[0]) + width = 1 + if r >= utf8.RuneSelf { + r, width = utf8.DecodeRuneInString(s) + } + if f.exact && width == 1 && r == utf8.RuneError { + buf = append(buf, `\x`...) + buf = append(buf, lowerhex[s[0]>>4]) + buf = append(buf, lowerhex[s[0]&0xF]) + continue + } + if f.multiline && r == '\n' { + buf = append(buf, '\n') + if len(s) > 1 && s[1] != '\n' { + buf = append(buf, f.indent...) 
+ } + continue + } + buf = f.appendEscapedRune(buf, r) + } + return buf +} + +func (f *Form) appendEscapedRune(buf []byte, r rune) []byte { + var runeTmp [utf8.UTFMax]byte + if (!f.multiline && r == rune(f.quote)) || r == '\\' { // always backslashed + buf = f.appendEscape(buf) + buf = append(buf, byte(r)) + return buf + } + if f.asciiOnly { + if r < utf8.RuneSelf && strconv.IsPrint(r) { + buf = append(buf, byte(r)) + return buf + } + } else if strconv.IsPrint(r) || f.graphicOnly && isInGraphicList(r) { + n := utf8.EncodeRune(runeTmp[:], r) + buf = append(buf, runeTmp[:n]...) + return buf + } + buf = f.appendEscape(buf) + switch r { + case '\a': + buf = append(buf, 'a') + case '\b': + buf = append(buf, 'b') + case '\f': + buf = append(buf, 'f') + case '\n': + buf = append(buf, 'n') + case '\r': + buf = append(buf, 'r') + case '\t': + buf = append(buf, 't') + case '\v': + buf = append(buf, 'v') + default: + switch { + case r < ' ' && f.exact: + buf = append(buf, 'x') + buf = append(buf, lowerhex[byte(r)>>4]) + buf = append(buf, lowerhex[byte(r)&0xF]) + case r > utf8.MaxRune: + r = 0xFFFD + fallthrough + case r < 0x10000: + buf = append(buf, 'u') + for s := 12; s >= 0; s -= 4 { + buf = append(buf, lowerhex[r>>uint(s)&0xF]) + } + default: + buf = append(buf, 'U') + for s := 28; s >= 0; s -= 4 { + buf = append(buf, lowerhex[r>>uint(s)&0xF]) + } + } + } + return buf +} + +func (f *Form) appendEscape(buf []byte) []byte { + buf = append(buf, '\\') + for i := 0; i < f.hashCount; i++ { + buf = append(buf, '#') + } + return buf +} + +// requiredHashCount returns the number of # characters +// that are required to quote the multiline string s. +func (f *Form) requiredHashCount(s string) int { + hashCount := 0 + i := 0 + // Find all occurrences of the triple-quote and count + // the maximum number of succeeding # characters. 
+ for { + j := strings.Index(s[i:], f.tripleQuote) + if j == -1 { + break + } + i += j + 3 + // Absorb all extra quotes, so we + // get to the end of the sequence. + for ; i < len(s); i++ { + if s[i] != f.quote { + break + } + } + e := i - 1 + // Count succeeding # characters. + for ; i < len(s); i++ { + if s[i] != '#' { + break + } + } + if nhash := i - e; nhash > hashCount { + hashCount = nhash + } + } + return hashCount +} + +// isInGraphicList reports whether the rune is in the isGraphic list. This separation +// from IsGraphic allows quoteWith to avoid two calls to IsPrint. +// Should be called only if IsPrint fails. +func isInGraphicList(r rune) bool { + // We know r must fit in 16 bits - see makeisprint.go. + if r > 0xFFFF { + return false + } + rr := uint16(r) + i := bsearch16(isGraphic, rr) + return i < len(isGraphic) && rr == isGraphic[i] +} + +// bsearch16 returns the smallest i such that a[i] >= x. +// If there is no such i, bsearch16 returns len(a). +func bsearch16(a []uint16, x uint16) int { + i, j := 0, len(a) + for i < j { + h := i + (j-i)/2 + if a[h] < x { + i = h + 1 + } else { + j = h + } + } + return i +} + +// isGraphic lists the graphic runes not matched by IsPrint. +var isGraphic = []uint16{ + 0x00a0, + 0x1680, + 0x2000, + 0x2001, + 0x2002, + 0x2003, + 0x2004, + 0x2005, + 0x2006, + 0x2007, + 0x2008, + 0x2009, + 0x200a, + 0x202f, + 0x205f, + 0x3000, +} diff --git a/vendor/cuelang.org/go/cue/literal/string.go b/vendor/cuelang.org/go/cue/literal/string.go new file mode 100644 index 000000000..8fa52d96f --- /dev/null +++ b/vendor/cuelang.org/go/cue/literal/string.go @@ -0,0 +1,411 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package literal + +import ( + "errors" + "strings" + "unicode" + "unicode/utf8" +) + +var ( + errSyntax = errors.New("invalid syntax") + errInvalidWhitespace = errors.New("invalid string: invalid whitespace") + errMissingNewline = errors.New( + "invalid string: opening quote of multiline string must be followed by newline") + errUnmatchedQuote = errors.New("invalid string: unmatched quote") + // TODO: making this an error is optional according to RFC 4627. But we + // could make it not an error if this ever results in an issue. + errSurrogate = errors.New("unmatched surrogate pair") +) + +// Unquote interprets s as a single- or double-quoted, single- or multi-line +// string, possibly with custom escape delimiters, returning the string value +// that s quotes. +func Unquote(s string) (string, error) { + info, nStart, _, err := ParseQuotes(s, s) + if err != nil { + return "", err + } + s = s[nStart:] + return info.Unquote(s) +} + +// QuoteInfo describes the type of quotes used for a string. +type QuoteInfo struct { + quote string + whitespace string + numHash int + multiline bool + char byte + numChar byte +} + +// IsDouble reports whether the literal uses double quotes. +func (q QuoteInfo) IsDouble() bool { + return q.char == '"' +} + +// ParseQuotes checks if the opening quotes in start matches the ending quotes +// in end and reports its type as q or an error if they do not matching or are +// invalid. nStart indicates the number of bytes used for the opening quote. 
+func ParseQuotes(start, end string) (q QuoteInfo, nStart, nEnd int, err error) { + for i, c := range start { + if c != '#' { + break + } + q.numHash = i + 1 + } + s := start[q.numHash:] + switch s[0] { + case '"', '\'': + q.char = s[0] + if len(s) > 3 && s[1] == s[0] && s[2] == s[0] { + switch s[3] { + case '\n': + q.quote = start[:3+q.numHash] + case '\r': + if len(s) > 4 && s[4] == '\n' { + q.quote = start[:4+q.numHash] + break + } + fallthrough + default: + return q, 0, 0, errMissingNewline + } + q.multiline = true + q.numChar = 3 + nStart = len(q.quote) + 1 // add whitespace later + } else { + q.quote = start[:1+q.numHash] + q.numChar = 1 + nStart = len(q.quote) + } + default: + return q, 0, 0, errSyntax + } + quote := start[:int(q.numChar)+q.numHash] + for i := 0; i < len(quote); i++ { + if j := len(end) - i - 1; j < 0 || quote[i] != end[j] { + return q, 0, 0, errUnmatchedQuote + } + } + if q.multiline { + i := len(end) - len(quote) + for i > 0 { + r, size := utf8.DecodeLastRuneInString(end[:i]) + if r == '\n' || !unicode.IsSpace(r) { + break + } + i -= size + } + q.whitespace = end[i : len(end)-len(quote)] + + if len(start) > nStart && start[nStart] != '\n' { + if !strings.HasPrefix(start[nStart:], q.whitespace) { + return q, 0, 0, errInvalidWhitespace + } + nStart += len(q.whitespace) + } + } + + return q, nStart, int(q.numChar) + q.numHash, nil +} + +// Unquote unquotes the given string. It must be terminated with a quote or an +// interpolation start. Escape sequences are expanded and surrogates +// are replaced with the corresponding non-surrogate code points. +func (q QuoteInfo) Unquote(s string) (string, error) { + if len(s) > 0 && !q.multiline { + if contains(s, '\n') || contains(s, '\r') { + return "", errSyntax + } + + // Is it trivial? Avoid allocation. 
+ if s[len(s)-1] == q.char && q.numHash == 0 { + if s := s[:len(s)-1]; isSimple(s, rune(q.char)) { + return s, nil + } + } + } + + var runeTmp [utf8.UTFMax]byte + buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations. + stripNL := false + for len(s) > 0 { + switch s[0] { + case '\r': + s = s[1:] + continue + case '\n': + switch { + case !q.multiline: + fallthrough + default: + return "", errInvalidWhitespace + case strings.HasPrefix(s[1:], q.whitespace): + s = s[1+len(q.whitespace):] + case strings.HasPrefix(s[1:], "\n"): + s = s[1:] + } + stripNL = true + buf = append(buf, '\n') + continue + } + c, multibyte, ss, err := unquoteChar(s, q) + if surHigh <= c && c < surEnd { + if c >= surLow { + return "", errSurrogate + } + var cl rune + cl, _, ss, err = unquoteChar(ss, q) + if cl < surLow || surEnd <= cl { + return "", errSurrogate + } + c = 0x10000 + (c-surHigh)*0x400 + (cl - surLow) + } + + if err != nil { + return "", err + } + + s = ss + if c < 0 { + if c == -2 { + stripNL = false + } + if stripNL { + // Strip the last newline, but only if it came from a closing + // quote. + buf = buf[:len(buf)-1] + } + return string(buf), nil + } + stripNL = false + if c < utf8.RuneSelf || !multibyte { + buf = append(buf, byte(c)) + } else { + n := utf8.EncodeRune(runeTmp[:], c) + buf = append(buf, runeTmp[:n]...) + } + } + // allow unmatched quotes if already checked. + return "", errUnmatchedQuote +} + +const ( + surHigh = 0xD800 + surLow = 0xDC00 + surEnd = 0xE000 +) + +func isSimple(s string, quote rune) bool { + // TODO(perf): check if using a simple DFA to detect surrogate pairs is + // faster than converting to code points. At the very least there should + // be an ASCII fast path. + for _, r := range s { + if r == quote || r == '\\' { + return false + } + if surHigh <= r && r < surEnd { + return false + } + } + return true +} + +// contains reports whether the string contains the byte c. 
+func contains(s string, c byte) bool { + for i := 0; i < len(s); i++ { + if s[i] == c { + return true + } + } + return false +} + +// unquoteChar decodes the first character or byte in the escaped string. +// It returns four values: +// +// 1) value, the decoded Unicode code point or byte value; the special value +// of -1 indicates terminated by quotes and -2 means terminated by \(. +// 2) multibyte, a boolean indicating whether the decoded character requires a multibyte UTF-8 representation; +// 3) tail, the remainder of the string after the character; and +// 4) an error that will be nil if the character is syntactically valid. +// +// The second argument, kind, specifies the type of literal being parsed +// and therefore which kind of escape sequences are permitted. +// For kind 's' only JSON escapes and \u{ are permitted. +// For kind 'b' also hexadecimal and octal escape sequences are permitted. +// +// The third argument, quote, specifies that an ASCII quoting character that +// is not permitted in the output. +func unquoteChar(s string, info QuoteInfo) (value rune, multibyte bool, tail string, err error) { + // easy cases + switch c := s[0]; { + case c == info.char && info.char != 0: + for i := 1; byte(i) < info.numChar; i++ { + if i >= len(s) || s[i] != info.char { + return rune(info.char), false, s[1:], nil + } + } + for i := 0; i < info.numHash; i++ { + if i+int(info.numChar) >= len(s) || s[i+int(info.numChar)] != '#' { + return rune(info.char), false, s[1:], nil + } + } + if ln := int(info.numChar) + info.numHash; len(s) != ln { + // TODO: terminating quote in middle of string + return 0, false, s[ln:], errSyntax + } + return -1, false, "", nil + case c >= utf8.RuneSelf: + // TODO: consider handling surrogate values. These are discarded by + // DecodeRuneInString. It is technically correct to disallow it, but + // some JSON parsers allow this anyway. 
+ r, size := utf8.DecodeRuneInString(s) + return r, true, s[size:], nil + case c != '\\': + return rune(s[0]), false, s[1:], nil + } + + if len(s) <= 1+info.numHash { + return '\\', false, s[1:], nil + } + for i := 1; i <= info.numHash && i < len(s); i++ { + if s[i] != '#' { + return '\\', false, s[1:], nil + } + } + + c := s[1+info.numHash] + s = s[2+info.numHash:] + + switch c { + case 'a': + value = '\a' + case 'b': + value = '\b' + case 'f': + value = '\f' + case 'n': + value = '\n' + case 'r': + value = '\r' + case 't': + value = '\t' + case 'v': + value = '\v' + case '/': + value = '/' + case 'x', 'u', 'U': + n := 0 + switch c { + case 'x': + n = 2 + case 'u': + n = 4 + case 'U': + n = 8 + } + var v rune + if len(s) < n { + err = errSyntax + return + } + for j := 0; j < n; j++ { + x, ok := unhex(s[j]) + if !ok { + err = errSyntax + return + } + v = v<<4 | x + } + s = s[n:] + if c == 'x' { + if info.char == '"' { + err = errSyntax + return + } + // single-byte string, possibly not UTF-8 + value = v + break + } + if v > utf8.MaxRune { + err = errSyntax + return + } + value = v + multibyte = true + case '0', '1', '2', '3', '4', '5', '6', '7': + if info.char == '"' { + err = errSyntax + return + } + v := rune(c) - '0' + if len(s) < 2 { + err = errSyntax + return + } + for j := 0; j < 2; j++ { // one digit already; two more + x := rune(s[j]) - '0' + if x < 0 || x > 7 { + err = errSyntax + return + } + v = (v << 3) | x + } + s = s[2:] + if v > 255 { + err = errSyntax + return + } + value = v + case '\\': + value = '\\' + case '\'', '"': + // TODO: should we allow escaping of quotes regardless? 
+ if c != info.char { + err = errSyntax + return + } + value = rune(c) + case '(': + if s != "" { + // TODO: terminating quote in middle of string + return 0, false, s, errSyntax + } + value = -2 + default: + err = errSyntax + return + } + tail = s + return +} + +func unhex(b byte) (v rune, ok bool) { + c := rune(b) + switch { + case '0' <= c && c <= '9': + return c - '0', true + case 'a' <= c && c <= 'f': + return c - 'a' + 10, true + case 'A' <= c && c <= 'F': + return c - 'A' + 10, true + } + return +} diff --git a/vendor/cuelang.org/go/cue/load/config.go b/vendor/cuelang.org/go/cue/load/config.go new file mode 100644 index 000000000..535b6b64c --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/config.go @@ -0,0 +1,634 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package load + +import ( + "io" + "os" + pathpkg "path" + "path/filepath" + goruntime "runtime" + "strings" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + "cuelang.org/go/internal/core/compile" + "cuelang.org/go/internal/core/eval" + "cuelang.org/go/internal/core/runtime" +) + +const ( + cueSuffix = ".cue" + modDir = "cue.mod" + configFile = "module.cue" + pkgDir = "pkg" +) + +// FromArgsUsage is a partial usage message that applications calling +// FromArgs may wish to include in their -help output. 
+// +// Some of the aspects of this documentation, like flags and handling '--' need +// to be implemented by the tools. +const FromArgsUsage = ` + is a list of arguments denoting a set of instances of the form: + + * * + +1. A list of source files + + CUE files are parsed, loaded and unified into a single instance. All files + must have the same package name. + + Data files, like YAML or JSON, are handled in one of two ways: + + a. Explicitly mapped into a single CUE namespace, using the --path, --files + and --list flags. In this case these are unified into a single instance + along with any other CUE files. + + b. Treated as a stream of data elements that each is optionally unified with + a single instance, which either consists of the other CUE files specified + on the command line or a single package. + + By default, the format of files is derived from the file extension. + This behavior may be modified with file arguments of the form : + For instance, + + cue eval foo.cue json: bar.data + + indicates that the bar.data file should be interpreted as a JSON file. + A qualifier applies to all files following it until the next qualifier. + + The following qualifiers are available: + + encodings + cue CUE definitions and data + json JSON data, one value only + jsonl newline-separated JSON values + yaml a YAML file, may contain a stream + proto Protobuf definitions + + interpretations + jsonschema data encoding describes JSON Schema + openapi data encoding describes Open API + + formats + data output as -- or only accept -- data + graph data allowing references or anchors + schema output as schema; defaults JSON files to JSON Schema + def full definitions, including documentation + +2. A list of relative directories to denote a package instance. + + Each directory matching the pattern is loaded as a separate instance. + The instance contains all files in this directory and ancestor directories, + up to the module root, with the same package name. 
The package name must + be either uniquely determined by the files in the given directory, or + explicitly defined using a package name qualifier. For instance, ./...:foo + selects all packages named foo in the any subdirectory of the current + working directory. + + 3. An import path referring to a directory within the current module + + All CUE files in that directory, and all the ancestor directories up to the + module root (if applicable), with a package name corresponding to the base + name of the directory or the optional explicit package name are loaded into + a single instance. + + Examples, assume a module name of acme.org/root: + example.com/foo package in cue.mod + ./foo package corresponding to foo directory + .:bar package in current directory with package name bar +` + +// GenPath reports the directory in which to store generated +// files. +func GenPath(root string) string { + return internal.GenPath(root) +} + +// A Config configures load behavior. +type Config struct { + // Context specifies the context for the load operation. + // If the context is cancelled, the loader may stop early + // and return an ErrCancelled error. + // If Context is nil, the load cannot be cancelled. + Context *build.Context + + loader *loader + + // A Module is a collection of packages and instances that are within the + // directory hierarchy rooted at the module root. The module root can be + // marked with a cue.mod file. + ModuleRoot string + + // Module specifies the module prefix. If not empty, this value must match + // the module field of an existing cue.mod file. + Module string + + // Package defines the name of the package to be loaded. If this is not set, + // the package must be uniquely defined from its context. Special values: + // _ load files without a package + // * load all packages. Files without packages are loaded + // in the _ package. 
+ Package string + + // Dir is the directory in which to run the build system's query tool + // that provides information about the packages. + // If Dir is empty, the tool is run in the current directory. + Dir string + + // Tags defines boolean tags or key-value pairs to select files to build + // or be injected as values in fields. + // + // Each string is of the form + // + // key [ "=" value ] + // + // where key is a valid CUE identifier and value valid CUE scalar. + // + // The Tags values are used to both select which files get included in a + // build and to inject values into the AST. + // + // + // File selection + // + // Files with an attribute of the form @if(expr) before a package clause + // are conditionally included if expr resolves to true, where expr refers to + // boolean values in Tags. + // + // It is an error for a file to have more than one @if attribute or to + // have a @if attribute without or after a package clause. + // + // + // Value injection + // + // The Tags values are also used to inject values into fields with a + // @tag attribute. + // + // For any field of the form + // + // field: x @tag(key) + // + // and Tags value for which the name matches key, the field will be + // modified to + // + // field: x & "value" + // + // By default, the injected value is treated as a string. Alternatively, a + // "type" option of the @tag attribute allows a value to be interpreted as + // an int, number, or bool. For instance, for a field + // + // field: x @tag(key,type=int) + // + // an entry "key=2" modifies the field to + // + // field: x & 2 + // + // Valid values for type are "int", "number", "bool", and "string". + // + // A @tag attribute can also define shorthand values, which can be injected + // into the fields without having to specify the key. For instance, for + // + // environment: string @tag(env,short=prod|staging) + // + // the Tags entry "prod" sets the environment field to the value "prod". 
+ // This is equivalent to a Tags entry of "env=prod". + // + // The use of @tag does not preclude using any of the usual CUE constraints + // to limit the possible values of a field. For instance + // + // environment: "prod" | "staging" @tag(env,short=prod|staging) + // + // ensures the user may only specify "prod" or "staging". + Tags []string + + // Include all files, regardless of tags. + AllCUEFiles bool + + // Deprecated: use Tags + BuildTags []string + releaseTags []string + + // If Tests is set, the loader includes not just the packages + // matching a particular pattern but also any related test packages. + Tests bool + + // If Tools is set, the loader includes tool files associated with + // a package. + Tools bool + + // filesMode indicates that files are specified + // explicitly on the command line. + filesMode bool + + // If DataFiles is set, the loader includes entries for directories that + // have no CUE files, but have recognized data files that could be converted + // to CUE. + DataFiles bool + + // StdRoot specifies an alternative directory for standard libaries. + // This is mostly used for bootstrapping. + StdRoot string + + // ParseFile is called to read and parse each file when preparing a + // package's syntax tree. It must be safe to call ParseFile simultaneously + // from multiple goroutines. If ParseFile is nil, the loader will uses + // parser.ParseFile. + // + // ParseFile should parse the source from src and use filename only for + // recording position information. + // + // An application may supply a custom implementation of ParseFile to change + // the effective file contents or the behavior of the parser, or to modify + // the syntax tree. + ParseFile func(name string, src interface{}) (*ast.File, error) + + // Overlay provides a mapping of absolute file paths to file contents. + // If the file with the given path already exists, the parser will use the + // alternative file contents provided by the map. 
+ // + // If the value must be of type string, []byte, io.Reader, or *ast.File. + Overlay map[string]Source + + // Stdin defines an alternative for os.Stdin for the file "-". When used, + // the corresponding build.File will be associated with the full buffer. + Stdin io.Reader + + fileSystem + + loadFunc build.LoadFunc +} + +func (c *Config) stdin() io.Reader { + if c.Stdin == nil { + return os.Stdin + } + return c.Stdin +} + +func (c *Config) newInstance(pos token.Pos, p importPath) *build.Instance { + dir, name, err := c.absDirFromImportPath(pos, p) + i := c.Context.NewInstance(dir, c.loadFunc) + i.Dir = dir + i.PkgName = name + i.DisplayPath = string(p) + i.ImportPath = string(p) + i.Root = c.ModuleRoot + i.Module = c.Module + i.Err = errors.Append(i.Err, err) + + return i +} + +func (c *Config) newRelInstance(pos token.Pos, path, pkgName string) *build.Instance { + fs := c.fileSystem + + var err errors.Error + dir := path + + p := c.Context.NewInstance(path, c.loadFunc) + p.PkgName = pkgName + p.DisplayPath = filepath.ToSlash(path) + // p.ImportPath = string(dir) // compute unique ID. + p.Root = c.ModuleRoot + p.Module = c.Module + + if isLocalImport(path) { + if c.Dir == "" { + err = errors.Append(err, errors.Newf(pos, "cwd unknown")) + } + dir = filepath.Join(c.Dir, filepath.FromSlash(path)) + } + + if path == "" { + err = errors.Append(err, errors.Newf(pos, + "import %q: invalid import path", path)) + } else if path != cleanImport(path) { + err = errors.Append(err, c.loader.errPkgf(nil, + "non-canonical import path: %q should be %q", path, pathpkg.Clean(path))) + } + + if importPath, e := c.importPathFromAbsDir(fsPath(dir), path); e != nil { + // Detect later to keep error messages consistent. 
+ } else { + p.ImportPath = string(importPath) + } + + p.Dir = dir + + if fs.isAbsPath(path) || strings.HasPrefix(path, "/") { + err = errors.Append(err, errors.Newf(pos, + "absolute import path %q not allowed", path)) + } + if err != nil { + p.Err = errors.Append(p.Err, err) + p.Incomplete = true + } + + return p +} + +func (c Config) newErrInstance(pos token.Pos, path importPath, err error) *build.Instance { + i := c.newInstance(pos, path) + i.Err = errors.Promote(err, "instance") + return i +} + +func toImportPath(dir string) importPath { + return importPath(filepath.ToSlash(dir)) +} + +type importPath string + +type fsPath string + +func (c *Config) importPathFromAbsDir(absDir fsPath, key string) (importPath, errors.Error) { + if c.ModuleRoot == "" { + return "", errors.Newf(token.NoPos, + "cannot determine import path for %q (root undefined)", key) + } + + dir := filepath.Clean(string(absDir)) + if !strings.HasPrefix(dir, c.ModuleRoot) { + return "", errors.Newf(token.NoPos, + "cannot determine import path for %q (dir outside of root)", key) + } + + pkg := filepath.ToSlash(dir[len(c.ModuleRoot):]) + switch { + case strings.HasPrefix(pkg, "/cue.mod/"): + pkg = pkg[len("/cue.mod/"):] + if pkg == "" { + return "", errors.Newf(token.NoPos, + "invalid package %q (root of %s)", key, modDir) + } + + // TODO(legacy): remove. 
+ case strings.HasPrefix(pkg, "/pkg/"): + pkg = pkg[len("/pkg/"):] + if pkg == "" { + return "", errors.Newf(token.NoPos, + "invalid package %q (root of %s)", key, pkgDir) + } + + case c.Module == "": + return "", errors.Newf(token.NoPos, + "cannot determine import path for %q (no module)", key) + default: + pkg = c.Module + pkg + } + + name := c.Package + switch name { + case "_", "*": + name = "" + } + + return addImportQualifier(importPath(pkg), name) +} + +func addImportQualifier(pkg importPath, name string) (importPath, errors.Error) { + if name != "" { + s := string(pkg) + if i := strings.LastIndexByte(s, '/'); i >= 0 { + s = s[i+1:] + } + if i := strings.LastIndexByte(s, ':'); i >= 0 { + // should never happen, but just in case. + s = s[i+1:] + if s != name { + return "", errors.Newf(token.NoPos, + "non-matching package names (%s != %s)", s, name) + } + } else if s != name { + pkg += importPath(":" + name) + } + } + + return pkg, nil +} + +// absDirFromImportPath converts a giving import path to an absolute directory +// and a package name. The root directory must be set. +// +// The returned directory may not exist. +func (c *Config) absDirFromImportPath(pos token.Pos, p importPath) (absDir, name string, err errors.Error) { + if c.ModuleRoot == "" { + return "", "", errors.Newf(pos, "cannot import %q (root undefined)", p) + } + + // Extract the package name. + + name = string(p) + switch i := strings.LastIndexAny(name, "/:"); { + case i < 0: + case p[i] == ':': + name = string(p[i+1:]) + p = p[:i] + + default: // p[i] == '/' + name = string(p[i+1:]) + } + + // TODO: fully test that name is a valid identifier. + if name == "" { + err = errors.Newf(pos, "empty package name in import path %q", p) + } else if strings.IndexByte(name, '.') >= 0 { + err = errors.Newf(pos, + "cannot determine package name for %q (set explicitly with ':')", p) + } + + // Determine the directory. 
+ + sub := filepath.FromSlash(string(p)) + switch hasPrefix := strings.HasPrefix(string(p), c.Module); { + case hasPrefix && len(sub) == len(c.Module): + absDir = c.ModuleRoot + + case hasPrefix && p[len(c.Module)] == '/': + absDir = filepath.Join(c.ModuleRoot, sub[len(c.Module)+1:]) + + default: + absDir = filepath.Join(GenPath(c.ModuleRoot), sub) + } + + return absDir, name, err +} + +// Complete updates the configuration information. After calling complete, +// the following invariants hold: +// - c.ModuleRoot != "" +// - c.Module is set to the module import prefix if there is a cue.mod file +// with the module property. +// - c.loader != nil +// - c.cache != "" +func (c Config) complete() (cfg *Config, err error) { + // Each major CUE release should add a tag here. + // Old tags should not be removed. That is, the cue1.x tag is present + // in all releases >= CUE 1.x. Code that requires CUE 1.x or later should + // say "+build cue1.x", and code that should only be built before CUE 1.x + // (perhaps it is the stub to use in that case) should say "+build !cue1.x". + c.releaseTags = []string{"cue0.1"} + + if c.Dir == "" { + c.Dir, err = os.Getwd() + if err != nil { + return nil, err + } + } else if c.Dir, err = filepath.Abs(c.Dir); err != nil { + return nil, err + } + + // TODO: we could populate this already with absolute file paths, + // but relative paths cannot be added. Consider what is reasonable. + if err := c.fileSystem.init(&c); err != nil { + return nil, err + } + + // TODO: determine root on a package basis. Maybe we even need a + // pkgname.cue.mod + // Look to see if there is a cue.mod. + if c.ModuleRoot == "" { + // Only consider the current directory by default + c.ModuleRoot = c.Dir + if root := c.findRoot(c.Dir); root != "" { + c.ModuleRoot = root + } + } + + c.loader = &loader{ + cfg: &c, + buildTags: make(map[string]bool), + } + + // TODO: also make this work if run from outside the module? 
+ switch { + case true: + mod := filepath.Join(c.ModuleRoot, modDir) + info, cerr := c.fileSystem.stat(mod) + if cerr != nil { + break + } + if info.IsDir() { + mod = filepath.Join(mod, configFile) + } + f, cerr := c.fileSystem.openFile(mod) + if cerr != nil { + break + } + + // TODO: move to full build again + file, err := parser.ParseFile("load", f) + if err != nil { + return nil, errors.Wrapf(err, token.NoPos, "invalid cue.mod file") + } + + r := runtime.New() + v, err := compile.Files(nil, r, "_", file) + if err != nil { + return nil, errors.Wrapf(err, token.NoPos, "invalid cue.mod file") + } + ctx := eval.NewContext(r, v) + v.Finalize(ctx) + prefix := v.Lookup(ctx.StringLabel("module")) + if prefix != nil { + name := ctx.StringValue(prefix.Value()) + if err := ctx.Err(); err != nil { + return &c, err.Err + } + pos := token.NoPos + src := prefix.Value().Source() + if src != nil { + pos = src.Pos() + } + if c.Module != "" && c.Module != name { + return &c, errors.Newf(pos, "inconsistent modules: got %q, want %q", name, c.Module) + } + c.Module = name + } + } + + c.loadFunc = c.loader.loadFunc() + + if c.Context == nil { + c.Context = build.NewContext( + build.Loader(c.loadFunc), + build.ParseFile(c.loader.cfg.ParseFile), + ) + } + + return &c, nil +} + +func (c Config) isRoot(dir string) bool { + fs := &c.fileSystem + // Note: cue.mod used to be a file. We still allow both to match. + _, err := fs.stat(filepath.Join(dir, modDir)) + return err == nil +} + +// findRoot returns the module root or "" if none was found. +func (c Config) findRoot(dir string) string { + fs := &c.fileSystem + + absDir, err := filepath.Abs(dir) + if err != nil { + return "" + } + abs := absDir + for { + if c.isRoot(abs) { + return abs + } + d := filepath.Dir(abs) + if filepath.Base(filepath.Dir(abs)) == modDir { + // The package was located within a "cue.mod" dir and there was + // not cue.mod found until now. So there is no root. 
+ return "" + } + if len(d) >= len(abs) { + break // reached top of file system, no cue.mod + } + abs = d + } + abs = absDir + + // TODO(legacy): remove this capability at some point. + for { + info, err := fs.stat(filepath.Join(abs, pkgDir)) + if err == nil && info.IsDir() { + return abs + } + d := filepath.Dir(abs) + if len(d) >= len(abs) { + return "" // reached top of file system, no pkg dir. + } + abs = d + } +} + +func home() string { + env := "HOME" + if goruntime.GOOS == "windows" { + env = "USERPROFILE" + } else if goruntime.GOOS == "plan9" { + env = "home" + } + return os.Getenv(env) +} diff --git a/vendor/cuelang.org/go/cue/load/doc.go b/vendor/cuelang.org/go/cue/load/doc.go new file mode 100644 index 000000000..d1b599f21 --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/doc.go @@ -0,0 +1,16 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package load loads CUE instances. +package load // import "cuelang.org/go/cue/load" diff --git a/vendor/cuelang.org/go/cue/load/errors.go b/vendor/cuelang.org/go/cue/load/errors.go new file mode 100644 index 000000000..a4312822b --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/errors.go @@ -0,0 +1,166 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package load + +import ( + "fmt" + "path/filepath" + "strings" + + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +func lastError(p *build.Instance) *PackageError { + if p == nil { + return nil + } + switch v := p.Err.(type) { + case *PackageError: + return v + } + return nil +} + +func report(p *build.Instance, err *PackageError) { + if err != nil { + p.ReportError(err) + } +} + +// A PackageError describes an error loading information about a package. +type PackageError struct { + ImportStack []string // shortest path from package named on command line to this one + Pos token.Pos // position of error + errors.Message // the error itself + IsImportCycle bool // the error is an import cycle +} + +func (p *PackageError) Position() token.Pos { return p.Pos } +func (p *PackageError) InputPositions() []token.Pos { return nil } +func (p *PackageError) Path() []string { return p.ImportStack } + +func (l *loader) errPkgf(importPos []token.Pos, format string, args ...interface{}) *PackageError { + err := &PackageError{ + ImportStack: l.stk.Copy(), + Message: errors.NewMessage(format, args), + } + err.fillPos(l.cfg.Dir, importPos) + return err +} + +func (p *PackageError) fillPos(cwd string, positions []token.Pos) { + if len(positions) > 0 && !p.Pos.IsValid() { + p.Pos = positions[0] + } +} + +// TODO(localize) +func (p *PackageError) Error() string { + // Import cycles deserve special treatment. 
+ if p.IsImportCycle { + return fmt.Sprintf("%s\npackage %s\n", p.Message, strings.Join(p.ImportStack, "\n\timports ")) + } + if p.Pos.IsValid() { + // Omit import stack. The full path to the file where the error + // is the most important thing. + return p.Pos.String() + ": " + p.Message.Error() + } + if len(p.ImportStack) == 0 { + return p.Message.Error() + } + return "package " + strings.Join(p.ImportStack, "\n\timports ") + ": " + p.Message.Error() +} + +// NoFilesError is the error used by Import to describe a directory +// containing no usable source files. (It may still contain +// tool files, files hidden by build tags, and so on.) +type NoFilesError struct { + Package *build.Instance + + ignored bool // whether any Go files were ignored due to build tags +} + +func (e *NoFilesError) Position() token.Pos { return token.NoPos } +func (e *NoFilesError) InputPositions() []token.Pos { return nil } +func (e *NoFilesError) Path() []string { return nil } + +// TODO(localize) +func (e *NoFilesError) Msg() (string, []interface{}) { return e.Error(), nil } + +// TODO(localize) +func (e *NoFilesError) Error() string { + // Count files beginning with _, which we will pretend don't exist at all. + dummy := 0 + for _, name := range e.Package.IgnoredCUEFiles { + if strings.HasPrefix(name, "_") { + dummy++ + } + } + + // path := shortPath(e.Package.Root, e.Package.Dir) + path := e.Package.DisplayPath + + if len(e.Package.IgnoredCUEFiles) > dummy { + // CUE files exist, but they were ignored due to build constraints. + msg := "build constraints exclude all CUE files in " + path + " (ignored: " + files := e.Package.IgnoredCUEFiles + if len(files) > 4 { + files = append(files[:4], "...") + } + for i, f := range files { + files[i] = filepath.ToSlash(f) + } + msg += strings.Join(files, ", ") + msg += ")" + return msg + } + // if len(e.Package.TestCUEFiles) > 0 { + // // Test CUE files exist, but we're not interested in them. 
+ // // The double-negative is unfortunate but we want e.Package.Dir + // // to appear at the end of error message. + // return "no non-test CUE files in " + e.Package.Dir + // } + return "no CUE files in " + path +} + +// MultiplePackageError describes an attempt to build a package composed of +// CUE files from different packages. +type MultiplePackageError struct { + Dir string // directory containing files + Packages []string // package names found + Files []string // corresponding files: Files[i] declares package Packages[i] +} + +func (e *MultiplePackageError) Position() token.Pos { return token.NoPos } +func (e *MultiplePackageError) InputPositions() []token.Pos { return nil } +func (e *MultiplePackageError) Path() []string { return nil } + +func (e *MultiplePackageError) Msg() (string, []interface{}) { + return "found packages %q (%s) and %s (%s) in %q", []interface{}{ + e.Packages[0], + e.Files[0], + e.Packages[1], + e.Files[1], + e.Dir, + } +} + +func (e *MultiplePackageError) Error() string { + // Error string limited to two entries for compatibility. + format, args := e.Msg() + return fmt.Sprintf(format, args...) +} diff --git a/vendor/cuelang.org/go/cue/load/fs.go b/vendor/cuelang.org/go/cue/load/fs.go new file mode 100644 index 000000000..b269f0052 --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/fs.go @@ -0,0 +1,291 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package load + +import ( + "bytes" + "io" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +type overlayFile struct { + basename string + contents []byte + file *ast.File + modtime time.Time + isDir bool +} + +func (f *overlayFile) Name() string { return f.basename } +func (f *overlayFile) Size() int64 { return int64(len(f.contents)) } +func (f *overlayFile) Mode() os.FileMode { return 0644 } +func (f *overlayFile) ModTime() time.Time { return f.modtime } +func (f *overlayFile) IsDir() bool { return f.isDir } +func (f *overlayFile) Sys() interface{} { return nil } + +// A fileSystem specifies the supporting context for a build. +type fileSystem struct { + overlayDirs map[string]map[string]*overlayFile + cwd string +} + +func (fs *fileSystem) getDir(dir string, create bool) map[string]*overlayFile { + dir = filepath.Clean(dir) + m, ok := fs.overlayDirs[dir] + if !ok && create { + m = map[string]*overlayFile{} + fs.overlayDirs[dir] = m + } + return m +} + +func (fs *fileSystem) init(c *Config) error { + fs.cwd = c.Dir + + overlay := c.Overlay + fs.overlayDirs = map[string]map[string]*overlayFile{} + + // Organize overlay + for filename, src := range overlay { + // TODO: do we need to further clean the path or check that the + // specified files are within the root/ absolute files? 
+ dir, base := filepath.Split(filename) + m := fs.getDir(dir, true) + + b, file, err := src.contents() + if err != nil { + return err + } + m[base] = &overlayFile{ + basename: base, + contents: b, + file: file, + modtime: time.Now(), + } + + for { + prevdir := dir + dir, base = filepath.Split(filepath.Dir(dir)) + if dir == prevdir || dir == "" { + break + } + m := fs.getDir(dir, true) + if m[base] == nil { + m[base] = &overlayFile{ + basename: base, + modtime: time.Now(), + isDir: true, + } + } + } + } + return nil +} + +func (fs *fileSystem) joinPath(elem ...string) string { + return filepath.Join(elem...) +} + +func (fs *fileSystem) splitPathList(s string) []string { + return filepath.SplitList(s) +} + +func (fs *fileSystem) isAbsPath(path string) bool { + return filepath.IsAbs(path) +} + +func (fs *fileSystem) makeAbs(path string) string { + if fs.isAbsPath(path) { + return path + } + return filepath.Clean(filepath.Join(fs.cwd, path)) +} + +func (fs *fileSystem) isDir(path string) bool { + path = fs.makeAbs(path) + if fs.getDir(path, false) != nil { + return true + } + fi, err := os.Stat(path) + return err == nil && fi.IsDir() +} + +func (fs *fileSystem) hasSubdir(root, dir string) (rel string, ok bool) { + // Try using paths we received. + if rel, ok = hasSubdir(root, dir); ok { + return + } + + // Try expanding symlinks and comparing + // expanded against unexpanded and + // expanded against expanded. 
+ rootSym, _ := filepath.EvalSymlinks(root) + dirSym, _ := filepath.EvalSymlinks(dir) + + if rel, ok = hasSubdir(rootSym, dir); ok { + return + } + if rel, ok = hasSubdir(root, dirSym); ok { + return + } + return hasSubdir(rootSym, dirSym) +} + +func hasSubdir(root, dir string) (rel string, ok bool) { + const sep = string(filepath.Separator) + root = filepath.Clean(root) + if !strings.HasSuffix(root, sep) { + root += sep + } + dir = filepath.Clean(dir) + if !strings.HasPrefix(dir, root) { + return "", false + } + return filepath.ToSlash(dir[len(root):]), true +} + +func (fs *fileSystem) readDir(path string) ([]os.FileInfo, errors.Error) { + path = fs.makeAbs(path) + m := fs.getDir(path, false) + items, err := ioutil.ReadDir(path) + if err != nil { + if !os.IsNotExist(err) || m == nil { + return nil, errors.Wrapf(err, token.NoPos, "readDir") + } + } + if m != nil { + done := map[string]bool{} + for i, fi := range items { + done[fi.Name()] = true + if o := m[fi.Name()]; o != nil { + items[i] = o + } + } + for _, o := range m { + if !done[o.Name()] { + items = append(items, o) + } + } + sort.Slice(items, func(i, j int) bool { + return items[i].Name() < items[j].Name() + }) + } + return items, nil +} + +func (fs *fileSystem) getOverlay(path string) *overlayFile { + dir, base := filepath.Split(path) + if m := fs.getDir(dir, false); m != nil { + return m[base] + } + return nil +} + +func (fs *fileSystem) stat(path string) (os.FileInfo, errors.Error) { + path = fs.makeAbs(path) + if fi := fs.getOverlay(path); fi != nil { + return fi, nil + } + fi, err := os.Stat(path) + if err != nil { + return nil, errors.Wrapf(err, token.NoPos, "stat") + } + return fi, nil +} + +func (fs *fileSystem) lstat(path string) (os.FileInfo, errors.Error) { + path = fs.makeAbs(path) + if fi := fs.getOverlay(path); fi != nil { + return fi, nil + } + fi, err := os.Lstat(path) + if err != nil { + return nil, errors.Wrapf(err, token.NoPos, "stat") + } + return fi, nil +} + +func (fs *fileSystem) 
openFile(path string) (io.ReadCloser, errors.Error) { + path = fs.makeAbs(path) + if fi := fs.getOverlay(path); fi != nil { + return ioutil.NopCloser(bytes.NewReader(fi.contents)), nil + } + + f, err := os.Open(path) + if err != nil { + return nil, errors.Wrapf(err, token.NoPos, "load") + } + return f, nil +} + +var skipDir = errors.Newf(token.NoPos, "skip directory") + +type walkFunc func(path string, info os.FileInfo, err errors.Error) errors.Error + +func (fs *fileSystem) walk(root string, f walkFunc) error { + fi, err := fs.lstat(root) + if err != nil { + err = f(root, fi, err) + } else if !fi.IsDir() { + return errors.Newf(token.NoPos, "path %q is not a directory", root) + } else { + err = fs.walkRec(root, fi, f) + } + if err == skipDir { + return nil + } + return err + +} + +func (fs *fileSystem) walkRec(path string, info os.FileInfo, f walkFunc) errors.Error { + if !info.IsDir() { + return f(path, info, nil) + } + + dir, err := fs.readDir(path) + err1 := f(path, info, err) + + // If err != nil, walk can't walk into this directory. + // err1 != nil means walkFn want walk to skip this directory or stop walking. + // Therefore, if one of err and err1 isn't nil, walk will return. + if err != nil || err1 != nil { + // The caller's behavior is controlled by the return value, which is decided + // by walkFn. walkFn may ignore err and return nil. + // If walkFn returns SkipDir, it will be handled by the caller. + // So walk should return whatever walkFn returns. 
+ return err1 + } + + for _, info := range dir { + filename := fs.joinPath(path, info.Name()) + err = fs.walkRec(filename, info, f) + if err != nil { + if !info.IsDir() || err != skipDir { + return err + } + } + } + return nil +} diff --git a/vendor/cuelang.org/go/cue/load/import.go b/vendor/cuelang.org/go/cue/load/import.go new file mode 100644 index 000000000..9f6e9ad00 --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/import.go @@ -0,0 +1,673 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package load + +import ( + "bytes" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + "cuelang.org/go/internal/filetypes" +) + +// An importMode controls the behavior of the Import method. +type importMode uint + +const ( + // If findOnly is set, Import stops after locating the directory + // that should contain the sources for a package. It does not + // read any files in the directory. + findOnly importMode = 1 << iota + + // If importComment is set, parse import comments on package statements. + // Import returns an error if it finds a comment it cannot understand + // or finds conflicting comments in multiple source files. + // See golang.org/s/go14customimport for more information. 
+ importComment + + allowAnonymous +) + +// importPkg returns details about the CUE package named by the import path, +// interpreting local import paths relative to the srcDir directory. +// If the path is a local import path naming a package that can be imported +// using a standard import path, the returned package will set p.ImportPath +// to that path. +// +// In the directory and ancestor directories up to including one with a +// cue.mod file, all .cue files are considered part of the package except for: +// +// - files starting with _ or . (likely editor temporary files) +// - files with build constraints not satisfied by the context +// +// If an error occurs, importPkg sets the error in the returned instance, +// which then may contain partial information. +// +// pkgName indicates which packages to load. It supports the following +// values: +// "" the default package for the directory, if only one +// is present. +// _ anonymous files (which may be marked with _) +// * all packages +// +func (l *loader) importPkg(pos token.Pos, p *build.Instance) []*build.Instance { + l.stk.Push(p.ImportPath) + defer l.stk.Pop() + + cfg := l.cfg + ctxt := &cfg.fileSystem + + if p.Err != nil { + return []*build.Instance{p} + } + + retErr := func(errs errors.Error) []*build.Instance { + // XXX: move this loop to ReportError + for _, err := range errors.Errors(errs) { + p.ReportError(err) + } + return []*build.Instance{p} + } + + if !strings.HasPrefix(p.Dir, cfg.ModuleRoot) { + err := errors.Newf(token.NoPos, "module root not defined", p.DisplayPath) + return retErr(err) + } + + fp := newFileProcessor(cfg, p) + + if p.PkgName == "" { + if l.cfg.Package == "*" { + fp.ignoreOther = true + fp.allPackages = true + p.PkgName = "_" + } else { + p.PkgName = l.cfg.Package + } + } + if p.PkgName != "" { + // If we have an explicit package name, we can ignore other packages. 
+ fp.ignoreOther = true + } + + if !strings.HasPrefix(p.Dir, cfg.ModuleRoot) { + panic("") + } + + var dirs [][2]string + genDir := GenPath(cfg.ModuleRoot) + if strings.HasPrefix(p.Dir, genDir) { + dirs = append(dirs, [2]string{genDir, p.Dir}) + // TODO(legacy): don't support "pkg" + // && p.PkgName != "_" + if filepath.Base(genDir) != "pkg" { + for _, sub := range []string{"pkg", "usr"} { + rel, err := filepath.Rel(genDir, p.Dir) + if err != nil { + // should not happen + return retErr( + errors.Wrapf(err, token.NoPos, "invalid path")) + } + base := filepath.Join(cfg.ModuleRoot, modDir, sub) + dir := filepath.Join(base, rel) + dirs = append(dirs, [2]string{base, dir}) + } + } + } else { + dirs = append(dirs, [2]string{cfg.ModuleRoot, p.Dir}) + } + + found := false + for _, d := range dirs { + info, err := ctxt.stat(d[1]) + if err == nil && info.IsDir() { + found = true + break + } + } + + if !found { + return retErr( + &PackageError{ + Message: errors.NewMessage("cannot find package %q", + []interface{}{p.DisplayPath}), + }) + } + + // This algorithm assumes that multiple directories within cue.mod/*/ + // have the same module scope and that there are no invalid modules. 
+ inModule := false // if pkg == "_" + for _, d := range dirs { + if l.cfg.findRoot(d[1]) != "" { + inModule = true + break + } + } + + for _, d := range dirs { + for dir := filepath.Clean(d[1]); ctxt.isDir(dir); { + files, err := ctxt.readDir(dir) + if err != nil && !os.IsNotExist(err) { + return retErr(errors.Wrapf(err, pos, "import failed reading dir %v", dirs[0][1])) + } + for _, f := range files { + if f.IsDir() { + continue + } + if f.Name() == "-" { + if _, err := cfg.fileSystem.stat("-"); !os.IsNotExist(err) { + continue + } + } + file, err := filetypes.ParseFile(f.Name(), filetypes.Input) + if err != nil { + p.UnknownFiles = append(p.UnknownFiles, &build.File{ + Filename: f.Name(), + }) + continue // skip unrecognized file types + } + fp.add(pos, dir, file, importComment) + } + + if p.PkgName == "" || !inModule || l.cfg.isRoot(dir) || dir == d[0] { + break + } + + // From now on we just ignore files that do not belong to the same + // package. + fp.ignoreOther = true + + parent, _ := filepath.Split(dir) + parent = filepath.Clean(parent) + + if parent == dir || len(parent) < len(d[0]) { + break + } + dir = parent + } + } + + all := []*build.Instance{} + + for _, p := range fp.pkgs { + impPath, err := addImportQualifier(importPath(p.ImportPath), p.PkgName) + p.ImportPath = string(impPath) + if err != nil { + p.ReportError(err) + } + + all = append(all, p) + rewriteFiles(p, cfg.ModuleRoot, false) + if errs := fp.finalize(p); errs != nil { + p.ReportError(errs) + return all + } + + l.addFiles(cfg.ModuleRoot, p) + _ = p.Complete() + } + sort.Slice(all, func(i, j int) bool { + return all[i].Dir < all[j].Dir + }) + return all +} + +// loadFunc creates a LoadFunc that can be used to create new build.Instances. 
+func (l *loader) loadFunc() build.LoadFunc { + + return func(pos token.Pos, path string) *build.Instance { + cfg := l.cfg + + impPath := importPath(path) + if isLocalImport(path) { + return cfg.newErrInstance(pos, impPath, + errors.Newf(pos, "relative import paths not allowed (%q)", path)) + } + + // is it a builtin? + if strings.IndexByte(strings.Split(path, "/")[0], '.') == -1 { + if l.cfg.StdRoot != "" { + p := cfg.newInstance(pos, impPath) + _ = l.importPkg(pos, p) + return p + } + return nil + } + + p := cfg.newInstance(pos, impPath) + _ = l.importPkg(pos, p) + return p + } +} + +func normPrefix(root, path string, isLocal bool) string { + root = filepath.Clean(root) + prefix := "" + if isLocal { + prefix = "." + string(filepath.Separator) + } + if !strings.HasSuffix(root, string(filepath.Separator)) && + strings.HasPrefix(path, root) { + path = prefix + path[len(root)+1:] + } + return path +} + +func rewriteFiles(p *build.Instance, root string, isLocal bool) { + p.Root = root + + normalizeFilenames(root, p.CUEFiles, isLocal) + normalizeFilenames(root, p.TestCUEFiles, isLocal) + normalizeFilenames(root, p.ToolCUEFiles, isLocal) + normalizeFilenames(root, p.IgnoredCUEFiles, isLocal) + normalizeFilenames(root, p.InvalidCUEFiles, isLocal) + + normalizeFiles(p.BuildFiles) + normalizeFiles(p.IgnoredFiles) + normalizeFiles(p.OrphanedFiles) + normalizeFiles(p.InvalidFiles) + normalizeFiles(p.UnknownFiles) +} + +func normalizeFilenames(root string, a []string, isLocal bool) { + for i, path := range a { + if strings.HasPrefix(path, root) { + a[i] = normPrefix(root, path, isLocal) + } + } + sortParentsFirst(a) +} + +func sortParentsFirst(s []string) { + sort.Slice(s, func(i, j int) bool { + return len(filepath.Dir(s[i])) < len(filepath.Dir(s[j])) + }) +} + +func normalizeFiles(a []*build.File) { + sort.Slice(a, func(i, j int) bool { + return len(filepath.Dir(a[i].Filename)) < len(filepath.Dir(a[j].Filename)) + }) +} + +type fileProcessor struct { + firstFile string + 
firstCommentFile string + imported map[string][]token.Pos + allTags map[string]bool + allFiles bool + ignoreOther bool // ignore files from other packages + allPackages bool + + c *Config + pkgs map[string]*build.Instance + pkg *build.Instance + + err errors.Error +} + +func newFileProcessor(c *Config, p *build.Instance) *fileProcessor { + return &fileProcessor{ + imported: make(map[string][]token.Pos), + allTags: make(map[string]bool), + c: c, + pkgs: map[string]*build.Instance{"_": p}, + pkg: p, + } +} + +func countCUEFiles(c *Config, p *build.Instance) int { + count := len(p.CUEFiles) + if c.Tools { + count += len(p.ToolCUEFiles) + } + if c.Tests { + count += len(p.TestCUEFiles) + } + return count +} + +func (fp *fileProcessor) finalize(p *build.Instance) errors.Error { + if fp.err != nil { + return fp.err + } + if countCUEFiles(fp.c, p) == 0 && + !fp.c.DataFiles && + (p.PkgName != "_" || !fp.allPackages) { + fp.err = errors.Append(fp.err, &NoFilesError{Package: p, ignored: len(p.IgnoredCUEFiles) > 0}) + return fp.err + } + + for tag := range fp.allTags { + p.AllTags = append(p.AllTags, tag) + } + sort.Strings(p.AllTags) + + p.ImportPaths, _ = cleanImports(fp.imported) + + return nil +} + +func (fp *fileProcessor) add(pos token.Pos, root string, file *build.File, mode importMode) (added bool) { + fullPath := file.Filename + if fullPath != "-" { + if !filepath.IsAbs(fullPath) { + fullPath = filepath.Join(root, fullPath) + } + } + file.Filename = fullPath + + base := filepath.Base(fullPath) + + // special * and _ + p := fp.pkg // default package + + // badFile := func(p *build.Instance, err errors.Error) bool { + badFile := func(err errors.Error) bool { + fp.err = errors.Append(fp.err, err) + p.InvalidCUEFiles = append(p.InvalidCUEFiles, fullPath) + p.InvalidFiles = append(p.InvalidFiles, file) + return true + } + + match, data, err := matchFile(fp.c, file, true, fp.allFiles, fp.allTags) + if err != nil { + return badFile(err) + } + if !match { + if file.Encoding 
== build.CUE && file.Interpretation == "" { + p.IgnoredCUEFiles = append(p.IgnoredCUEFiles, fullPath) + p.IgnoredFiles = append(p.IgnoredFiles, file) + } else { + p.OrphanedFiles = append(p.OrphanedFiles, file) + p.DataFiles = append(p.DataFiles, fullPath) + } + return false // don't mark as added + } + + pf, perr := parser.ParseFile(fullPath, data, parser.ImportsOnly, parser.ParseComments) + if perr != nil { + badFile(errors.Promote(perr, "add failed")) + return true + } + + _, pkg, _ := internal.PackageInfo(pf) + if pkg == "" { + pkg = "_" + } + + switch { + case pkg == p.PkgName, mode&allowAnonymous != 0: + case fp.allPackages && pkg != "_": + q := fp.pkgs[pkg] + if q == nil { + q = &build.Instance{ + PkgName: pkg, + + Dir: p.Dir, + DisplayPath: p.DisplayPath, + ImportPath: p.ImportPath + ":" + pkg, + Root: p.Root, + Module: p.Module, + } + fp.pkgs[pkg] = q + } + p = q + + case pkg != "_": + + default: + p.IgnoredCUEFiles = append(p.IgnoredCUEFiles, fullPath) + p.IgnoredFiles = append(p.IgnoredFiles, file) + return false // don't mark as added + } + + if !fp.c.AllCUEFiles { + if include, err := shouldBuildFile(pf, fp); !include { + if err != nil { + fp.err = errors.Append(fp.err, err) + } + p.IgnoredCUEFiles = append(p.InvalidCUEFiles, fullPath) + p.IgnoredFiles = append(p.InvalidFiles, file) + return false + } + } + + if pkg != "" && pkg != "_" { + if p.PkgName == "" { + p.PkgName = pkg + fp.firstFile = base + } else if pkg != p.PkgName { + if fp.ignoreOther { + p.IgnoredCUEFiles = append(p.IgnoredCUEFiles, fullPath) + p.IgnoredFiles = append(p.IgnoredFiles, file) + return false + } + return badFile(&MultiplePackageError{ + Dir: p.Dir, + Packages: []string{p.PkgName, pkg}, + Files: []string{fp.firstFile, base}, + }) + } + } + + isTest := strings.HasSuffix(base, "_test"+cueSuffix) + isTool := strings.HasSuffix(base, "_tool"+cueSuffix) + + if mode&importComment != 0 { + qcom, line := findimportComment(data) + if line != 0 { + com, err := strconv.Unquote(qcom) + 
if err != nil { + badFile(errors.Newf(pos, "%s:%d: cannot parse import comment", fullPath, line)) + } else if p.ImportComment == "" { + p.ImportComment = com + fp.firstCommentFile = base + } else if p.ImportComment != com { + badFile(errors.Newf(pos, "found import comments %q (%s) and %q (%s) in %s", p.ImportComment, fp.firstCommentFile, com, base, p.Dir)) + } + } + } + + for _, decl := range pf.Decls { + d, ok := decl.(*ast.ImportDecl) + if !ok { + continue + } + for _, spec := range d.Specs { + quoted := spec.Path.Value + path, err := strconv.Unquote(quoted) + if err != nil { + badFile(errors.Newf( + spec.Path.Pos(), + "%s: parser returned invalid quoted string: <%s>", fullPath, quoted, + )) + } + if !isTest || fp.c.Tests { + fp.imported[path] = append(fp.imported[path], spec.Pos()) + } + } + } + switch { + case isTest: + p.TestCUEFiles = append(p.TestCUEFiles, fullPath) + if fp.c.loader.cfg.Tests { + p.BuildFiles = append(p.BuildFiles, file) + } else { + p.IgnoredFiles = append(p.IgnoredFiles, file) + } + case isTool: + p.ToolCUEFiles = append(p.ToolCUEFiles, fullPath) + if fp.c.loader.cfg.Tools { + p.BuildFiles = append(p.BuildFiles, file) + } else { + p.IgnoredFiles = append(p.IgnoredFiles, file) + } + default: + p.CUEFiles = append(p.CUEFiles, fullPath) + p.BuildFiles = append(p.BuildFiles, file) + } + return true +} + +func nameExt(name string) string { + i := strings.LastIndex(name, ".") + if i < 0 { + return "" + } + return name[i:] +} + +// hasCUEFiles reports whether dir contains any files with names ending in .go. +// For a vendor check we must exclude directories that contain no .go files. +// Otherwise it is not possible to vendor just a/b/c and still import the +// non-vendored a/b. See golang.org/issue/13832. 
+func hasCUEFiles(ctxt *fileSystem, dir string) bool { + ents, _ := ctxt.readDir(dir) + for _, ent := range ents { + if !ent.IsDir() && strings.HasSuffix(ent.Name(), cueSuffix) { + return true + } + } + return false +} + +func findimportComment(data []byte) (s string, line int) { + // expect keyword package + word, data := parseWord(data) + if string(word) != "package" { + return "", 0 + } + + // expect package name + _, data = parseWord(data) + + // now ready for import comment, a // comment + // beginning and ending on the current line. + for len(data) > 0 && (data[0] == ' ' || data[0] == '\t' || data[0] == '\r') { + data = data[1:] + } + + var comment []byte + switch { + case bytes.HasPrefix(data, slashSlash): + i := bytes.Index(data, newline) + if i < 0 { + i = len(data) + } + comment = data[2:i] + } + comment = bytes.TrimSpace(comment) + + // split comment into `import`, `"pkg"` + word, arg := parseWord(comment) + if string(word) != "import" { + return "", 0 + } + + line = 1 + bytes.Count(data[:cap(data)-cap(arg)], newline) + return strings.TrimSpace(string(arg)), line +} + +var ( + slashSlash = []byte("//") + newline = []byte("\n") +) + +// skipSpaceOrComment returns data with any leading spaces or comments removed. +func skipSpaceOrComment(data []byte) []byte { + for len(data) > 0 { + switch data[0] { + case ' ', '\t', '\r', '\n': + data = data[1:] + continue + case '/': + if bytes.HasPrefix(data, slashSlash) { + i := bytes.Index(data, newline) + if i < 0 { + return nil + } + data = data[i+1:] + continue + } + } + break + } + return data +} + +// parseWord skips any leading spaces or comments in data +// and then parses the beginning of data as an identifier or keyword, +// returning that word and what remains after the word. +func parseWord(data []byte) (word, rest []byte) { + data = skipSpaceOrComment(data) + + // Parse past leading word characters. 
+ rest = data + for { + r, size := utf8.DecodeRune(rest) + if unicode.IsLetter(r) || '0' <= r && r <= '9' || r == '_' { + rest = rest[size:] + continue + } + break + } + + word = data[:len(data)-len(rest)] + if len(word) == 0 { + return nil, nil + } + + return word, rest +} + +func cleanImports(m map[string][]token.Pos) ([]string, map[string][]token.Pos) { + all := make([]string, 0, len(m)) + for path := range m { + all = append(all, path) + } + sort.Strings(all) + return all, m +} + +// // Import is shorthand for Default.Import. +// func Import(path, srcDir string, mode ImportMode) (*Package, error) { +// return Default.Import(path, srcDir, mode) +// } + +// // ImportDir is shorthand for Default.ImportDir. +// func ImportDir(dir string, mode ImportMode) (*Package, error) { +// return Default.ImportDir(dir, mode) +// } + +var slashslash = []byte("//") + +// isLocalImport reports whether the import path is +// a local import path, like ".", "..", "./foo", or "../foo". +func isLocalImport(path string) bool { + return path == "." || path == ".." || + strings.HasPrefix(path, "./") || strings.HasPrefix(path, "../") +} diff --git a/vendor/cuelang.org/go/cue/load/loader.go b/vendor/cuelang.org/go/cue/load/loader.go new file mode 100644 index 000000000..1cdd0c187 --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/loader.go @@ -0,0 +1,318 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package load + +// Files in package are to a large extent based on Go files from the following +// Go packages: +// - cmd/go/internal/load +// - go/build + +import ( + pathpkg "path" + "path/filepath" + "strings" + "unicode" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal/encoding" + "cuelang.org/go/internal/filetypes" + + // Trigger the unconditional loading of all core builtin packages if load + // is used. This was deemed the simplest way to avoid having to import + // this line explicitly, and thus breaking existing code, for the majority + // of cases, while not introducing an import cycle. + _ "cuelang.org/go/pkg" +) + +// Instances returns the instances named by the command line arguments 'args'. +// If errors occur trying to load an instance it is returned with Incomplete +// set. Errors directly related to loading the instance are recorded in this +// instance, but errors that occur loading dependencies are recorded in these +// dependencies. +func Instances(args []string, c *Config) []*build.Instance { + if c == nil { + c = &Config{} + } + newC, err := c.complete() + if err != nil { + return []*build.Instance{c.newErrInstance(token.NoPos, "", err)} + } + c = newC + + l := c.loader + + // TODO: require packages to be placed before files. At some point this + // could be relaxed. + i := 0 + for ; i < len(args) && filetypes.IsPackage(args[i]); i++ { + } + + a := []*build.Instance{} + + if len(args) == 0 || i > 0 { + for _, m := range l.importPaths(args[:i]) { + if m.Err != nil { + inst := c.newErrInstance(token.NoPos, "", m.Err) + a = append(a, inst) + continue + } + a = append(a, m.Pkgs...) 
+ } + } + + if args = args[i:]; len(args) > 0 { + files, err := filetypes.ParseArgs(args) + if err != nil { + return []*build.Instance{c.newErrInstance(token.NoPos, "", err)} + } + a = append(a, l.cueFilesPackage(files)) + } + + for _, p := range a { + tags, err := findTags(p) + if err != nil { + p.ReportError(err) + } + l.tags = append(l.tags, tags...) + } + + // TODO(api): have API call that returns an error which is the aggregate + // of all build errors. Certain errors, like these, hold across builds. + if err := injectTags(c.Tags, l); err != nil { + for _, p := range a { + p.ReportError(err) + } + } + + if l.replacements == nil { + return a + } + + for _, p := range a { + for _, f := range p.Files { + ast.Walk(f, nil, func(n ast.Node) { + if ident, ok := n.(*ast.Ident); ok { + if v, ok := l.replacements[ident.Node]; ok { + ident.Node = v + } + } + }) + } + } + + return a +} + +// Mode flags for loadImport and download (in get.go). +const ( + // resolveImport means that loadImport should do import path expansion. + // That is, resolveImport means that the import path came from + // a source file and has not been expanded yet to account for + // vendoring or possible module adjustment. + // Every import path should be loaded initially with resolveImport, + // and then the expanded version (for example with the /vendor/ in it) + // gets recorded as the canonical import path. At that point, future loads + // of that package must not pass resolveImport, because + // disallowVendor will reject direct use of paths containing /vendor/. 
+ resolveImport = 1 << iota +) + +type loader struct { + cfg *Config + stk importStack + tags []tag // tags found in files + buildTags map[string]bool + replacements map[ast.Node]ast.Node +} + +func (l *loader) abs(filename string) string { + if !isLocalImport(filename) { + return filename + } + return filepath.Join(l.cfg.Dir, filename) +} + +// cueFilesPackage creates a package for building a collection of CUE files +// (typically named on the command line). +func (l *loader) cueFilesPackage(files []*build.File) *build.Instance { + pos := token.NoPos + cfg := l.cfg + cfg.filesMode = true + // ModInit() // TODO: support modules + pkg := l.cfg.Context.NewInstance(cfg.Dir, l.loadFunc()) + + _, err := filepath.Abs(cfg.Dir) + if err != nil { + return cfg.newErrInstance(pos, toImportPath(cfg.Dir), + errors.Wrapf(err, pos, "could not convert '%s' to absolute path", cfg.Dir)) + } + + for _, bf := range files { + f := bf.Filename + if f == "-" { + continue + } + if !filepath.IsAbs(f) { + f = filepath.Join(cfg.Dir, f) + } + fi, err := cfg.fileSystem.stat(f) + if err != nil { + return cfg.newErrInstance(pos, toImportPath(f), + errors.Wrapf(err, pos, "could not find file")) + } + if fi.IsDir() { + return cfg.newErrInstance(token.NoPos, toImportPath(f), + errors.Newf(pos, "file is a directory %v", f)) + } + } + + fp := newFileProcessor(cfg, pkg) + for _, file := range files { + fp.add(pos, cfg.Dir, file, allowAnonymous) + } + + // TODO: ModImportFromFiles(files) + pkg.Dir = cfg.Dir + rewriteFiles(pkg, pkg.Dir, true) + for _, err := range errors.Errors(fp.finalize(pkg)) { // ImportDir(&ctxt, dir, 0) + var x *NoFilesError + if len(pkg.OrphanedFiles) == 0 || !errors.As(err, &x) { + pkg.ReportError(err) + } + } + // TODO: Support module importing. + // if ModDirImportPath != nil { + // // Use the effective import path of the directory + // // for deciding visibility during pkg.load. 
+ // bp.ImportPath = ModDirImportPath(dir) + // } + + l.addFiles(cfg.Dir, pkg) + + pkg.User = true + l.stk.Push("user") + _ = pkg.Complete() + l.stk.Pop() + pkg.User = true + //pkg.LocalPrefix = dirToImportPath(dir) + pkg.DisplayPath = "command-line-arguments" + + return pkg +} + +func (l *loader) addFiles(dir string, p *build.Instance) { + for _, f := range p.BuildFiles { + d := encoding.NewDecoder(f, &encoding.Config{ + Stdin: l.cfg.stdin(), + ParseFile: l.cfg.ParseFile, + }) + for ; !d.Done(); d.Next() { + _ = p.AddSyntax(d.File()) + } + if err := d.Err(); err != nil { + p.ReportError(errors.Promote(err, "load")) + } + d.Close() + } +} + +func cleanImport(path string) string { + orig := path + path = pathpkg.Clean(path) + if strings.HasPrefix(orig, "./") && path != ".." && !strings.HasPrefix(path, "../") { + path = "./" + path + } + return path +} + +// An importStack is a stack of import paths, possibly with the suffix " (test)" appended. +// The import path of a test package is the import path of the corresponding +// non-test package with the suffix "_test" added. +type importStack []string + +func (s *importStack) Push(p string) { + *s = append(*s, p) +} + +func (s *importStack) Pop() { + *s = (*s)[0 : len(*s)-1] +} + +func (s *importStack) Copy() []string { + return append([]string{}, *s...) +} + +// shorterThan reports whether sp is shorter than t. +// We use this to record the shortest import sequences +// that leads to a particular package. +func (sp *importStack) shorterThan(t []string) bool { + s := *sp + if len(s) != len(t) { + return len(s) < len(t) + } + // If they are the same length, settle ties using string ordering. + for i := range s { + if s[i] != t[i] { + return s[i] < t[i] + } + } + return false // they are equal +} + +// reusePackage reuses package p to satisfy the import at the top +// of the import stack stk. If this use causes an import loop, +// reusePackage updates p's error information to record the loop. 
+func (l *loader) reusePackage(p *build.Instance) *build.Instance { + // We use p.Internal.Imports==nil to detect a package that + // is in the midst of its own loadPackage call + // (all the recursion below happens before p.Internal.Imports gets set). + if p.ImportPaths == nil { + if err := lastError(p); err == nil { + err = l.errPkgf(nil, "import cycle not allowed") + err.IsImportCycle = true + report(p, err) + } + p.Incomplete = true + } + // Don't rewrite the import stack in the error if we have an import cycle. + // If we do, we'll lose the path that describes the cycle. + if err := lastError(p); err != nil && !err.IsImportCycle && l.stk.shorterThan(err.ImportStack) { + err.ImportStack = l.stk.Copy() + } + return p +} + +// dirToImportPath returns the pseudo-import path we use for a package +// outside the CUE path. It begins with _/ and then contains the full path +// to the directory. If the package lives in c:\home\gopher\my\pkg then +// the pseudo-import path is _/c_/home/gopher/my/pkg. +// Using a pseudo-import path like this makes the ./ imports no longer +// a special case, so that all the code to deal with ordinary imports works +// automatically. +func dirToImportPath(dir string) string { + return pathpkg.Join("_", strings.Map(makeImportValid, filepath.ToSlash(dir))) +} + +func makeImportValid(r rune) rune { + // Should match Go spec, compilers, and ../../go/parser/parser.go:/isValidImport. 
+ const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD" + if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) { + return '_' + } + return r +} diff --git a/vendor/cuelang.org/go/cue/load/match.go b/vendor/cuelang.org/go/cue/load/match.go new file mode 100644 index 000000000..4d3378c5c --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/match.go @@ -0,0 +1,137 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package load + +import ( + "io/ioutil" + "path/filepath" + "strings" + "unicode" + + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +// matchFile determines whether the file with the given name in the given directory +// should be included in the package being constructed. +// It returns the data read from the file. +// If returnImports is true and name denotes a CUE file, matchFile reads +// until the end of the imports (and returns that data) even though it only +// considers text until the first non-comment. +// If allTags is non-nil, matchFile records any encountered build tag +// by setting allTags[tag] = true. 
+func matchFile(cfg *Config, file *build.File, returnImports, allFiles bool, allTags map[string]bool) (match bool, data []byte, err errors.Error) { + if fi := cfg.fileSystem.getOverlay(file.Filename); fi != nil { + if fi.file != nil { + file.Source = fi.file + } else { + file.Source = fi.contents + } + } + + if file.Encoding != build.CUE { + return + } + + if file.Filename == "-" { + b, err2 := ioutil.ReadAll(cfg.stdin()) + if err2 != nil { + err = errors.Newf(token.NoPos, "read stdin: %v", err) + return + } + file.Source = b + data = b + match = true // don't check shouldBuild for stdin + return + } + + name := filepath.Base(file.Filename) + if !cfg.filesMode && strings.HasPrefix(name, ".") { + return + } + + if strings.HasPrefix(name, "_") { + return + } + + f, err := cfg.fileSystem.openFile(file.Filename) + if err != nil { + return + } + + data, err = readImports(f, false, nil) + f.Close() + if err != nil { + err = errors.Newf(token.NoPos, "read %s: %v", file.Filename, err) + return + } + + match = true + return +} + +// doMatch reports whether the name is one of: +// +// tag (if tag is listed in cfg.Build.BuildTags or cfg.Build.ReleaseTags) +// !tag (if tag is not listed in cfg.Build.BuildTags or cfg.Build.ReleaseTags) +// a comma-separated list of any of these +// +func doMatch(cfg *Config, name string, allTags map[string]bool) bool { + if name == "" { + if allTags != nil { + allTags[name] = true + } + return false + } + if i := strings.Index(name, ","); i >= 0 { + // comma-separated list + ok1 := doMatch(cfg, name[:i], allTags) + ok2 := doMatch(cfg, name[i+1:], allTags) + return ok1 && ok2 + } + if strings.HasPrefix(name, "!!") { // bad syntax, reject always + return false + } + if strings.HasPrefix(name, "!") { // negation + return len(name) > 1 && !doMatch(cfg, name[1:], allTags) + } + + if allTags != nil { + allTags[name] = true + } + + // Tags must be letters, digits, underscores or dots. + // Unlike in CUE identifiers, all digits are fine (e.g., "386"). 
+ for _, c := range name { + if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' { + return false + } + } + + // other tags + for _, tag := range cfg.BuildTags { + if tag == name { + return true + } + } + for _, tag := range cfg.releaseTags { + if tag == name { + return true + } + } + + return false +} diff --git a/vendor/cuelang.org/go/cue/load/package.go b/vendor/cuelang.org/go/cue/load/package.go new file mode 100644 index 000000000..79759a5a3 --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/package.go @@ -0,0 +1,49 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package load + +import ( + "unicode/utf8" +) + +// Package rules: +// +// - the package clause defines a namespace. +// - a cue file without a package clause is a standalone file. +// - all files with the same package name within a directory and its +// ancestor directories up to the package root belong to the same package. +// - The package root is either the top of the file hierarchy or the first +// directory in which a cue.mod file is defined. +// +// The contents of a namespace depends on the directory that is selected as the +// starting point to load a package. An instance defines a package-directory +// pair. + +// safeArg reports whether arg is a "safe" command-line argument, +// meaning that when it appears in a command-line, it probably +// doesn't have some special meaning other than its own name. 
// safeArg reports whether name is a "safe" command-line argument: one that
// almost certainly has no meaning beyond its own name. Args starting with
// '-' look like flags, and args starting with '@' look like GNU binutils
// response files, so only names whose first byte is an ASCII letter or
// digit, '.', '_', '/', or part of a non-ASCII rune are accepted.
// There is a copy of this function in cmd/compile/internal/gc/noder.go.
func safeArg(name string) bool {
	if len(name) == 0 {
		return false
	}
	switch c := name[0]; {
	case '0' <= c && c <= '9', 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z':
		return true
	case c == '.', c == '_', c == '/':
		return true
	default:
		// Any byte >= RuneSelf starts a multi-byte (non-ASCII) rune.
		return c >= utf8.RuneSelf
	}
}
// isIdent reports whether c may appear in a CUE identifier: an ASCII
// letter or digit, an underscore, or any byte belonging to a non-ASCII
// rune (c >= utf8.RuneSelf).
func isIdent(c byte) bool {
	switch {
	case c >= utf8.RuneSelf:
		return true
	case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z':
		return true
	case '0' <= c && c <= '9', c == '_':
		return true
	}
	return false
}
+ switch c { + case ' ', '\f', '\t', '\r', '\n', ';': + c = r.readByte() + continue + + case '/': + c = r.readByte() + if c == '/' { + for c != '\n' && r.err == nil && !r.eof { + c = r.readByte() + } + } else if c == '*' { + var c1 byte + for (c != '*' || c1 != '/') && r.err == nil { + if r.eof { + r.syntaxError() + } + c, c1 = c1, r.readByte() + } + } else { + r.syntaxError() + } + c = r.readByte() + continue + } + } + break + } + r.peek = c + return r.peek +} + +// nextByte is like peekByte but advances beyond the returned byte. +func (r *importReader) nextByte(skipSpace bool) byte { + c := r.peekByte(skipSpace) + r.peek = 0 + return c +} + +// readKeyword reads the given keyword from the input. +// If the keyword is not present, readKeyword records a syntax error. +func (r *importReader) readKeyword(kw string) { + r.peekByte(true) + for i := 0; i < len(kw); i++ { + if r.nextByte(false) != kw[i] { + r.syntaxError() + return + } + } + if isIdent(r.peekByte(false)) { + r.syntaxError() + } +} + +// readIdent reads an identifier from the input. +// If an identifier is not present, readIdent records a syntax error. +func (r *importReader) readIdent() { + c := r.peekByte(true) + if !isIdent(c) { + r.syntaxError() + return + } + for isIdent(r.peekByte(false)) { + r.peek = 0 + } +} + +// readString reads a quoted string literal from the input. +// If an identifier is not present, readString records a syntax error. 
+func (r *importReader) readString(save *[]string) { + switch r.nextByte(true) { + case '`': + start := len(r.buf) - 1 + for r.err == nil { + if r.nextByte(false) == '`' { + if save != nil { + *save = append(*save, string(r.buf[start:])) + } + break + } + if r.eof { + r.syntaxError() + } + } + case '"': + start := len(r.buf) - 1 + for r.err == nil { + c := r.nextByte(false) + if c == '"' { + if save != nil { + *save = append(*save, string(r.buf[start:])) + } + break + } + if r.eof || c == '\n' { + r.syntaxError() + } + if c == '\\' { + r.nextByte(false) + } + } + default: + r.syntaxError() + } +} + +// readImport reads an import clause - optional identifier followed by quoted string - +// from the input. +func (r *importReader) readImport(imports *[]string) { + c := r.peekByte(true) + if c == '.' { + r.peek = 0 + } else if isIdent(c) { + r.readIdent() + } + r.readString(imports) +} + +// readComments is like ioutil.ReadAll, except that it only reads the leading +// block of comments in the file. +func readComments(f io.Reader) ([]byte, errors.Error) { + r := &importReader{b: bufio.NewReader(f)} + r.peekByte(true) + if r.err == nil && !r.eof { + // Didn't reach EOF, so must have found a non-space byte. Remove it. + r.buf = r.buf[:len(r.buf)-1] + } + return r.buf, r.err +} + +// readImports is like ioutil.ReadAll, except that it expects a CUE file as +// input and stops reading the input once the imports have completed. +func readImports(f io.Reader, reportSyntaxError bool, imports *[]string) ([]byte, errors.Error) { + r := &importReader{b: bufio.NewReader(f)} + + r.readKeyword("package") + r.readIdent() + for r.peekByte(true) == 'i' { + r.readKeyword("import") + if r.peekByte(true) == '(' { + r.nextByte(false) + for r.peekByte(true) != ')' && r.err == nil { + r.readImport(imports) + } + r.nextByte(false) + } else { + r.readImport(imports) + } + } + + // If we stopped successfully before EOF, we read a byte that told us we were done. 
+ // Return all but that last byte, which would cause a syntax error if we let it through. + if r.err == nil && !r.eof { + return r.buf[:len(r.buf)-1], nil + } + + // If we stopped for a syntax error, consume the whole file so that + // we are sure we don't change the errors that go/parser returns. + if r.err == errSyntax && !reportSyntaxError { + r.err = nil + for r.err == nil && !r.eof { + r.readByte() + } + } + + return r.buf, r.err +} diff --git a/vendor/cuelang.org/go/cue/load/search.go b/vendor/cuelang.org/go/cue/load/search.go new file mode 100644 index 000000000..a6443d955 --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/search.go @@ -0,0 +1,541 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package load + +import ( + // TODO: remove this usage + + "os" + "path" + "path/filepath" + "regexp" + "strings" + + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +// A match represents the result of matching a single package pattern. +type match struct { + Pattern string // the pattern itself + Literal bool // whether it is a literal (no wildcards) + Pkgs []*build.Instance + Err errors.Error +} + +// TODO: should be matched from module file only. +// The pattern is either "all" (all packages), "std" (standard packages), +// "cmd" (standard commands), or a path including "...". 
+func (l *loader) matchPackages(pattern, pkgName string) *match { + // cfg := l.cfg + m := &match{ + Pattern: pattern, + Literal: false, + } + // match := func(string) bool { return true } + // treeCanMatch := func(string) bool { return true } + // if !isMetaPackage(pattern) { + // match = matchPattern(pattern) + // treeCanMatch = treeCanMatchPattern(pattern) + // } + + // have := map[string]bool{ + // "builtin": true, // ignore pseudo-package that exists only for documentation + // } + + // for _, src := range cfg.srcDirs() { + // if pattern == "std" || pattern == "cmd" { + // continue + // } + // src = filepath.Clean(src) + string(filepath.Separator) + // root := src + // if pattern == "cmd" { + // root += "cmd" + string(filepath.Separator) + // } + // filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { + // if err != nil || path == src { + // return nil + // } + + // want := true + // // Avoid .foo, _foo, and testdata directory trees. + // _, elem := filepath.Split(path) + // if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { + // want = false + // } + + // name := filepath.ToSlash(path[len(src):]) + // if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { + // // The name "std" is only the standard library. + // // If the name is cmd, it's the root of the command tree. 
+ // want = false + // } + // if !treeCanMatch(name) { + // want = false + // } + + // if !fi.IsDir() { + // if fi.Mode()&os.ModeSymlink != 0 && want { + // if target, err := os.Stat(path); err == nil && target.IsDir() { + // fmt.Fprintf(os.Stderr, "warning: ignoring symlink %s\n", path) + // } + // } + // return nil + // } + // if !want { + // return skipDir + // } + + // if have[name] { + // return nil + // } + // have[name] = true + // if !match(name) { + // return nil + // } + // pkg := l.importPkg(".", path) + // if err := pkg.Error; err != nil { + // if _, noGo := err.(*noCUEError); noGo { + // return nil + // } + // } + + // // If we are expanding "cmd", skip main + // // packages under cmd/vendor. At least as of + // // March, 2017, there is one there for the + // // vendored pprof tool. + // if pattern == "cmd" && strings.HasPrefix(pkg.DisplayPath, "cmd/vendor") && pkg.PkgName == "main" { + // return nil + // } + + // m.Pkgs = append(m.Pkgs, pkg) + // return nil + // }) + // } + return m +} + +// matchPackagesInFS is like allPackages but is passed a pattern +// beginning ./ or ../, meaning it should scan the tree rooted +// at the given directory. There are ... in the pattern too. +// (See go help packages for pattern syntax.) +func (l *loader) matchPackagesInFS(pattern, pkgName string) *match { + c := l.cfg + m := &match{ + Pattern: pattern, + Literal: false, + } + + // Find directory to begin the scan. + // Could be smarter but this one optimization + // is enough for now, since ... is usually at the + // end of a path. + i := strings.Index(pattern, "...") + dir, _ := path.Split(pattern[:i]) + + root := l.abs(dir) + + // Find new module root from here or check there are no additional + // cue.mod files between here and the next module. 
+ + if !hasFilepathPrefix(root, c.ModuleRoot) { + m.Err = errors.Newf(token.NoPos, + "cue: pattern %s refers to dir %s, outside module root %s", + pattern, root, c.ModuleRoot) + return m + } + + pkgDir := filepath.Join(root, modDir) + // TODO(legacy): remove + pkgDir2 := filepath.Join(root, "pkg") + + _ = c.fileSystem.walk(root, func(path string, fi os.FileInfo, err errors.Error) errors.Error { + if err != nil || !fi.IsDir() { + return nil + } + if path == pkgDir || path == pkgDir2 { + return skipDir + } + + top := path == root + + // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". + _, elem := filepath.Split(path) + dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." + if dot || strings.HasPrefix(elem, "_") || (elem == "testdata" && !top) { + return skipDir + } + + if !top { + // Ignore other modules found in subdirectories. + if _, err := c.fileSystem.stat(filepath.Join(path, modDir)); err == nil { + return skipDir + } + } + + // name := prefix + filepath.ToSlash(path) + // if !match(name) { + // return nil + // } + + // We keep the directory if we can import it, or if we can't import it + // due to invalid CUE source files. This means that directories + // containing parse errors will be built (and fail) instead of being + // silently skipped as not matching the pattern. + // Do not take root, as we want to stay relative + // to one dir only. + dir, e := filepath.Rel(c.Dir, path) + if e != nil { + panic(err) + } else { + dir = "./" + dir + } + // TODO: consider not doing these checks here. + inst := c.newRelInstance(token.NoPos, dir, pkgName) + pkgs := l.importPkg(token.NoPos, inst) + for _, p := range pkgs { + if err := p.Err; err != nil && (p == nil || len(p.InvalidCUEFiles) == 0) { + switch err.(type) { + case nil: + break + case *NoFilesError: + if c.DataFiles && len(p.OrphanedFiles) > 0 { + break + } + return nil + default: + m.Err = errors.Append(m.Err, err) + } + } + } + + m.Pkgs = append(m.Pkgs, pkgs...) 
+ return nil + }) + return m +} + +// treeCanMatchPattern(pattern)(name) reports whether +// name or children of name can possibly match pattern. +// Pattern is the same limited glob accepted by matchPattern. +func treeCanMatchPattern(pattern string) func(name string) bool { + wildCard := false + if i := strings.Index(pattern, "..."); i >= 0 { + wildCard = true + pattern = pattern[:i] + } + return func(name string) bool { + return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || + wildCard && strings.HasPrefix(name, pattern) + } +} + +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +// Unfortunately, there are two special cases. Quoting "go help packages": +// +// First, /... at the end of the pattern can match an empty string, +// so that net/... matches both net and packages in its subdirectories, like net/http. +// Second, any slash-separted pattern element containing a wildcard never +// participates in a match of the "vendor" element in the path of a vendored +// package, so that ./... does not match packages in subdirectories of +// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do. +// Note, however, that a directory named vendor that itself contains code +// is not a vendored package: cmd/vendor would be a command named vendor, +// and the pattern cmd/... matches it. +func matchPattern(pattern string) func(name string) bool { + // Convert pattern to regular expression. + // The strategy for the trailing /... is to nest it in an explicit ? expression. + // The strategy for the vendor exclusion is to change the unmatchable + // vendor strings to a disallowed code point (vendorChar) and to use + // "(anything but that codepoint)*" as the implementation of the ... wildcard. 
// replaceVendor returns x with every non-trailing path element equal to
// "vendor" replaced by repl. The final element is never rewritten: a
// trailing "vendor" names an ordinary package rather than a vendor
// directory.
func replaceVendor(x, repl string) string {
	if !strings.Contains(x, "vendor") {
		return x
	}
	parts := strings.Split(x, "/")
	last := len(parts) - 1
	for i, p := range parts[:last] {
		if p == "vendor" {
			parts[i] = repl
		}
	}
	return strings.Join(parts, "/")
}
// cleanPatterns returns the canonical form of the command-line patterns
// without evaluating any matches. An empty argument list is equivalent to
// the single pattern ".". A leading "./" is preserved so relative patterns
// stay recognizably relative after cleaning.
func cleanPatterns(patterns []string) []string {
	if len(patterns) == 0 {
		return []string{"."}
	}
	out := make([]string, 0, len(patterns))
	for _, a := range patterns {
		// Arguments are supposed to be import paths, but as a courtesy to
		// Windows developers, rewrite \ to / in command-line arguments.
		// Handles .\... and so on.
		if filepath.Separator == '\\' {
			a = strings.Replace(a, `\`, `/`, -1)
		}

		// Canonicalize, keeping the leading "./" marker intact.
		if strings.HasPrefix(a, "./") {
			if a = "./" + path.Clean(a); a == "./." {
				a = "."
			}
		} else {
			a = path.Clean(a)
		}
		out = append(out, a)
	}
	return out
}
// isStandardImportPath reports whether $GOROOT/src/path should be
// considered part of the standard distribution: the first path element
// must contain no dot, since third-party code is expected to start with
// a domain name. It must not be used to reject GOPATH directories, whose
// first element need not contain a dot.
func isStandardImportPath(path string) bool {
	first := path
	if i := strings.IndexByte(path, '/'); i >= 0 {
		first = path[:i]
	}
	return !strings.Contains(first, ".")
}
+ } + return "" + case dir == "": + return path + case len(path) > len(dir): + if dir[len(dir)-1] == filepath.Separator { + if path[:len(dir)] == dir { + return path[len(dir):] + } + return "" + } + if path[len(dir)] == filepath.Separator && path[:len(dir)] == dir { + if len(path) == len(dir)+1 { + return "." + } + return path[len(dir)+1:] + } + return "" + } +} diff --git a/vendor/cuelang.org/go/cue/load/source.go b/vendor/cuelang.org/go/cue/load/source.go new file mode 100644 index 000000000..048558564 --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/source.go @@ -0,0 +1,63 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package load + +import ( + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/format" +) + +// A Source represents file contents. +type Source interface { + contents() ([]byte, *ast.File, error) +} + +// FromString creates a Source from the given string. +func FromString(s string) Source { + return stringSource(s) +} + +// FromBytes creates a Source from the given bytes. The contents are not +// copied and should not be modified. +func FromBytes(b []byte) Source { + return bytesSource(b) +} + +// FromFile creates a Source from the given *ast.File. The file should not be +// modified. It is assumed the file is error-free. 
+func FromFile(f *ast.File) Source { + return (*fileSource)(f) +} + +type stringSource string + +func (s stringSource) contents() ([]byte, *ast.File, error) { + return []byte(s), nil, nil +} + +type bytesSource []byte + +func (s bytesSource) contents() ([]byte, *ast.File, error) { + return []byte(s), nil, nil +} + +type fileSource ast.File + +func (s *fileSource) contents() ([]byte, *ast.File, error) { + f := (*ast.File)(s) + // TODO: wasteful formatting, but needed for now. + b, err := format.Node(f) + return b, f, err +} diff --git a/vendor/cuelang.org/go/cue/load/tags.go b/vendor/cuelang.org/go/cue/load/tags.go new file mode 100644 index 000000000..6692ac20c --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/tags.go @@ -0,0 +1,291 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package load + +import ( + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + "cuelang.org/go/internal/cli" +) + +// A tag binds an identifier to a field to allow passing command-line values. +// +// A tag is of the form +// @tag(,[type=(string|int|number|bool)][,short=+]) +// +// The name is mandatory and type defaults to string. Tags are set using the -t +// option on the command line. 
-t name=value will parse value for the type +// defined for name and set the field for which this tag was defined to this +// value. A tag may be associated with multiple fields. +// +// Tags also allow shorthands. If a shorthand bar is declared for a tag with +// name foo, then -t bar is identical to -t foo=bar. +// +// It is a deliberate choice to not allow other values to be associated with +// shorthands than the shorthand name itself. Doing so would create a powerful +// mechanism that would assign different values to different fields based on the +// same shorthand, duplicating functionality that is already available in CUE. +type tag struct { + key string + kind cue.Kind + shorthands []string + + field *ast.Field +} + +func parseTag(pos token.Pos, body string) (t tag, err errors.Error) { + t.kind = cue.StringKind + + a := internal.ParseAttrBody(pos, body) + + t.key, _ = a.String(0) + if !ast.IsValidIdent(t.key) { + return t, errors.Newf(pos, "invalid identifier %q", t.key) + } + + if s, ok, _ := a.Lookup(1, "type"); ok { + switch s { + case "string": + case "int": + t.kind = cue.IntKind + case "number": + t.kind = cue.NumberKind + case "bool": + t.kind = cue.BoolKind + default: + return t, errors.Newf(pos, "invalid type %q", s) + } + } + + if s, ok, _ := a.Lookup(1, "short"); ok { + for _, s := range strings.Split(s, "|") { + if !ast.IsValidIdent(t.key) { + return t, errors.Newf(pos, "invalid identifier %q", s) + } + t.shorthands = append(t.shorthands, s) + } + } + + return t, nil +} + +func (t *tag) inject(value string, l *loader) errors.Error { + e, err := cli.ParseValue(token.NoPos, t.key, value, t.kind) + if err != nil { + return err + } + injected := ast.NewBinExpr(token.AND, t.field.Value, e) + if l.replacements == nil { + l.replacements = map[ast.Node]ast.Node{} + } + l.replacements[t.field.Value] = injected + t.field.Value = injected + return nil +} + +// findTags defines which fields may be associated with tags. 
// injectTags parses the -t command-line arguments and injects each value
// into the fields associated with the matching tag (or shorthand).
//
// Each argument is either "key=value" (parsed per the tag's declared type)
// or a bare shorthand name. An argument that matches neither a declared tag
// nor a declared shorthand yields an error, unless it names a build tag
// that was seen while evaluating @if attributes (l.buildTags).
func injectTags(tags []string, l *loader) errors.Error {
	// Parses command line args
	for _, s := range tags {
		p := strings.Index(s, "=")
		// NOTE(review): the lookup uses the full argument s (including any
		// "=value" suffix) as the buildTags key; presumably @if build tags
		// are only ever bare names, so this matters only for the shorthand
		// branch — confirm against upstream before changing.
		found := l.buildTags[s]
		if p > 0 { // key-value
			// A tag may be associated with multiple fields; inject into all
			// of them.
			for _, t := range l.tags {
				if t.key == s[:p] {
					found = true
					if err := t.inject(s[p+1:], l); err != nil {
						return err
					}
				}
			}
			if !found {
				return errors.Newf(token.NoPos, "no tag for %q", s[:p])
			}
		} else { // shorthand
			// A shorthand sets its owning tag to the shorthand name itself.
			for _, t := range l.tags {
				for _, sh := range t.shorthands {
					if sh == s {
						found = true
						if err := t.inject(s, l); err != nil {
							return err
						}
					}
				}
			}
			if !found {
				return errors.Newf(token.NoPos, "tag %q not used in any file", s)
			}
		}
	}
	return nil
}
// getBuildAttr returns the file-level @if attribute of f, or nil if the
// file has none. Declaring more than one @if attribute is an error; the
// error reports both the duplicate and the original declaration.
func getBuildAttr(f *ast.File) (*ast.Attribute, errors.Error) {
	var a *ast.Attribute
	for _, d := range f.Decls {
		switch x := d.(type) {
		case *ast.Attribute:
			key, _ := x.Split()
			if key != "if" {
				continue
			}
			if a != nil {
				err := errors.Newf(d.Pos(), "multiple @if attributes")
				err = errors.Append(err,
					errors.Newf(a.Pos(), "previous declaration here"))
				return nil, err
			}
			a = x

		case *ast.Package:
			// NOTE(review): this break exits only the switch, not the loop,
			// so it is a no-op; presumably the intent was to stop scanning
			// declarations at the package clause. Confirm against upstream
			// before changing.
			break
		}
	}
	return a, nil
}
--git a/vendor/cuelang.org/go/cue/load/test.cue b/vendor/cuelang.org/go/cue/load/test.cue new file mode 100644 index 000000000..3b90117c2 --- /dev/null +++ b/vendor/cuelang.org/go/cue/load/test.cue @@ -0,0 +1,3 @@ +package test + +"Hello world!" \ No newline at end of file diff --git a/vendor/cuelang.org/go/cue/marshal.go b/vendor/cuelang.org/go/cue/marshal.go new file mode 100644 index 000000000..8d8652526 --- /dev/null +++ b/vendor/cuelang.org/go/cue/marshal.go @@ -0,0 +1,223 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +import ( + "bytes" + "compress/gzip" + "encoding/gob" + "path/filepath" + "strings" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/ast/astutil" + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/format" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + "cuelang.org/go/internal/core/export" +) + +// root. 
+type instanceData struct { + Root bool + Path string + Files []fileData +} + +type fileData struct { + Name string + Data []byte +} + +const version = 1 + +type unmarshaller struct { + ctxt *build.Context + imports map[string]*instanceData +} + +func (b *unmarshaller) load(pos token.Pos, path string) *build.Instance { + bi := b.imports[path] + if bi == nil { + return nil + } + return b.build(bi) +} + +func (b *unmarshaller) build(bi *instanceData) *build.Instance { + p := b.ctxt.NewInstance(bi.Path, b.load) + p.ImportPath = bi.Path + for _, f := range bi.Files { + _ = p.AddFile(f.Name, f.Data) + } + p.Complete() + return p +} + +func compileInstances(r *Runtime, data []*instanceData) (instances []*Instance, err error) { + b := unmarshaller{ + ctxt: build.NewContext(), + imports: map[string]*instanceData{}, + } + for _, i := range data { + if i.Path == "" { + if !i.Root { + return nil, errors.Newf(token.NoPos, + "data contains non-root package without import path") + } + continue + } + b.imports[i.Path] = i + } + + builds := []*build.Instance{} + for _, i := range data { + if !i.Root { + continue + } + builds = append(builds, b.build(i)) + } + + return r.build(builds) +} + +// Unmarshal creates an Instance from bytes generated by the MarshalBinary +// method of an instance. 
+func (r *Runtime) Unmarshal(b []byte) ([]*Instance, error) { + if len(b) == 0 { + return nil, errors.Newf(token.NoPos, "unmarshal failed: empty buffer") + } + + switch b[0] { + case version: + default: + return nil, errors.Newf(token.NoPos, + "unmarshal failed: unsupported version %d, regenerate data", b[0]) + } + + reader, err := gzip.NewReader(bytes.NewReader(b[1:])) + if err != nil { + return nil, errors.Newf(token.NoPos, "unmarshal failed: %v", err) + } + + data := []*instanceData{} + err = gob.NewDecoder(reader).Decode(&data) + if err != nil { + return nil, errors.Newf(token.NoPos, "unmarshal failed: %v", err) + } + + return compileInstances(r, data) +} + +// Marshal creates bytes from a group of instances. Imported instances will +// be included in the emission. +// +// The stored instances are functionally the same, but preserving of file +// information is only done on a best-effort basis. +func (r *Runtime) Marshal(instances ...*Instance) (b []byte, err error) { + ctx := r.index().newContext() + + staged := []instanceData{} + done := map[string]int{} + + var errs errors.Error + + var stageInstance func(i *Instance) (pos int) + stageInstance = func(i *Instance) (pos int) { + if p, ok := done[i.ImportPath]; ok { + return p + } + // TODO: support exporting instance + file, _ := export.Def(r.idx.Runtime, i.inst.ID(), i.root) + imports := []string{} + file.VisitImports(func(i *ast.ImportDecl) { + for _, spec := range i.Specs { + info, _ := astutil.ParseImportSpec(spec) + imports = append(imports, info.ID) + } + }) + + if i.PkgName != "" { + p, name, _ := internal.PackageInfo(file) + if p == nil { + pkg := &ast.Package{Name: ast.NewIdent(i.PkgName)} + file.Decls = append([]ast.Decl{pkg}, file.Decls...) + } else if name != i.PkgName { + // p is guaranteed to be generated by Def, so it is "safe" to + // modify. 
+ p.Name = ast.NewIdent(i.PkgName) + } + } + + b, err := format.Node(file) + errs = errors.Append(errs, errors.Promote(err, "marshal")) + + filename := "unmarshal" + if i.inst != nil && len(i.inst.Files) == 1 { + filename = i.inst.Files[0].Filename + + dir := i.Dir + if i.inst != nil && i.inst.Root != "" { + dir = i.inst.Root + } + if dir != "" { + filename = filepath.FromSlash(filename) + filename, _ = filepath.Rel(dir, filename) + filename = filepath.ToSlash(filename) + } + } + // TODO: this should probably be changed upstream, but as the path + // is for reference purposes only, this is safe. + importPath := filepath.ToSlash(i.ImportPath) + + staged = append(staged, instanceData{ + Path: importPath, + Files: []fileData{{filename, b}}, + }) + + p := len(staged) - 1 + + for _, imp := range imports { + i := ctx.getImportFromPath(imp) + if i == nil || !strings.Contains(imp, ".") { + continue // a builtin package. + } + stageInstance(i) + } + + return p + } + + for _, i := range instances { + staged[stageInstance(i)].Root = true + } + + buf := &bytes.Buffer{} + buf.WriteByte(version) + + zw := gzip.NewWriter(buf) + if err := gob.NewEncoder(zw).Encode(staged); err != nil { + return nil, err + } + + if err := zw.Close(); err != nil { + return nil, err + } + + return buf.Bytes(), nil + +} diff --git a/vendor/cuelang.org/go/cue/op.go b/vendor/cuelang.org/go/cue/op.go new file mode 100644 index 000000000..26f564d29 --- /dev/null +++ b/vendor/cuelang.org/go/cue/op.go @@ -0,0 +1,182 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +import ( + "cuelang.org/go/cue/token" + "cuelang.org/go/internal/core/adt" +) + +// Op indicates the operation at the top of an expression tree of the expression +// use to evaluate a value. +type Op = adt.Op + +// Values of Op. +const ( + NoOp = adt.NoOp + + AndOp = adt.AndOp + OrOp = adt.OrOp + + SelectorOp = adt.SelectorOp + IndexOp = adt.IndexOp + SliceOp = adt.SliceOp + CallOp = adt.CallOp + + BooleanAndOp = adt.BoolAndOp + BooleanOrOp = adt.BoolOrOp + + EqualOp = adt.EqualOp + NotOp = adt.NotOp + NotEqualOp = adt.NotEqualOp + LessThanOp = adt.LessThanOp + LessThanEqualOp = adt.LessEqualOp + GreaterThanOp = adt.GreaterThanOp + GreaterThanEqualOp = adt.GreaterEqualOp + + RegexMatchOp = adt.MatchOp + NotRegexMatchOp = adt.NotMatchOp + + AddOp = adt.AddOp + SubtractOp = adt.SubtractOp + MultiplyOp = adt.MultiplyOp + FloatQuotientOp = adt.FloatQuotientOp + IntQuotientOp = adt.IntQuotientOp + IntRemainderOp = adt.IntRemainderOp + IntDivideOp = adt.IntDivideOp + IntModuloOp = adt.IntModuloOp + + InterpolationOp = adt.InterpolationOp +) + +// isCmp reports whether an op is a comparator. +func (op op) isCmp() bool { + return opEql <= op && op <= opGeq +} + +func (op op) unifyType() (unchecked, ok bool) { + if op == opUnifyUnchecked { + return true, true + } + return false, op == opUnify +} + +type op uint16 + +const ( + opUnknown op = iota + + opUnify + opUnifyUnchecked + opDisjunction + + opLand + opLor + opNot + + opEql + opNeq + opMat + opNMat + + opLss + opGtr + opLeq + opGeq + + opAdd + opSub + opMul + opQuo + opRem + + opIDiv + opIMod + opIQuo + opIRem +) + +var opStrings = []string{ + opUnknown: "??", + + opUnify: "&", + // opUnifyUnchecked is internal only. Syntactically this is + // represented as embedding. 
+ opUnifyUnchecked: "&!", + opDisjunction: "|", + + opLand: "&&", + opLor: "||", + opNot: "!", + + opEql: "==", + opNeq: "!=", + opMat: "=~", + opNMat: "!~", + + opLss: "<", + opGtr: ">", + opLeq: "<=", + opGeq: ">=", + + opAdd: "+", + opSub: "-", + opMul: "*", + opQuo: "/", + + opIDiv: "div", + opIMod: "mod", + opIQuo: "quo", + opIRem: "rem", +} + +func (op op) String() string { return opStrings[op] } + +var tokenMap = map[token.Token]op{ + token.OR: opDisjunction, // | + token.AND: opUnify, // & + + token.ADD: opAdd, // + + token.SUB: opSub, // - + token.MUL: opMul, // * + token.QUO: opQuo, // / + + token.IDIV: opIDiv, // div + token.IMOD: opIMod, // mod + token.IQUO: opIQuo, // quo + token.IREM: opIRem, // rem + + token.LAND: opLand, // && + token.LOR: opLor, // || + + token.EQL: opEql, // == + token.LSS: opLss, // < + token.GTR: opGtr, // > + token.NOT: opNot, // ! + + token.NEQ: opNeq, // != + token.LEQ: opLeq, // <= + token.GEQ: opGeq, // >= + token.MAT: opMat, // =~ + token.NMAT: opNMat, // !~ +} + +var opMap = map[op]token.Token{} + +func init() { + for t, o := range tokenMap { + opMap[o] = t + } +} diff --git a/vendor/cuelang.org/go/cue/parser/doc.go b/vendor/cuelang.org/go/cue/parser/doc.go new file mode 100644 index 000000000..adde13989 --- /dev/null +++ b/vendor/cuelang.org/go/cue/parser/doc.go @@ -0,0 +1,23 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Package parser implements a parser for CUE source files. Input may be +// provided in a variety of forms (see the various Parse* functions); the output +// is an abstract syntax tree (AST) representing the CUE source. The parser is +// invoked through one of the Parse* functions. +// +// The parser accepts a larger language than is syntactically permitted by the +// CUE spec, for simplicity, and for improved robustness in the presence of +// syntax errors. +package parser // import "cuelang.org/go/cue/parser" diff --git a/vendor/cuelang.org/go/cue/parser/fuzz.go b/vendor/cuelang.org/go/cue/parser/fuzz.go new file mode 100644 index 000000000..76d9ff062 --- /dev/null +++ b/vendor/cuelang.org/go/cue/parser/fuzz.go @@ -0,0 +1,25 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build gofuzz + +package parser + +func Fuzz(b []byte) int { + _, err := ParseFile("go-fuzz", b) + if err != nil { + return 0 + } + return 1 +} diff --git a/vendor/cuelang.org/go/cue/parser/interface.go b/vendor/cuelang.org/go/cue/parser/interface.go new file mode 100644 index 000000000..2fad23978 --- /dev/null +++ b/vendor/cuelang.org/go/cue/parser/interface.go @@ -0,0 +1,232 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file contains the exported entry points for invoking the + +package parser + +import ( + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/ast/astutil" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal/source" +) + +// Option specifies a parse option. +type Option func(p *parser) + +var ( + // PackageClauseOnly causes parsing to stop after the package clause. + PackageClauseOnly Option = packageClauseOnly + packageClauseOnly = func(p *parser) { + p.mode |= packageClauseOnlyMode + } + + // ImportsOnly causes parsing to stop parsing after the import declarations. + ImportsOnly Option = importsOnly + importsOnly = func(p *parser) { + p.mode |= importsOnlyMode + } + + // ParseComments causes comments to be parsed. + ParseComments Option = parseComments + parseComments = func(p *parser) { + p.mode |= parseCommentsMode + } + + // Trace causes parsing to print a trace of parsed productions. + Trace Option = traceOpt + traceOpt = func(p *parser) { + p.mode |= traceMode + } + + // DeclarationErrors causes parsing to report declaration errors. + DeclarationErrors Option = declarationErrors + declarationErrors = func(p *parser) { + p.mode |= declarationErrorsMode + } + + // AllErrors causes all errors to be reported (not just the first 10 on different lines). + AllErrors Option = allErrors + allErrors = func(p *parser) { + p.mode |= allErrorsMode + } + + // AllowPartial allows the parser to be used on a prefix buffer. 
+ AllowPartial Option = allowPartial + allowPartial = func(p *parser) { + p.mode |= partialMode + } +) + +// FromVersion specifies until which legacy version the parser should provide +// backwards compatibility. +func FromVersion(version int) Option { + if version >= 0 { + version++ + } + // Versions: + // <0: major version 0 (counting -1000 + x, where x = 100*m+p in 0.m.p + // >=0: x+1 in 1.x.y + return func(p *parser) { p.version = version } +} + +func version0(minor, patch int) int { + return -1000 + 100*minor + patch +} + +// DeprecationError is a sentinel error to indicate that an error is +// related to an unsupported old CUE syntax. +type DeprecationError struct { + Version int +} + +func (e *DeprecationError) Error() string { + return "try running `cue fix` using CUE v0.2.2 on the file or module to upgrade" +} + +// Latest specifies the latest version of the parser, effectively setting +// the strictest implementation. +const Latest = latest + +const latest = 1000 + +// FileOffset specifies the File position info to use. +func FileOffset(pos int) Option { + return func(p *parser) { p.offset = pos } +} + +// A mode value is a set of flags (or 0). +// They control the amount of source code parsed and other optional +// parser functionality. +type mode uint + +const ( + packageClauseOnlyMode mode = 1 << iota // stop parsing after package clause + importsOnlyMode // stop parsing after import declarations + parseCommentsMode // parse comments and add them to AST + partialMode + traceMode // print a trace of parsed productions + declarationErrorsMode // report declaration errors + allErrorsMode // report all errors (not just the first 10 on different lines) +) + +// ParseFile parses the source code of a single CUE source file and returns +// the corresponding File node. The source code may be provided via +// the filename of the source file, or via the src parameter. 
+// +// If src != nil, ParseFile parses the source from src and the filename is +// only used when recording position information. The type of the argument +// for the src parameter must be string, []byte, or io.Reader. +// If src == nil, ParseFile parses the file specified by filename. +// +// The mode parameter controls the amount of source text parsed and other +// optional parser functionality. Position information is recorded in the +// file set fset, which must not be nil. +// +// If the source couldn't be read, the returned AST is nil and the error +// indicates the specific failure. If the source was read but syntax +// errors were found, the result is a partial AST (with Bad* nodes +// representing the fragments of erroneous source code). Multiple errors +// are returned via a ErrorList which is sorted by file position. +func ParseFile(filename string, src interface{}, mode ...Option) (f *ast.File, err error) { + + // get source + text, err := source.Read(filename, src) + if err != nil { + return nil, err + } + + var pp parser + defer func() { + if pp.panicking { + _ = recover() + } + + // set result values + if f == nil { + // source is not a valid Go source file - satisfy + // ParseFile API and return a valid (but) empty + // *File + f = &ast.File{ + // Scope: NewScope(nil), + } + } + + err = errors.Sanitize(pp.errors) + }() + + // parse source + pp.init(filename, text, mode) + f = pp.parseFile() + if f == nil { + return nil, pp.errors + } + f.Filename = filename + astutil.Resolve(f, pp.errf) + + return f, pp.errors +} + +// ParseExpr is a convenience function for parsing an expression. +// The arguments have the same meaning as for Parse, but the source must +// be a valid CUE (type or value) expression. Specifically, fset must not +// be nil. 
+func ParseExpr(filename string, src interface{}, mode ...Option) (ast.Expr, error) { + // get source + text, err := source.Read(filename, src) + if err != nil { + return nil, err + } + + var p parser + defer func() { + if p.panicking { + _ = recover() + } + err = errors.Sanitize(p.errors) + }() + + // parse expr + p.init(filename, text, mode) + // Set up pkg-level scopes to avoid nil-pointer errors. + // This is not needed for a correct expression x as the + // parser will be ok with a nil topScope, but be cautious + // in case of an erroneous x. + e := p.parseRHS() + + // If a comma was inserted, consume it; + // report an error if there's more tokens. + if p.tok == token.COMMA && p.lit == "\n" { + p.next() + } + if p.mode&partialMode == 0 { + p.expect(token.EOF) + } + + if p.errors != nil { + return nil, p.errors + } + astutil.ResolveExpr(e, p.errf) + + return e, p.errors +} + +// parseExprString is a convenience function for obtaining the AST of an +// expression x. The position information recorded in the AST is undefined. The +// filename used in error messages is the empty string. +func parseExprString(x string) (ast.Expr, error) { + return ParseExpr("", []byte(x)) +} diff --git a/vendor/cuelang.org/go/cue/parser/parser.go b/vendor/cuelang.org/go/cue/parser/parser.go new file mode 100644 index 000000000..41daabd53 --- /dev/null +++ b/vendor/cuelang.org/go/cue/parser/parser.go @@ -0,0 +1,1712 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package parser + +import ( + "fmt" + "strings" + "unicode" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/scanner" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" +) + +// The parser structure holds the parser's internal state. +type parser struct { + file *token.File + offset int + errors errors.Error + scanner scanner.Scanner + + // Tracing/debugging + mode mode // parsing mode + trace bool // == (mode & Trace != 0) + panicking bool // set if we are bailing out due to too many errors. + indent int // indentation used for tracing output + + // Comments + leadComment *ast.CommentGroup + comments *commentState + + // Next token + pos token.Pos // token position + tok token.Token // one token look-ahead + lit string // token literal + + // Error recovery + // (used to limit the number of calls to syncXXX functions + // w/o making scanning progress - avoids potential endless + // loops across multiple parser functions during error recovery) + syncPos token.Pos // last synchronization position + syncCnt int // number of calls to syncXXX without progress + + // Non-syntactic parser control + exprLev int // < 0: in control clause, >= 0: in expression + + imports []*ast.ImportSpec // list of imports + + version int +} + +func (p *parser) init(filename string, src []byte, mode []Option) { + p.offset = -1 + for _, f := range mode { + f(p) + } + p.file = token.NewFile(filename, p.offset, len(src)) + + var m scanner.Mode + if p.mode&parseCommentsMode != 0 { + m = scanner.ScanComments + } + eh := func(pos token.Pos, msg string, args []interface{}) { + p.errors = errors.Append(p.errors, errors.Newf(pos, msg, args...)) + } + p.scanner.Init(p.file, src, eh, m) + + p.trace = p.mode&traceMode != 0 // for convenience (p.trace is used frequently) + + p.comments = &commentState{pos: -1} + + p.next() +} + 
+type commentState struct { + parent *commentState + pos int8 + groups []*ast.CommentGroup + + // lists are not attached to nodes themselves. Enclosed expressions may + // miss a comment due to commas and line termination. closeLists ensures + // that comments will be passed to someone. + isList int + lastChild ast.Node + lastPos int8 +} + +// openComments reserves the next doc comment for the caller and flushes +func (p *parser) openComments() *commentState { + child := &commentState{ + parent: p.comments, + } + if c := p.comments; c != nil && c.isList > 0 { + if c.lastChild != nil { + var groups []*ast.CommentGroup + for _, cg := range c.groups { + if cg.Position == 0 { + groups = append(groups, cg) + } + } + groups = append(groups, c.lastChild.Comments()...) + for _, cg := range c.groups { + if cg.Position != 0 { + cg.Position = c.lastPos + groups = append(groups, cg) + } + } + ast.SetComments(c.lastChild, groups) + c.groups = nil + } else { + c.lastChild = nil + // attach before next + for _, cg := range c.groups { + cg.Position = 0 + } + child.groups = c.groups + c.groups = nil + } + } + if p.leadComment != nil { + child.groups = append(child.groups, p.leadComment) + p.leadComment = nil + } + p.comments = child + return child +} + +// openList is used to treat a list of comments as a single comment +// position in a production. 
+func (p *parser) openList() { + if p.comments.isList > 0 { + p.comments.isList++ + return + } + c := &commentState{ + parent: p.comments, + isList: 1, + } + p.comments = c +} + +func (c *commentState) add(g *ast.CommentGroup) { + g.Position = c.pos + c.groups = append(c.groups, g) +} + +func (p *parser) closeList() { + c := p.comments + if c.lastChild != nil { + for _, cg := range c.groups { + cg.Position = c.lastPos + c.lastChild.AddComment(cg) + } + c.groups = nil + } + switch c.isList--; { + case c.isList < 0: + if !p.panicking { + err := errors.Newf(p.pos, "unmatched close list") + p.errors = errors.Append(p.errors, err) + p.panicking = true + panic(err) + } + case c.isList == 0: + parent := c.parent + if len(c.groups) > 0 { + parent.groups = append(parent.groups, c.groups...) + } + parent.pos++ + p.comments = parent + } +} + +func (c *commentState) closeNode(p *parser, n ast.Node) ast.Node { + if p.comments != c { + if !p.panicking { + err := errors.Newf(p.pos, "unmatched comments") + p.errors = errors.Append(p.errors, err) + p.panicking = true + panic(err) + } + return n + } + p.comments = c.parent + if c.parent != nil { + c.parent.lastChild = n + c.parent.lastPos = c.pos + c.parent.pos++ + } + for _, cg := range c.groups { + if n != nil { + if cg != nil { + n.AddComment(cg) + } + } + } + c.groups = nil + return n +} + +func (c *commentState) closeExpr(p *parser, n ast.Expr) ast.Expr { + c.closeNode(p, n) + return n +} + +func (c *commentState) closeClause(p *parser, n ast.Clause) ast.Clause { + c.closeNode(p, n) + return n +} + +// ---------------------------------------------------------------------------- +// Parsing support + +func (p *parser) printTrace(a ...interface{}) { + const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . 
" + const n = len(dots) + pos := p.file.Position(p.pos) + fmt.Printf("%5d:%3d: ", pos.Line, pos.Column) + i := 2 * p.indent + for i > n { + fmt.Print(dots) + i -= n + } + // i <= n + fmt.Print(dots[0:i]) + fmt.Println(a...) +} + +func trace(p *parser, msg string) *parser { + p.printTrace(msg, "(") + p.indent++ + return p +} + +// Usage pattern: defer un(trace(p, "...")) +func un(p *parser) { + p.indent-- + p.printTrace(")") +} + +// Advance to the next +func (p *parser) next0() { + // Because of one-token look-ahead, print the previous token + // when tracing as it provides a more readable output. The + // very first token (!p.pos.IsValid()) is not initialized + // (it is ILLEGAL), so don't print it . + if p.trace && p.pos.IsValid() { + s := p.tok.String() + switch { + case p.tok.IsLiteral(): + p.printTrace(s, p.lit) + case p.tok.IsOperator(), p.tok.IsKeyword(): + p.printTrace("\"" + s + "\"") + default: + p.printTrace(s) + } + } + + p.pos, p.tok, p.lit = p.scanner.Scan() +} + +// Consume a comment and return it and the line on which it ends. +func (p *parser) consumeComment() (comment *ast.Comment, endline int) { + // /*-style comments may end on a different line than where they start. + // Scan the comment for '\n' chars and adjust endline accordingly. + endline = p.file.Line(p.pos) + if p.lit[1] == '*' { + p.assertV0(p.pos, 0, 10, "block quotes") + + // don't use range here - no need to decode Unicode code points + for i := 0; i < len(p.lit); i++ { + if p.lit[i] == '\n' { + endline++ + } + } + } + + comment = &ast.Comment{Slash: p.pos, Text: p.lit} + p.next0() + + return +} + +// Consume a group of adjacent comments, add it to the parser's +// comments list, and return it together with the line at which +// the last comment in the group ends. A non-comment token or n +// empty lines terminate a comment group. 
+func (p *parser) consumeCommentGroup(prevLine, n int) (comments *ast.CommentGroup, endline int) { + var list []*ast.Comment + var rel token.RelPos + endline = p.file.Line(p.pos) + switch endline - prevLine { + case 0: + rel = token.Blank + case 1: + rel = token.Newline + default: + rel = token.NewSection + } + for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n { + var comment *ast.Comment + comment, endline = p.consumeComment() + list = append(list, comment) + } + + cg := &ast.CommentGroup{List: list} + ast.SetRelPos(cg, rel) + comments = cg + return +} + +// Advance to the next non-comment In the process, collect +// any comment groups encountered, and refield the last lead and +// and line comments. +// +// A lead comment is a comment group that starts and ends in a +// line without any other tokens and that is followed by a non-comment +// token on the line immediately after the comment group. +// +// A line comment is a comment group that follows a non-comment +// token on the same line, and that has no tokens after it on the line +// where it ends. +// +// Lead and line comments may be considered documentation that is +// stored in the AST. +func (p *parser) next() { + // A leadComment may not be consumed if it leads an inner token of a node. + if p.leadComment != nil { + p.comments.add(p.leadComment) + } + p.leadComment = nil + prev := p.pos + p.next0() + p.comments.pos++ + + if p.tok == token.COMMENT { + var comment *ast.CommentGroup + var endline int + + currentLine := p.file.Line(p.pos) + prevLine := p.file.Line(prev) + if prevLine == currentLine { + // The comment is on same line as the previous token; it + // cannot be a lead comment but may be a line comment. + comment, endline = p.consumeCommentGroup(prevLine, 0) + if p.file.Line(p.pos) != endline { + // The next token is on a different line, thus + // the last comment group is a line comment. 
+ comment.Line = true + } + } + + // consume successor comments, if any + endline = -1 + for p.tok == token.COMMENT { + if comment != nil { + p.comments.add(comment) + } + comment, endline = p.consumeCommentGroup(prevLine, 1) + prevLine = currentLine + currentLine = p.file.Line(p.pos) + + } + + if endline+1 == p.file.Line(p.pos) && p.tok != token.EOF { + // The next token is following on the line immediately after the + // comment group, thus the last comment group is a lead comment. + comment.Doc = true + p.leadComment = comment + } else { + p.comments.add(comment) + } + } + + if p.tok == token.IDENT && p.lit[0] == '`' { + p.assertV0(p.pos, 0, 13, "quoted identifiers") + } +} + +// assertV0 indicates the last version at which a certain feature was +// supported. +func (p *parser) assertV0(pos token.Pos, minor, patch int, name string) { + v := version0(minor, patch) + if p.version != 0 && p.version > v { + p.errors = errors.Append(p.errors, + errors.Wrapf(&DeprecationError{v}, pos, + "%s deprecated as of v0.%d.%d", name, minor, patch+1)) + } +} + +func (p *parser) errf(pos token.Pos, msg string, args ...interface{}) { + // ePos := p.file.Position(pos) + ePos := pos + + // If AllErrors is not set, discard errors reported on the same line + // as the last recorded error and stop parsing if there are more than + // 10 errors. 
+ if p.mode&allErrorsMode == 0 { + errors := errors.Errors(p.errors) + n := len(errors) + if n > 0 && errors[n-1].Position().Line() == ePos.Line() { + return // discard - likely a spurious error + } + if n > 10 { + p.panicking = true + panic("too many errors") + } + } + + p.errors = errors.Append(p.errors, errors.Newf(ePos, msg, args...)) +} + +func (p *parser) errorExpected(pos token.Pos, obj string) { + if pos != p.pos { + p.errf(pos, "expected %s", obj) + return + } + // the error happened at the current position; + // make the error message more specific + if p.tok == token.COMMA && p.lit == "\n" { + p.errf(pos, "expected %s, found newline", obj) + return + } + + if p.tok.IsLiteral() { + p.errf(pos, "expected %s, found '%s' %s", obj, p.tok, p.lit) + } else { + p.errf(pos, "expected %s, found '%s'", obj, p.tok) + } +} + +func (p *parser) expect(tok token.Token) token.Pos { + pos := p.pos + if p.tok != tok { + p.errorExpected(pos, "'"+tok.String()+"'") + } + p.next() // make progress + return pos +} + +// expectClosing is like expect but provides a better error message +// for the common case of a missing comma before a newline. +func (p *parser) expectClosing(tok token.Token, context string) token.Pos { + if p.tok != tok && p.tok == token.COMMA && p.lit == "\n" { + p.errf(p.pos, "missing ',' before newline in %s", context) + p.next() + } + return p.expect(tok) +} + +func (p *parser) expectComma() { + // semicolon is optional before a closing ')', ']', '}', or newline + if p.tok != token.RPAREN && p.tok != token.RBRACE && p.tok != token.EOF { + switch p.tok { + case token.COMMA: + p.next() + default: + p.errorExpected(p.pos, "','") + syncExpr(p) + } + } +} + +func (p *parser) atComma(context string, follow ...token.Token) bool { + if p.tok == token.COMMA { + return true + } + for _, t := range follow { + if p.tok == t { + return false + } + } + // TODO: find a way to detect crossing lines now we don't have a semi. 
+ if p.lit == "\n" { + p.errf(p.pos, "missing ',' before newline") + } else { + p.errf(p.pos, "missing ',' in %s", context) + } + return true // "insert" comma and continue +} + +// syncExpr advances to the next field in a field list. +// Used for synchronization after an error. +func syncExpr(p *parser) { + for { + switch p.tok { + case token.COMMA: + // Return only if parser made some progress since last + // sync or if it has not reached 10 sync calls without + // progress. Otherwise consume at least one token to + // avoid an endless parser loop (it is possible that + // both parseOperand and parseStmt call syncStmt and + // correctly do not advance, thus the need for the + // invocation limit p.syncCnt). + if p.pos == p.syncPos && p.syncCnt < 10 { + p.syncCnt++ + return + } + if p.syncPos.Before(p.pos) { + p.syncPos = p.pos + p.syncCnt = 0 + return + } + // Reaching here indicates a parser bug, likely an + // incorrect token list in this function, but it only + // leads to skipping of possibly correct code if a + // previous error is present, and thus is preferred + // over a non-terminating parse. + case token.EOF: + return + } + p.next() + } +} + +// safePos returns a valid file position for a given position: If pos +// is valid to begin with, safePos returns pos. If pos is out-of-range, +// safePos returns the EOF position. +// +// This is hack to work around "artificial" end positions in the AST which +// are computed by adding 1 to (presumably valid) token positions. If the +// token positions are invalid due to parse errors, the resulting end position +// may be past the file's EOF position, which would lead to panics if used +// later on. 
+func (p *parser) safePos(pos token.Pos) (res token.Pos) { + defer func() { + if recover() != nil { + res = p.file.Pos(p.file.Base()+p.file.Size(), pos.RelPos()) // EOF position + } + }() + _ = p.file.Offset(pos) // trigger a panic if position is out-of-range + return pos +} + +// ---------------------------------------------------------------------------- +// Identifiers + +func (p *parser) parseIdent() *ast.Ident { + c := p.openComments() + pos := p.pos + name := "_" + if p.tok == token.IDENT { + name = p.lit + p.next() + } else { + p.expect(token.IDENT) // use expect() error handling + } + ident := &ast.Ident{NamePos: pos, Name: name} + c.closeNode(p, ident) + return ident +} + +func (p *parser) parseKeyIdent() *ast.Ident { + c := p.openComments() + pos := p.pos + name := p.lit + p.next() + ident := &ast.Ident{NamePos: pos, Name: name} + c.closeNode(p, ident) + return ident +} + +// ---------------------------------------------------------------------------- +// Expressions + +// parseOperand returns an expression. +// Callers must verify the result. 
+func (p *parser) parseOperand() (expr ast.Expr) { + if p.trace { + defer un(trace(p, "Operand")) + } + + switch p.tok { + case token.IDENT: + return p.parseIdent() + + case token.LBRACE: + return p.parseStruct() + + case token.LBRACK: + return p.parseList() + + case token.BOTTOM: + c := p.openComments() + x := &ast.BottomLit{Bottom: p.pos} + p.next() + return c.closeExpr(p, x) + + case token.NULL, token.TRUE, token.FALSE, token.INT, token.FLOAT, token.STRING: + c := p.openComments() + x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit} + p.next() + return c.closeExpr(p, x) + + case token.INTERPOLATION: + return p.parseInterpolation() + + case token.LPAREN: + c := p.openComments() + defer func() { c.closeNode(p, expr) }() + lparen := p.pos + p.next() + p.exprLev++ + p.openList() + x := p.parseRHS() // types may be parenthesized: (some type) + p.closeList() + p.exprLev-- + rparen := p.expect(token.RPAREN) + return &ast.ParenExpr{ + Lparen: lparen, + X: x, + Rparen: rparen} + + default: + if p.tok.IsKeyword() { + return p.parseKeyIdent() + } + } + + // we have an error + c := p.openComments() + pos := p.pos + p.errorExpected(pos, "operand") + syncExpr(p) + return c.closeExpr(p, &ast.BadExpr{From: pos, To: p.pos}) +} + +func (p *parser) parseIndexOrSlice(x ast.Expr) (expr ast.Expr) { + if p.trace { + defer un(trace(p, "IndexOrSlice")) + } + + c := p.openComments() + defer func() { c.closeNode(p, expr) }() + c.pos = 1 + + const N = 2 + lbrack := p.expect(token.LBRACK) + + p.exprLev++ + var index [N]ast.Expr + var colons [N - 1]token.Pos + if p.tok != token.COLON { + index[0] = p.parseRHS() + } + nColons := 0 + for p.tok == token.COLON && nColons < len(colons) { + colons[nColons] = p.pos + nColons++ + p.next() + if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF { + index[nColons] = p.parseRHS() + } + } + p.exprLev-- + rbrack := p.expect(token.RBRACK) + + if nColons > 0 { + return &ast.SliceExpr{ + X: x, + Lbrack: lbrack, + Low: index[0], 
+ High: index[1], + Rbrack: rbrack} + } + + return &ast.IndexExpr{ + X: x, + Lbrack: lbrack, + Index: index[0], + Rbrack: rbrack} +} + +func (p *parser) parseCallOrConversion(fun ast.Expr) (expr *ast.CallExpr) { + if p.trace { + defer un(trace(p, "CallOrConversion")) + } + c := p.openComments() + defer func() { c.closeNode(p, expr) }() + + p.openList() + defer p.closeList() + + lparen := p.expect(token.LPAREN) + + p.exprLev++ + var list []ast.Expr + for p.tok != token.RPAREN && p.tok != token.EOF { + list = append(list, p.parseRHS()) // builtins may expect a type: make(some type, ...) + if !p.atComma("argument list", token.RPAREN) { + break + } + p.next() + } + p.exprLev-- + rparen := p.expectClosing(token.RPAREN, "argument list") + + return &ast.CallExpr{ + Fun: fun, + Lparen: lparen, + Args: list, + Rparen: rparen} +} + +// TODO: inline this function in parseFieldList once we no longer user comment +// position information in parsing. +func (p *parser) consumeDeclComma() { + if p.atComma("struct literal", token.RBRACE, token.EOF) { + p.next() + } +} + +func (p *parser) parseFieldList() (list []ast.Decl) { + if p.trace { + defer un(trace(p, "FieldList")) + } + p.openList() + defer p.closeList() + + for p.tok != token.RBRACE && p.tok != token.EOF { + switch p.tok { + case token.ATTRIBUTE: + list = append(list, p.parseAttribute()) + p.consumeDeclComma() + + case token.ELLIPSIS: + c := p.openComments() + ellipsis := &ast.Ellipsis{Ellipsis: p.pos} + p.next() + c.closeNode(p, ellipsis) + list = append(list, ellipsis) + + default: + list = append(list, p.parseField()) + } + + // TODO: handle next comma here, after disallowing non-colon separator + // and we have eliminated the need comment positions. 
+ } + + if len(list) > 1 { + for _, d := range list { + if internal.IsBulkField(d) { + p.assertV0(p.pos, 2, 0, `combining bulk optional fields with other fields`) + break + } + } + } + + return +} + +func (p *parser) parseLetDecl() (decl ast.Decl, ident *ast.Ident) { + if p.trace { + defer un(trace(p, "Field")) + } + + c := p.openComments() + + letPos := p.expect(token.LET) + if p.tok != token.IDENT { + c.closeNode(p, ident) + return nil, &ast.Ident{ + NamePos: letPos, + Name: "let", + } + } + defer func() { c.closeNode(p, decl) }() + + ident = p.parseIdent() + assign := p.expect(token.BIND) + expr := p.parseRHS() + + p.consumeDeclComma() + + return &ast.LetClause{ + Let: letPos, + Ident: ident, + Equal: assign, + Expr: expr, + }, nil +} + +func (p *parser) parseComprehension() (decl ast.Decl, ident *ast.Ident) { + if p.trace { + defer un(trace(p, "Comprehension")) + } + + c := p.openComments() + defer func() { c.closeNode(p, decl) }() + + tok := p.tok + pos := p.pos + clauses, fc := p.parseComprehensionClauses(true) + if fc != nil { + ident = &ast.Ident{ + NamePos: pos, + Name: tok.String(), + } + fc.closeNode(p, ident) + return nil, ident + } + + sc := p.openComments() + expr := p.parseStruct() + sc.closeExpr(p, expr) + + if p.atComma("struct literal", token.RBRACE) { // TODO: may be EOF + p.next() + } + + return &ast.Comprehension{ + Clauses: clauses, + Value: expr, + }, nil +} + +func (p *parser) parseField() (decl ast.Decl) { + if p.trace { + defer un(trace(p, "Field")) + } + + c := p.openComments() + defer func() { c.closeNode(p, decl) }() + + pos := p.pos + + this := &ast.Field{Label: nil} + m := this + + for i := 0; ; i++ { + tok := p.tok + + label, expr, decl, ok := p.parseLabel(false) + if decl != nil { + return decl + } + m.Label = label + + if !ok { + if expr == nil { + expr = p.parseRHS() + } + if a, ok := expr.(*ast.Alias); ok { + if i > 0 { + p.errorExpected(p.pos, "label or ':'") + return &ast.BadDecl{From: pos, To: p.pos} + } + p.consumeDeclComma() 
+ return a + } + e := &ast.EmbedDecl{Expr: expr} + p.consumeDeclComma() + return e + } + + if p.tok == token.OPTION { + m.Optional = p.pos + p.next() + } + + if p.tok == token.COLON || p.tok == token.ISA { + break + } + + // TODO: consider disallowing comprehensions with more than one label. + // This can be a bit awkward in some cases, but it would naturally + // enforce the proper style that a comprehension be defined in the + // smallest possible scope. + // allowComprehension = false + + switch p.tok { + case token.COMMA: + p.expectComma() // sync parser. + fallthrough + + case token.RBRACE, token.EOF: + if i == 0 { + if a, ok := expr.(*ast.Alias); ok { + p.assertV0(p.pos, 1, 3, `old-style alias; use "let X = expr"`) + + return a + } + switch tok { + case token.IDENT, token.LBRACK, token.STRING, token.INTERPOLATION, + token.NULL, token.TRUE, token.FALSE, + token.FOR, token.IF, token.LET, token.IN: + return &ast.EmbedDecl{Expr: expr} + } + } + fallthrough + + default: + p.errorExpected(p.pos, "label or ':'") + return &ast.BadDecl{From: pos, To: p.pos} + } + } + + m.TokenPos = p.pos + m.Token = p.tok + if p.tok == token.ISA { + p.assertV0(p.pos, 2, 0, "use of '::'") + } + if p.tok != token.COLON && p.tok != token.ISA { + p.errorExpected(pos, "':' or '::'") + } + p.next() // : or :: + + for { + if l, ok := m.Label.(*ast.ListLit); ok && len(l.Elts) != 1 { + p.errf(l.Pos(), "square bracket must have exactly one element") + } + + tok := p.tok + label, expr, _, ok := p.parseLabel(true) + if !ok || (p.tok != token.COLON && p.tok != token.ISA && p.tok != token.OPTION) { + if expr == nil { + expr = p.parseRHS() + } + if a, ok := expr.(*ast.Alias); ok { + p.errf(expr.Pos(), "alias %q not allowed as value", debugStr(a.Ident)) + } + m.Value = expr + break + } + field := &ast.Field{Label: label} + m.Value = &ast.StructLit{Elts: []ast.Decl{field}} + m = field + + if tok != token.LSS && p.tok == token.OPTION { + m.Optional = p.pos + p.next() + } + + m.TokenPos = p.pos + 
m.Token = p.tok + if p.tok == token.ISA { + p.assertV0(p.pos, 2, 0, "use of '::'") + } + if p.tok != token.COLON && p.tok != token.ISA { + if p.tok.IsLiteral() { + p.errf(p.pos, "expected ':' or '::'; found %s", p.lit) + } else { + p.errf(p.pos, "expected ':' or '::'; found %s", p.tok) + } + break + } + p.next() + } + + if attrs := p.parseAttributes(); attrs != nil { + m.Attrs = attrs + } + + p.consumeDeclComma() + + return this +} + +func (p *parser) parseAttributes() (attrs []*ast.Attribute) { + p.openList() + for p.tok == token.ATTRIBUTE { + attrs = append(attrs, p.parseAttribute()) + } + p.closeList() + return attrs +} + +func (p *parser) parseAttribute() *ast.Attribute { + c := p.openComments() + a := &ast.Attribute{At: p.pos, Text: p.lit} + p.next() + c.closeNode(p, a) + return a +} + +func (p *parser) parseLabel(rhs bool) (label ast.Label, expr ast.Expr, decl ast.Decl, ok bool) { + tok := p.tok + switch tok { + + case token.FOR, token.IF: + if rhs { + expr = p.parseExpr() + break + } + comp, ident := p.parseComprehension() + if comp != nil { + return nil, nil, comp, false + } + expr = ident + + case token.LET: + let, ident := p.parseLetDecl() + if let != nil { + return nil, nil, let, false + } + expr = ident + + case token.IDENT, token.STRING, token.INTERPOLATION, + token.NULL, token.TRUE, token.FALSE, token.IN: + expr = p.parseExpr() + + case token.LBRACK: + expr = p.parseRHS() + switch x := expr.(type) { + case *ast.ListLit: + // Note: caller must verify this list is suitable as a label. + label, ok = x, true + } + } + + switch x := expr.(type) { + case *ast.BasicLit: + switch x.Kind { + case token.STRING, token.NULL, token.TRUE, token.FALSE: + // Keywords that represent operands. + + // Allowing keywords to be used as a labels should not interfere with + // generating good errors: any keyword can only appear on the RHS of a + // field (after a ':'), whereas labels always appear on the LHS. 
+ + label, ok = x, true + } + + case *ast.Ident: + if strings.HasPrefix(x.Name, "__") { + p.errf(x.NamePos, "identifiers starting with '__' are reserved") + } + + expr = p.parseAlias(x) + if a, ok := expr.(*ast.Alias); ok { + if _, ok = a.Expr.(ast.Label); !ok { + break + } + label = a + } else { + label = x + } + ok = true + + case ast.Label: + label, ok = x, true + } + return label, expr, nil, ok +} + +func (p *parser) parseStruct() (expr ast.Expr) { + lbrace := p.expect(token.LBRACE) + + if p.trace { + defer un(trace(p, "StructLit")) + } + + elts := p.parseStructBody() + rbrace := p.expectClosing(token.RBRACE, "struct literal") + return &ast.StructLit{ + Lbrace: lbrace, + Elts: elts, + Rbrace: rbrace, + } +} + +func (p *parser) parseStructBody() []ast.Decl { + if p.trace { + defer un(trace(p, "StructBody")) + } + + p.exprLev++ + var elts []ast.Decl + + // TODO: consider "stealing" non-lead comments. + // for _, cg := range p.comments.groups { + // if cg != nil { + // elts = append(elts, cg) + // } + // } + // p.comments.groups = p.comments.groups[:0] + + if p.tok != token.RBRACE { + elts = p.parseFieldList() + } + p.exprLev-- + + return elts +} + +// parseComprehensionClauses parses either new-style (first==true) +// or old-style (first==false). +// Should we now disallow keywords as identifiers? If not, we need to +// return a list of discovered labels as the alternative. +func (p *parser) parseComprehensionClauses(first bool) (clauses []ast.Clause, c *commentState) { + // TODO: reuse Template spec, which is possible if it doesn't check the + // first is an identifier. 
+ + for { + switch p.tok { + case token.FOR: + c := p.openComments() + forPos := p.expect(token.FOR) + if first { + switch p.tok { + case token.COLON, token.ISA, token.BIND, token.OPTION, + token.COMMA, token.EOF: + return nil, c + } + } + + var key, value *ast.Ident + var colon token.Pos + value = p.parseIdent() + if p.tok == token.COMMA { + colon = p.expect(token.COMMA) + key = value + value = p.parseIdent() + } + c.pos = 4 + // params := p.parseParams(nil, ARROW) + clauses = append(clauses, c.closeClause(p, &ast.ForClause{ + For: forPos, + Key: key, + Colon: colon, + Value: value, + In: p.expect(token.IN), + Source: p.parseRHS(), + })) + + case token.IF: + c := p.openComments() + ifPos := p.expect(token.IF) + if first { + switch p.tok { + case token.COLON, token.ISA, token.BIND, token.OPTION, + token.COMMA, token.EOF: + return nil, c + } + } + + clauses = append(clauses, c.closeClause(p, &ast.IfClause{ + If: ifPos, + Condition: p.parseRHS(), + })) + + case token.LET: + c := p.openComments() + letPos := p.expect(token.LET) + + ident := p.parseIdent() + assign := p.expect(token.BIND) + expr := p.parseRHS() + + clauses = append(clauses, c.closeClause(p, &ast.LetClause{ + Let: letPos, + Ident: ident, + Equal: assign, + Expr: expr, + })) + + default: + return clauses, nil + } + if p.tok == token.COMMA { + p.next() + } + + first = false + } +} + +func (p *parser) parseList() (expr ast.Expr) { + lbrack := p.expect(token.LBRACK) + + if p.trace { + defer un(trace(p, "ListLiteral")) + } + + elts := p.parseListElements() + + if clauses, _ := p.parseComprehensionClauses(false); clauses != nil { + var expr ast.Expr + p.assertV0(p.pos, 1, 3, "old-style list comprehensions") + if len(elts) != 1 { + p.errf(lbrack.Add(1), "list comprehension must have exactly one element") + } + if len(elts) > 0 { + expr = elts[0] + } + rbrack := p.expectClosing(token.RBRACK, "list comprehension") + + return &ast.ListComprehension{ + Lbrack: lbrack, + Expr: expr, + Clauses: clauses, + Rbrack: 
rbrack, + } + } + + if p.tok == token.ELLIPSIS { + ellipsis := &ast.Ellipsis{ + Ellipsis: p.pos, + } + elts = append(elts, ellipsis) + p.next() + if p.tok != token.COMMA && p.tok != token.RBRACK { + ellipsis.Type = p.parseRHS() + } + if p.atComma("list literal", token.RBRACK) { + p.next() + } + } + + rbrack := p.expectClosing(token.RBRACK, "list literal") + return &ast.ListLit{ + Lbrack: lbrack, + Elts: elts, + Rbrack: rbrack} +} + +func (p *parser) parseListElements() (list []ast.Expr) { + if p.trace { + defer un(trace(p, "ListElements")) + } + p.openList() + defer p.closeList() + + for p.tok != token.RBRACK && p.tok != token.ELLIPSIS && p.tok != token.EOF { + expr, ok := p.parseListElement() + list = append(list, expr) + if !ok { + break + } + } + + return +} + +func (p *parser) parseListElement() (expr ast.Expr, ok bool) { + if p.trace { + defer un(trace(p, "ListElement")) + } + c := p.openComments() + defer func() { c.closeNode(p, expr) }() + + switch p.tok { + case token.FOR, token.IF: + tok := p.tok + pos := p.pos + clauses, fc := p.parseComprehensionClauses(true) + if clauses != nil { + sc := p.openComments() + expr := p.parseStruct() + sc.closeExpr(p, expr) + + if p.atComma("list literal", token.RBRACK) { // TODO: may be EOF + p.next() + } + + return &ast.Comprehension{ + Clauses: clauses, + Value: expr, + }, true + } + + expr = &ast.Ident{ + NamePos: pos, + Name: tok.String(), + } + fc.closeNode(p, expr) + + default: + expr = p.parseUnaryExpr() + } + + expr = p.parseBinaryExprTail(token.LowestPrec+1, expr) + expr = p.parseAlias(expr) + + // Enforce there is an explicit comma. We could also allow the + // omission of commas in lists, but this gives rise to some ambiguities + // with list comprehensions. + if p.tok == token.COMMA && p.lit != "," { + p.next() + // Allow missing comma for last element, though, to be compliant + // with JSON. 
+ if p.tok == token.RBRACK || p.tok == token.FOR || p.tok == token.IF { + return expr, false + } + p.errf(p.pos, "missing ',' before newline in list literal") + } else if !p.atComma("list literal", token.RBRACK, token.FOR, token.IF) { + return expr, false + } + p.next() + + return expr, true +} + +// parseAlias turns an expression into an alias. +func (p *parser) parseAlias(lhs ast.Expr) (expr ast.Expr) { + if p.tok != token.BIND { + return lhs + } + pos := p.pos + p.next() + expr = p.parseRHS() + if expr == nil { + panic("empty return") + } + switch x := lhs.(type) { + case *ast.Ident: + return &ast.Alias{Ident: x, Equal: pos, Expr: expr} + } + p.errf(p.pos, "expected identifier for alias") + return expr +} + +// checkExpr checks that x is an expression (and not a type). +func (p *parser) checkExpr(x ast.Expr) ast.Expr { + switch unparen(x).(type) { + case *ast.BadExpr: + case *ast.BottomLit: + case *ast.Ident: + case *ast.BasicLit: + case *ast.Interpolation: + case *ast.StructLit: + case *ast.ListLit: + case *ast.ListComprehension: + case *ast.ParenExpr: + panic("unreachable") + case *ast.SelectorExpr: + case *ast.IndexExpr: + case *ast.SliceExpr: + case *ast.CallExpr: + case *ast.UnaryExpr: + case *ast.BinaryExpr: + default: + // all other nodes are not proper expressions + p.errorExpected(x.Pos(), "expression") + x = &ast.BadExpr{ + From: x.Pos(), To: p.safePos(x.End()), + } + } + return x +} + +// If x is of the form (T), unparen returns unparen(T), otherwise it returns x. +func unparen(x ast.Expr) ast.Expr { + if p, isParen := x.(*ast.ParenExpr); isParen { + x = unparen(p.X) + } + return x +} + +// If lhs is set and the result is an identifier, it is not resolved. 
+func (p *parser) parsePrimaryExpr() ast.Expr { + if p.trace { + defer un(trace(p, "PrimaryExpr")) + } + + return p.parsePrimaryExprTail(p.parseOperand()) +} + +func (p *parser) parsePrimaryExprTail(operand ast.Expr) ast.Expr { + x := operand +L: + for { + switch p.tok { + case token.PERIOD: + c := p.openComments() + c.pos = 1 + p.next() + switch p.tok { + case token.IDENT: + x = &ast.SelectorExpr{ + X: p.checkExpr(x), + Sel: p.parseIdent(), + } + case token.STRING: + if strings.HasPrefix(p.lit, `"`) && !strings.HasPrefix(p.lit, `""`) { + str := &ast.BasicLit{ + ValuePos: p.pos, + Kind: token.STRING, + Value: p.lit, + } + p.next() + x = &ast.SelectorExpr{ + X: p.checkExpr(x), + Sel: str, + } + break + } + fallthrough + default: + pos := p.pos + p.errorExpected(pos, "selector") + p.next() // make progress + x = &ast.SelectorExpr{X: x, Sel: &ast.Ident{NamePos: pos, Name: "_"}} + } + c.closeNode(p, x) + case token.LBRACK: + x = p.parseIndexOrSlice(p.checkExpr(x)) + case token.LPAREN: + x = p.parseCallOrConversion(p.checkExpr(x)) + default: + break L + } + } + + return x +} + +// If lhs is set and the result is an identifier, it is not resolved. 
+func (p *parser) parseUnaryExpr() ast.Expr { + if p.trace { + defer un(trace(p, "UnaryExpr")) + } + + switch p.tok { + case token.ADD, token.SUB, token.NOT, token.MUL, + token.LSS, token.LEQ, token.GEQ, token.GTR, + token.NEQ, token.MAT, token.NMAT: + pos, op := p.pos, p.tok + c := p.openComments() + p.next() + return c.closeExpr(p, &ast.UnaryExpr{ + OpPos: pos, + Op: op, + X: p.checkExpr(p.parseUnaryExpr()), + }) + } + + return p.parsePrimaryExpr() +} + +func (p *parser) tokPrec() (token.Token, int) { + tok := p.tok + if tok == token.IDENT { + switch p.lit { + case "quo": + return token.IQUO, 7 + case "rem": + return token.IREM, 7 + case "div": + return token.IDIV, 7 + case "mod": + return token.IMOD, 7 + default: + return tok, 0 + } + } + return tok, tok.Precedence() +} + +// If lhs is set and the result is an identifier, it is not resolved. +func (p *parser) parseBinaryExpr(prec1 int) ast.Expr { + if p.trace { + defer un(trace(p, "BinaryExpr")) + } + p.openList() + defer p.closeList() + + return p.parseBinaryExprTail(prec1, p.parseUnaryExpr()) +} + +func (p *parser) parseBinaryExprTail(prec1 int, x ast.Expr) ast.Expr { + for { + op, prec := p.tokPrec() + if prec < prec1 { + return x + } + c := p.openComments() + c.pos = 1 + pos := p.expect(p.tok) + x = c.closeExpr(p, &ast.BinaryExpr{ + X: p.checkExpr(x), + OpPos: pos, + Op: op, + // Treat nested expressions as RHS. 
+ Y: p.checkExpr(p.parseBinaryExpr(prec + 1))}) + } +} + +func (p *parser) parseInterpolation() (expr ast.Expr) { + c := p.openComments() + defer func() { c.closeNode(p, expr) }() + + p.openList() + defer p.closeList() + + cc := p.openComments() + + lit := p.lit + pos := p.pos + p.next() + last := &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: lit} + exprs := []ast.Expr{last} + + for p.tok == token.LPAREN { + c.pos = 1 + p.expect(token.LPAREN) + cc.closeExpr(p, last) + + exprs = append(exprs, p.parseRHS()) + + cc = p.openComments() + if p.tok != token.RPAREN { + p.errf(p.pos, "expected ')' for string interpolation") + } + lit = p.scanner.ResumeInterpolation() + pos = p.pos + p.next() + last = &ast.BasicLit{ + ValuePos: pos, + Kind: token.STRING, + Value: lit, + } + exprs = append(exprs, last) + } + cc.closeExpr(p, last) + return &ast.Interpolation{Elts: exprs} +} + +// Callers must check the result (using checkExpr), depending on context. +func (p *parser) parseExpr() (expr ast.Expr) { + if p.trace { + defer un(trace(p, "Expression")) + } + + c := p.openComments() + defer func() { c.closeExpr(p, expr) }() + + return p.parseBinaryExpr(token.LowestPrec + 1) +} + +func (p *parser) parseRHS() ast.Expr { + x := p.checkExpr(p.parseExpr()) + return x +} + +// ---------------------------------------------------------------------------- +// Declarations + +func isValidImport(lit string) bool { + const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD" + s, _ := literal.Unquote(lit) // go/scanner returns a legal string literal + if p := strings.LastIndexByte(s, ':'); p >= 0 { + s = s[:p] + } + for _, r := range s { + if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) { + return false + } + } + return s != "" +} + +func (p *parser) parseImportSpec(_ int) *ast.ImportSpec { + if p.trace { + defer un(trace(p, "ImportSpec")) + } + + c := p.openComments() + + var ident *ast.Ident + if p.tok == token.IDENT { + ident = p.parseIdent() 
+ } + + pos := p.pos + var path string + if p.tok == token.STRING { + path = p.lit + if !isValidImport(path) { + p.errf(pos, "invalid import path: %s", path) + } + p.next() + p.expectComma() // call before accessing p.linecomment + } else { + p.expect(token.STRING) // use expect() error handling + if p.tok == token.COMMA { + p.expectComma() // call before accessing p.linecomment + } + } + // collect imports + spec := &ast.ImportSpec{ + Name: ident, + Path: &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path}, + } + c.closeNode(p, spec) + p.imports = append(p.imports, spec) + + return spec +} + +func (p *parser) parseImports() *ast.ImportDecl { + if p.trace { + defer un(trace(p, "Imports")) + } + c := p.openComments() + + ident := p.parseIdent() + var lparen, rparen token.Pos + var list []*ast.ImportSpec + if p.tok == token.LPAREN { + lparen = p.pos + p.next() + p.openList() + for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ { + list = append(list, p.parseImportSpec(iota)) + } + p.closeList() + rparen = p.expect(token.RPAREN) + p.expectComma() + } else { + list = append(list, p.parseImportSpec(0)) + } + + d := &ast.ImportDecl{ + Import: ident.Pos(), + Lparen: lparen, + Specs: list, + Rparen: rparen, + } + c.closeNode(p, d) + return d +} + +// ---------------------------------------------------------------------------- +// Source files + +func (p *parser) parseFile() *ast.File { + if p.trace { + defer un(trace(p, "File")) + } + + c := p.comments + + // Don't bother parsing the rest if we had errors scanning the first + // Likely not a Go source file at all. + if p.errors != nil { + return nil + } + p.openList() + + var decls []ast.Decl + + for p.tok == token.ATTRIBUTE { + decls = append(decls, p.parseAttribute()) + p.consumeDeclComma() + } + + // The package clause is not a declaration: it does not appear in any + // scope. 
+ if p.tok == token.IDENT && p.lit == "package" { + c := p.openComments() + + pos := p.pos + var name *ast.Ident + p.expect(token.IDENT) + name = p.parseIdent() + if name.Name == "_" && p.mode&declarationErrorsMode != 0 { + p.errf(p.pos, "invalid package name _") + } + + pkg := &ast.Package{ + PackagePos: pos, + Name: name, + } + decls = append(decls, pkg) + p.expectComma() + c.closeNode(p, pkg) + } + + for p.tok == token.ATTRIBUTE { + decls = append(decls, p.parseAttribute()) + p.consumeDeclComma() + } + + if p.mode&packageClauseOnlyMode == 0 { + // import decls + for p.tok == token.IDENT && p.lit == "import" { + decls = append(decls, p.parseImports()) + } + + if p.mode&importsOnlyMode == 0 { + // rest of package decls + // TODO: loop and allow multiple expressions. + decls = append(decls, p.parseFieldList()...) + p.expect(token.EOF) + } + } + p.closeList() + + f := &ast.File{ + Imports: p.imports, + Decls: decls, + } + c.closeNode(p, f) + return f +} diff --git a/vendor/cuelang.org/go/cue/parser/print.go b/vendor/cuelang.org/go/cue/parser/print.go new file mode 100644 index 000000000..95f2d5cb5 --- /dev/null +++ b/vendor/cuelang.org/go/cue/parser/print.go @@ -0,0 +1,300 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package parser + +import ( + "fmt" + "strconv" + "strings" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" +) + +func init() { + internal.DebugStr = debugStr +} + +func debugStr(x interface{}) (out string) { + if n, ok := x.(ast.Node); ok { + comments := "" + for _, g := range n.Comments() { + comments += debugStr(g) + } + if comments != "" { + defer func() { out = "<" + comments + out + ">" }() + } + } + switch v := x.(type) { + case *ast.File: + out := "" + out += debugStr(v.Decls) + return out + + case *ast.Package: + out := "package " + out += debugStr(v.Name) + return out + + case *ast.LetClause: + out := "let " + out += debugStr(v.Ident) + out += "=" + out += debugStr(v.Expr) + return out + + case *ast.Alias: + out := debugStr(v.Ident) + out += "=" + out += debugStr(v.Expr) + return out + + case *ast.BottomLit: + return "_|_" + + case *ast.BasicLit: + return v.Value + + case *ast.Interpolation: + for _, e := range v.Elts { + out += debugStr(e) + } + return out + + case *ast.EmbedDecl: + out += debugStr(v.Expr) + return out + + case *ast.ImportDecl: + out := "import " + if v.Lparen != token.NoPos { + out += "( " + out += debugStr(v.Specs) + out += " )" + } else { + out += debugStr(v.Specs) + } + return out + + case *ast.Comprehension: + out := debugStr(v.Clauses) + out += debugStr(v.Value) + return out + + case *ast.StructLit: + out := "{" + out += debugStr(v.Elts) + out += "}" + return out + + case *ast.ListLit: + out := "[" + out += debugStr(v.Elts) + out += "]" + return out + + case *ast.Ellipsis: + out := "..." 
+ if v.Type != nil { + out += debugStr(v.Type) + } + return out + + case *ast.ListComprehension: + out := "[" + out += debugStr(v.Expr) + out += " " + out += debugStr(v.Clauses) + out += "]" + return out + + case *ast.ForClause: + out := "for " + if v.Key != nil { + out += debugStr(v.Key) + out += ": " + } + out += debugStr(v.Value) + out += " in " + out += debugStr(v.Source) + return out + + case *ast.IfClause: + out := "if " + out += debugStr(v.Condition) + return out + + case *ast.Field: + out := debugStr(v.Label) + if v.Optional != token.NoPos { + out += "?" + } + if v.Value != nil { + switch v.Token { + case token.ILLEGAL, token.COLON: + out += ": " + default: + out += fmt.Sprintf(" %s ", v.Token) + } + out += debugStr(v.Value) + for _, a := range v.Attrs { + out += " " + out += debugStr(a) + } + } + return out + + case *ast.Attribute: + return v.Text + + case *ast.Ident: + return v.Name + + case *ast.TemplateLabel: + out := "<" + out += debugStr(v.Ident) + out += ">" + return out + + case *ast.SelectorExpr: + return debugStr(v.X) + "." 
+ debugStr(v.Sel) + + case *ast.CallExpr: + out := debugStr(v.Fun) + out += "(" + out += debugStr(v.Args) + out += ")" + return out + + case *ast.ParenExpr: + out := "(" + out += debugStr(v.X) + out += ")" + return out + + case *ast.UnaryExpr: + return v.Op.String() + debugStr(v.X) + + case *ast.BinaryExpr: + out := debugStr(v.X) + op := v.Op.String() + if 'a' <= op[0] && op[0] <= 'z' { + op = fmt.Sprintf(" %s ", op) + } + out += op + out += debugStr(v.Y) + return out + + case []*ast.CommentGroup: + var a []string + for _, c := range v { + a = append(a, debugStr(c)) + } + return strings.Join(a, "\n") + + case *ast.CommentGroup: + str := "[" + if v.Doc { + str += "d" + } + if v.Line { + str += "l" + } + str += strconv.Itoa(int(v.Position)) + var a = []string{} + for _, c := range v.List { + a = append(a, c.Text) + } + return str + strings.Join(a, " ") + "] " + + case *ast.IndexExpr: + out := debugStr(v.X) + out += "[" + out += debugStr(v.Index) + out += "]" + return out + + case *ast.SliceExpr: + out := debugStr(v.X) + out += "[" + out += debugStr(v.Low) + out += ":" + out += debugStr(v.High) + out += "]" + return out + + case *ast.ImportSpec: + out := "" + if v.Name != nil { + out += debugStr(v.Name) + out += " " + } + out += debugStr(v.Path) + return out + + case []ast.Decl: + if len(v) == 0 { + return "" + } + out := "" + for _, d := range v { + out += debugStr(d) + out += sep + } + return out[:len(out)-len(sep)] + + case []ast.Clause: + if len(v) == 0 { + return "" + } + out := "" + for _, c := range v { + out += debugStr(c) + out += " " + } + return out + + case []ast.Expr: + if len(v) == 0 { + return "" + } + out := "" + for _, d := range v { + out += debugStr(d) + out += sep + } + return out[:len(out)-len(sep)] + + case []*ast.ImportSpec: + if len(v) == 0 { + return "" + } + out := "" + for _, d := range v { + out += debugStr(d) + out += sep + } + return out[:len(out)-len(sep)] + + default: + if v == nil { + return "" + } + return fmt.Sprintf("<%T>", x) + } 
+} + +const sep = ", " diff --git a/vendor/cuelang.org/go/cue/path.go b/vendor/cuelang.org/go/cue/path.go new file mode 100644 index 000000000..28d0182e0 --- /dev/null +++ b/vendor/cuelang.org/go/cue/path.go @@ -0,0 +1,312 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +import ( + "strconv" + "strings" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + "cuelang.org/go/internal/core/adt" + "github.com/cockroachdb/apd/v2" +) + +// A Selector is a component of a path. +type Selector struct { + sel selector +} + +// String reports the CUE representation of a selector. +func (sel Selector) String() string { + return sel.sel.String() +} + +type selector interface { + String() string + + feature(ctx adt.Runtime) adt.Feature + kind() adt.FeatureType +} + +// A Path is series of selectors to query a CUE value. +type Path struct { + path []Selector +} + +// MakePath creates a Path from a sequence of selectors. +func MakePath(selectors ...Selector) Path { + return Path{path: selectors} +} + +// ParsePath parses a CUE expression into a Path. Any error resulting from +// this conversion can be obtained by calling Err on the result. 
+func ParsePath(s string) Path { + expr, err := parser.ParseExpr("", s) + if err != nil { + return MakePath(Selector{pathError{errors.Promote(err, "invalid path")}}) + } + + return Path{path: toSelectors(expr)} +} + +// Selectors reports the individual selectors of a path. +func (p Path) Selectors() []Selector { + return p.path +} + +// String reports the CUE representation of p. +func (p Path) String() string { + if err := p.Err(); err != nil { + return "_|_" + } + + b := &strings.Builder{} + for i, sel := range p.path { + x := sel.sel + // TODO: use '.' in all cases, once supported. + switch { + case x.kind() == adt.IntLabel: + b.WriteByte('[') + b.WriteString(x.String()) + b.WriteByte(']') + continue + case i > 0: + b.WriteByte('.') + } + + b.WriteString(x.String()) + } + return b.String() +} + +func toSelectors(expr ast.Expr) []Selector { + switch x := expr.(type) { + case *ast.Ident: + return []Selector{identSelector(x)} + + case *ast.IndexExpr: + a := toSelectors(x.X) + var sel Selector + if b, ok := x.Index.(*ast.BasicLit); !ok { + sel = Selector{pathError{ + errors.Newf(token.NoPos, "non-constant expression %s", + internal.DebugStr(x.Index))}} + } else { + sel = basicLitSelector(b) + } + return append(a, sel) + + case *ast.SelectorExpr: + a := toSelectors(x.X) + return append(a, identSelector(x.Sel)) + + default: + return []Selector{Selector{pathError{ + errors.Newf(token.NoPos, "invalid label %s ", internal.DebugStr(x)), + }}} + } +} + +func basicLitSelector(b *ast.BasicLit) Selector { + switch b.Kind { + case token.INT: + var n literal.NumInfo + if err := literal.ParseNum(b.Value, &n); err != nil { + return Selector{pathError{ + errors.Newf(token.NoPos, "invalid string index %s", b.Value), + }} + } + var d apd.Decimal + _ = n.Decimal(&d) + i, err := d.Int64() + if err != nil { + return Selector{pathError{ + errors.Newf(token.NoPos, "integer %s out of range", b.Value), + }} + } + return Index(int(i)) + + case token.STRING: + info, _, _, _ := 
literal.ParseQuotes(b.Value, b.Value) + if !info.IsDouble() { + return Selector{pathError{ + errors.Newf(token.NoPos, "invalid string index %s", b.Value)}} + } + s, _ := literal.Unquote(b.Value) + return Selector{stringSelector(s)} + + default: + return Selector{pathError{ + errors.Newf(token.NoPos, "invalid literal %s", b.Value), + }} + } +} + +func identSelector(label ast.Label) Selector { + switch x := label.(type) { + case *ast.Ident: + if isHiddenOrDefinition(x.Name) { + return Selector{definitionSelector(x.Name)} + } + return Selector{stringSelector(x.Name)} + + case *ast.BasicLit: + return basicLitSelector(x) + + default: + return Selector{pathError{ + errors.Newf(token.NoPos, "invalid label %s ", internal.DebugStr(x)), + }} + } +} + +// Err reports errors that occurred when generating the path. +func (p Path) Err() error { + var errs errors.Error + for _, x := range p.path { + if err, ok := x.sel.(pathError); ok { + errs = errors.Append(errs, err.Error) + } + } + return errs +} + +func isHiddenOrDefinition(s string) bool { + return strings.HasPrefix(s, "#") || strings.HasPrefix(s, "_") +} + +// A Def marks a string as a definition label. An # will be added if a string is +// not prefixed with an # or _# already. Hidden labels are qualified by the +// package in which they are looked up. +func Def(s string) Selector { + if !isHiddenOrDefinition(s) { + s = "#" + s + } + return Selector{definitionSelector(s)} +} + +type definitionSelector string + +// String returns the CUE representation of the definition. 
+func (d definitionSelector) String() string { + return string(d) +} + +func (d definitionSelector) kind() adt.FeatureType { + switch { + case strings.HasPrefix(string(d), "#"): + return adt.DefinitionLabel + case strings.HasPrefix(string(d), "_#"): + return adt.HiddenDefinitionLabel + case strings.HasPrefix(string(d), "_"): + return adt.HiddenLabel + default: + panic("invalid definition") + } +} + +func (d definitionSelector) feature(r adt.Runtime) adt.Feature { + return adt.MakeIdentLabel(r, string(d), "") +} + +// A Str is a CUE string label. Definition selectors are defined with Def. +func Str(s string) Selector { + return Selector{stringSelector(s)} +} + +type stringSelector string + +func (s stringSelector) String() string { + str := string(s) + if isHiddenOrDefinition(str) || !ast.IsValidIdent(str) { + return literal.Label.Quote(str) + } + return str +} + +func (s stringSelector) kind() adt.FeatureType { return adt.StringLabel } + +func (s stringSelector) feature(r adt.Runtime) adt.Feature { + return adt.MakeStringLabel(r, string(s)) +} + +// An Index selects a list element by index. +func Index(x int) Selector { + f, err := adt.MakeLabel(nil, int64(x), adt.IntLabel) + if err != nil { + return Selector{pathError{err}} + } + return Selector{indexSelector(f)} +} + +type indexSelector adt.Feature + +func (s indexSelector) String() string { + return strconv.Itoa(adt.Feature(s).Index()) +} + +func (s indexSelector) kind() adt.FeatureType { return adt.IntLabel } + +func (s indexSelector) feature(r adt.Runtime) adt.Feature { + return adt.Feature(s) +} + +// TODO: allow import paths to be represented? +// +// // ImportPath defines a lookup at the root of an instance. It must be the first +// // element of a Path. 
+// func ImportPath(s string) Selector { +// return importSelector(s) +// } + +// type importSelector string + +// func (s importSelector) String() string { +// return literal.String.Quote(string(s)) +// } + +// func (s importSelector) feature(r adt.Runtime) adt.Feature { +// return adt.InvalidLabel +// } + +// TODO: allow looking up in parent scopes? + +// // Parent returns a Selector for looking up in the parent of a current node. +// // Parent selectors may only occur at the start of a Path. +// func Parent() Selector { +// return parentSelector{} +// } + +// type parentSelector struct{} + +// func (p parentSelector) String() string { return "__up" } +// func (p parentSelector) feature(r adt.Runtime) adt.Feature { +// return adt.InvalidLabel +// } + +type pathError struct { + errors.Error +} + +func (p pathError) String() string { return p.Error.Error() } +func (p pathError) kind() adt.FeatureType { return 0 } +func (p pathError) feature(r adt.Runtime) adt.Feature { + return adt.InvalidLabel +} diff --git a/vendor/cuelang.org/go/cue/scanner/fuzz.go b/vendor/cuelang.org/go/cue/scanner/fuzz.go new file mode 100644 index 000000000..68a894a49 --- /dev/null +++ b/vendor/cuelang.org/go/cue/scanner/fuzz.go @@ -0,0 +1,39 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// +build gofuzz + +package scanner + +import ( + "cuelang.org/go/cue/token" +) + +func Fuzz(b []byte) int { + retCode := 1 + eh := func(_ token.Pos, msg string, args []interface{}) { + retCode = 0 + } + + var s Scanner + s.Init(token.NewFile("", 1, len(b)), b, eh, ScanComments) + + for { + _, tok, _ := s.Scan() + if tok == token.EOF { + break + } + } + return retCode +} diff --git a/vendor/cuelang.org/go/cue/scanner/scanner.go b/vendor/cuelang.org/go/cue/scanner/scanner.go new file mode 100644 index 000000000..6caaf1191 --- /dev/null +++ b/vendor/cuelang.org/go/cue/scanner/scanner.go @@ -0,0 +1,1020 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package scanner implements a scanner for CUE source text. It takes a []byte +// as source which can then be tokenized through repeated calls to the Scan +// method. +package scanner // import "cuelang.org/go/cue/scanner" + +import ( + "bytes" + "fmt" + "path/filepath" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "cuelang.org/go/cue/token" +) + +// An ErrorHandler is a generic error handler used throughout CUE packages. +// +// The position points to the beginning of the offending value. +type ErrorHandler func(pos token.Pos, msg string, args []interface{}) + +// A Scanner holds the Scanner's internal state while processing +// a given text. It can be allocated as part of another data +// structure but must be initialized via Init before use. 
+type Scanner struct { + // immutable state + file *token.File // source file handle + dir string // directory portion of file.Name() + src []byte // source + errh ErrorHandler // error reporting; or nil + mode Mode // scanning mode + + // scanning state + ch rune // current character + offset int // character offset + rdOffset int // reading offset (position after current character) + lineOffset int // current line offset + linesSinceLast int + spacesSinceLast int + insertEOL bool // insert a comma before next newline + + quoteStack []quoteInfo + + // public state - ok to modify + ErrorCount int // number of errors encountered +} + +type quoteInfo struct { + char rune + numChar int + numHash int +} + +const bom = 0xFEFF // byte order mark, only permitted as very first character + +// Read the next Unicode char into s.ch. +// s.ch < 0 means end-of-file. +func (s *Scanner) next() { + if s.rdOffset < len(s.src) { + s.offset = s.rdOffset + if s.ch == '\n' { + s.lineOffset = s.offset + s.file.AddLine(s.offset) + } + r, w := rune(s.src[s.rdOffset]), 1 + switch { + case r == 0: + s.errf(s.offset, "illegal character NUL") + case r >= utf8.RuneSelf: + // not ASCII + r, w = utf8.DecodeRune(s.src[s.rdOffset:]) + if r == utf8.RuneError && w == 1 { + s.errf(s.offset, "illegal UTF-8 encoding") + } else if r == bom && s.offset > 0 { + s.errf(s.offset, "illegal byte order mark") + } + } + s.rdOffset += w + s.ch = r + } else { + s.offset = len(s.src) + if s.ch == '\n' { + s.lineOffset = s.offset + s.file.AddLine(s.offset) + } + s.ch = -1 // eof + } +} + +// A Mode value is a set of flags (or 0). +// They control scanner behavior. +type Mode uint + +// These constants are options to the Init function. +const ( + ScanComments Mode = 1 << iota // return comments as COMMENT tokens + dontInsertCommas // do not automatically insert commas - for testing only +) + +// Init prepares the scanner s to tokenize the text src by setting the +// scanner at the beginning of src. 
The scanner uses the file set file +// for position information and it adds line information for each line. +// It is ok to re-use the same file when re-scanning the same file as +// line information which is already present is ignored. Init causes a +// panic if the file size does not match the src size. +// +// Calls to Scan will invoke the error handler err if they encounter a +// syntax error and err is not nil. Also, for each error encountered, +// the Scanner field ErrorCount is incremented by one. The mode parameter +// determines how comments are handled. +// +// Note that Init may call err if there is an error in the first character +// of the file. +func (s *Scanner) Init(file *token.File, src []byte, eh ErrorHandler, mode Mode) { + // Explicitly initialize all fields since a scanner may be reused. + if file.Size() != len(src) { + panic(fmt.Sprintf("file size (%d) does not match src len (%d)", file.Size(), len(src))) + } + s.file = file + s.dir, _ = filepath.Split(file.Name()) + s.src = src + s.errh = eh + s.mode = mode + + s.ch = ' ' + s.offset = 0 + s.rdOffset = 0 + s.lineOffset = 0 + s.insertEOL = false + s.ErrorCount = 0 + + s.next() + if s.ch == bom { + s.next() // ignore BOM at file beginning + } +} + +func (s *Scanner) errf(offs int, msg string, args ...interface{}) { + if s.errh != nil { + s.errh(s.file.Pos(offs, 0), msg, args) + } + s.ErrorCount++ +} + +var prefix = []byte("//line ") + +func (s *Scanner) interpretLineComment(text []byte) { + if bytes.HasPrefix(text, prefix) { + // get filename and line number, if any + if i := bytes.LastIndex(text, []byte{':'}); i > 0 { + if line, err := strconv.Atoi(string(text[i+1:])); err == nil && line > 0 { + // valid //line filename:line comment + filename := string(bytes.TrimSpace(text[len(prefix):i])) + if filename != "" { + filename = filepath.Clean(filename) + if !filepath.IsAbs(filename) { + // make filename relative to current directory + filename = filepath.Join(s.dir, filename) + } + } + // update 
scanner position + s.file.AddLineInfo(s.lineOffset+len(text)+1, filename, line) // +len(text)+1 since comment applies to next line + } + } + } +} + +func (s *Scanner) scanComment() string { + // initial '/' already consumed; s.ch == '/' || s.ch == '*' + offs := s.offset - 1 // position of initial '/' + hasCR := false + + if s.ch == '/' { + //-style comment + s.next() + for s.ch != '\n' && s.ch >= 0 { + if s.ch == '\r' { + hasCR = true + } + s.next() + } + if offs == s.lineOffset { + // comment starts at the beginning of the current line + s.interpretLineComment(s.src[offs:s.offset]) + } + goto exit + } + + s.errf(offs, "comment not terminated") + +exit: + lit := s.src[offs:s.offset] + if hasCR { + // TODO: preserve /r/n + lit = stripCR(lit) + } + + return string(lit) +} + +func (s *Scanner) findLineEnd() bool { + // initial '/' already consumed + + defer func(offs int) { + // reset scanner state to where it was upon calling findLineEnd + s.ch = '/' + s.offset = offs + s.rdOffset = offs + 1 + s.next() // consume initial '/' again + }(s.offset - 1) + + // read ahead until a newline, EOF, or non-comment token is found + for s.ch == '/' || s.ch == '*' { + if s.ch == '/' { + //-style comment always contains a newline + return true + } + /*-style comment: look for newline */ + s.next() + for s.ch >= 0 { + ch := s.ch + if ch == '\n' { + return true + } + s.next() + if ch == '*' && s.ch == '/' { + s.next() + break + } + } + s.skipWhitespace(0) // s.insertSemi is set + if s.ch < 0 || s.ch == '\n' { + return true + } + if s.ch != '/' { + // non-comment token + return false + } + s.next() // consume '/' + } + + return false +} + +func isLetter(ch rune) bool { + return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch >= utf8.RuneSelf && unicode.IsLetter(ch) +} + +func isDigit(ch rune) bool { + // TODO(mpvl): Is this correct? 
+ return '0' <= ch && ch <= '9' || ch >= utf8.RuneSelf && unicode.IsDigit(ch) +} + +func (s *Scanner) scanFieldIdentifier() string { + offs := s.offset + if s.ch == '_' { + s.next() + } + if s.ch == '#' { + s.next() + // TODO: remove this block to allow # + if isDigit(s.ch) { + return string(s.src[offs:s.offset]) + } + } + for isLetter(s.ch) || isDigit(s.ch) || s.ch == '_' || s.ch == '$' { + s.next() + } + return string(s.src[offs:s.offset]) +} + +func (s *Scanner) scanIdentifier() string { + offs := s.offset + for isLetter(s.ch) || isDigit(s.ch) || s.ch == '_' || s.ch == '$' { + s.next() + } + return string(s.src[offs:s.offset]) +} + +func isExtendedIdent(r rune) bool { + return strings.IndexRune("-_#$%. ", r) >= 0 +} + +func (s *Scanner) scanQuotedIdentifier() string { + offs := s.offset - 1 // quote already consumed + hasInvalid := false + for ; ; s.next() { + switch { + default: + if !hasInvalid { + s.errf(s.offset, "invalid character '%s' in identifier", string(s.ch)) + hasInvalid = true + } + continue + + case isLetter(s.ch) || isDigit(s.ch) || isExtendedIdent(s.ch): + continue + + case s.ch == '`': + s.next() + return string(s.src[offs:s.offset]) + + case s.ch == '\n': + s.errf(s.offset, "quoted identifier not terminated") + return string(s.src[offs:s.offset]) + } + } +} + +func digitVal(ch rune) int { + switch { + case '0' <= ch && ch <= '9': + return int(ch - '0') + case ch == '_': + return 0 + case 'a' <= ch && ch <= 'f': + return int(ch - 'a' + 10) + case 'A' <= ch && ch <= 'F': + return int(ch - 'A' + 10) + } + return 16 // larger than any legal digit val +} + +func (s *Scanner) scanMantissa(base int) { + var last rune + for digitVal(s.ch) < base { + if last == '_' && s.ch == '_' { + s.errf(s.offset, "illegal '_' in number") + } + last = s.ch + s.next() + } + if last == '_' { + s.errf(s.offset-1, "illegal '_' in number") + } +} + +func (s *Scanner) scanNumber(seenDecimalPoint bool) (token.Token, string) { + // digitVal(s.ch) < 10 + offs := s.offset + 
tok := token.INT + + if seenDecimalPoint { + offs-- + tok = token.FLOAT + s.scanMantissa(10) + goto exponent + } + + if s.ch == '0' { + // int or float + offs := s.offset + s.next() + if s.ch == 'x' || s.ch == 'X' { + // hexadecimal int + s.next() + s.scanMantissa(16) + if s.offset-offs <= 2 { + // only scanned "0x" or "0X" + s.errf(offs, "illegal hexadecimal number") + } + } else if s.ch == 'b' { + // binary int + s.next() + s.scanMantissa(2) + if s.offset-offs <= 2 { + // only scanned "0b" + s.errf(offs, "illegal binary number") + } + } else if s.ch == 'o' { + // octal int + s.next() + s.scanMantissa(8) + if s.offset-offs <= 2 { + // only scanned "0o" + s.errf(offs, "illegal octal number") + } + } else { + // 0 or float + seenDigits := false + if s.ch >= '0' && s.ch <= '9' { + seenDigits = true + s.scanMantissa(10) + } + if s.ch == '.' || s.ch == 'e' || s.ch == 'E' { + goto fraction + } + if seenDigits { + // integer other than 0 may not start with 0 + s.errf(offs, "illegal integer number") + } + } + goto exit + } + + // decimal int or float + s.scanMantissa(10) + + // TODO: allow 3h4s, etc. + // switch s.ch { + // case 'h', 'm', 's', "µ"[0], 'u', 'n': + // } + +fraction: + if s.ch == '.' { + if p := s.offset + 1; p < len(s.src) && s.src[p] == '.' { + // interpret dot as part of a range. + goto exit + } + tok = token.FLOAT + s.next() + s.scanMantissa(10) + } + +exponent: + switch s.ch { + case 'K', 'M', 'G', 'T', 'P': + tok = token.INT // TODO: Or should we allow this to be a float? + s.next() + if s.ch == 'i' { + s.next() + } + goto exit + } + + if s.ch == 'e' || s.ch == 'E' { + tok = token.FLOAT + s.next() + if s.ch == '-' || s.ch == '+' { + s.next() + } + s.scanMantissa(10) + } + +exit: + return tok, string(s.src[offs:s.offset]) +} + +// scanEscape parses an escape sequence where rune is the accepted +// escaped quote. In case of a syntax error, it stops at the offending +// character (without consuming it) and returns false. Otherwise +// it returns true. 
+// +// Must be compliant with https://tools.ietf.org/html/rfc4627. +func (s *Scanner) scanEscape(quote quoteInfo) (ok, interpolation bool) { + for i := 0; i < quote.numHash; i++ { + if s.ch != '#' { + return true, false + } + s.next() + } + + offs := s.offset + + var n int + var base, max uint32 + switch s.ch { + case '(': + return true, true + case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '/', quote.char: + s.next() + return true, false + case '0', '1', '2', '3', '4', '5', '6', '7': + n, base, max = 3, 8, 255 + case 'x': + s.next() + n, base, max = 2, 16, 255 + case 'u': + s.next() + n, base, max = 4, 16, unicode.MaxRune + case 'U': + s.next() + n, base, max = 8, 16, unicode.MaxRune + default: + msg := "unknown escape sequence" + if s.ch < 0 { + msg = "escape sequence not terminated" + } + s.errf(offs, msg) + return false, false + } + + var x uint32 + for n > 0 { + d := uint32(digitVal(s.ch)) + if d >= base { + if s.ch < 0 { + s.errf(s.offset, "escape sequence not terminated") + } else { + s.errf(s.offset, "illegal character %#U in escape sequence", s.ch) + } + return false, false + } + x = x*base + d + s.next() + n-- + } + + // TODO: this is valid JSON, so remove, but normalize and report an error + // if for unmatched surrogate pairs . 
+ if x > max { + s.errf(offs, "escape sequence is invalid Unicode code point") + return false, false + } + + return true, false +} + +func (s *Scanner) scanString(offs int, quote quoteInfo) (token.Token, string) { + // ", """, ', or ''' opening already consumed + + tok := token.STRING + + hasCR := false + extra := 0 + for { + ch := s.ch + if (quote.numChar != 3 && ch == '\n') || ch < 0 { + s.errf(offs, "string literal not terminated") + lit := s.src[offs:s.offset] + if hasCR { + lit = stripCR(lit) + } + return tok, string(lit) + } + + s.next() + ch, ok := s.consumeStringClose(ch, quote) + if ok { + break + } + if ch == '\r' && quote.numChar == 3 { + hasCR = true + } + if ch == '\\' { + if _, interpolation := s.scanEscape(quote); interpolation { + tok = token.INTERPOLATION + extra = 1 + s.quoteStack = append(s.quoteStack, quote) + break + } + } + } + lit := s.src[offs : s.offset+extra] + if hasCR { + lit = stripCR(lit) + } + return tok, string(lit) +} + +func (s *Scanner) consumeQuotes(quote rune, max int) (next rune, n int) { + for ; n < max; n++ { + if s.ch != quote { + return s.ch, n + } + s.next() + } + return s.ch, n +} + +func (s *Scanner) consumeStringClose(ch rune, quote quoteInfo) (next rune, atEnd bool) { + if quote.char != ch { + return ch, false + } + numChar := quote.numChar + n := numChar + quote.numHash + want := quote.char + for i := 1; i < n; i++ { + if i == numChar { + want = '#' + } + if want != s.ch { + return ch, false + } + ch = s.ch + s.next() + } + return s.ch, true +} + +func (s *Scanner) checkHashCount(offs int, quote quoteInfo) { + for i := 0; i < quote.numHash; i++ { + if s.ch != '#' { + s.errf(offs, "string literal not terminated") + return + } + s.next() + } +} + +func stripCR(b []byte) []byte { + c := make([]byte, len(b)) + i := 0 + for _, ch := range b { + if ch != '\r' { + c[i] = ch + i++ + } + } + return c[:i] +} + +// scanAttribute scans aa full attribute of the form @foo(str). 
An attribute +// is a lexical entry and as such whitespace is treated as normal characters +// within the attribute. +func (s *Scanner) scanAttribute() (tok token.Token, lit string) { + offs := s.offset - 1 // @ already consumed + + s.scanIdentifier() + + if _, tok, _ := s.Scan(); tok == token.LPAREN { + s.scanAttributeTokens(token.RPAREN) + } else { + s.errf(s.offset, "invalid attribute: expected '('") + } + return token.ATTRIBUTE, string(s.src[offs:s.offset]) +} + +func (s *Scanner) scanAttributeTokens(close token.Token) { + for { + switch _, tok, _ := s.Scan(); tok { + case close: + return + case token.EOF: + s.errf(s.offset, "attribute missing '%s'", close) + return + + case token.INTERPOLATION: + s.errf(s.offset, "interpolation not allowed in attribute") + s.popInterpolation() + s.recoverParen(1) + case token.LPAREN: + s.scanAttributeTokens(token.RPAREN) + case token.LBRACE: + s.scanAttributeTokens(token.RBRACE) + case token.LBRACK: + s.scanAttributeTokens(token.RBRACK) + case token.RPAREN, token.RBRACK, token.RBRACE: + s.errf(s.offset, "unexpected '%s'", tok) + } + } +} + +// recoverParen is an approximate recovery mechanism to recover from invalid +// attributes. +func (s *Scanner) recoverParen(open int) { + for { + switch s.ch { + case '\n', -1: + return + case '(': + open++ + case ')': + if open--; open == 0 { + return + } + } + s.next() + } +} + +func (s *Scanner) skipWhitespace(inc int) { + for { + switch s.ch { + case ' ', '\t': + s.spacesSinceLast += inc + case '\n': + s.linesSinceLast += inc + if s.insertEOL { + return + } + case '\r': + default: + return + } + s.next() + } +} + +// Helper functions for scanning multi-byte tokens such as >> += >>= . +// Different routines recognize different length tok_i based on matches +// of ch_i. If a token ends in '=', the result is tok1 or tok3 +// respectively. Otherwise, the result is tok0 if there was no other +// matching character, or tok2 if the matching character was ch2. 
+ +func (s *Scanner) switch2(tok0, tok1 token.Token) token.Token { + if s.ch == '=' { + s.next() + return tok1 + } + return tok0 +} + +func (s *Scanner) popInterpolation() quoteInfo { + quote := s.quoteStack[len(s.quoteStack)-1] + s.quoteStack = s.quoteStack[:len(s.quoteStack)-1] + return quote +} + +// ResumeInterpolation resumes scanning of a string interpolation. +func (s *Scanner) ResumeInterpolation() string { + quote := s.popInterpolation() + _, str := s.scanString(s.offset-1, quote) + return str +} + +// Scan scans the next token and returns the token position, the token, +// and its literal string if applicable. The source end is indicated by +// EOF. +// +// If the returned token is a literal (IDENT, INT, FLOAT, +// IMAG, CHAR, STRING) or COMMENT, the literal string +// has the corresponding value. +// +// If the returned token is a keyword, the literal string is the keyword. +// +// If the returned token is Comma, the corresponding +// literal string is "," if the comma was present in the source, +// and "\n" if the semicolon was inserted because of a newline or +// at EOF. +// +// If the returned token is ILLEGAL, the literal string is the +// offending character. +// +// In all other cases, Scan returns an empty literal string. +// +// For more tolerant parsing, Scan will return a valid token if +// possible even if a syntax error was encountered. Thus, even +// if the resulting token sequence contains no illegal tokens, +// a client may not assume that no error occurred. Instead it +// must check the scanner's ErrorCount or the number of calls +// of the error handler, if there was one installed. +// +// Scan adds line information to the file added to the file +// set with Init. Token positions are relative to that file +// and thus relative to the file set. 
+func (s *Scanner) Scan() (pos token.Pos, tok token.Token, lit string) { +scanAgain: + s.skipWhitespace(1) + + var rel token.RelPos + switch { + case s.linesSinceLast > 1: + rel = token.NewSection + case s.linesSinceLast == 1: + rel = token.Newline + case s.spacesSinceLast > 0: + rel = token.Blank + default: + rel = token.NoSpace + } + // current token start + offset := s.offset + pos = s.file.Pos(offset, rel) + + // determine token value + insertEOL := false + var quote quoteInfo + switch ch := s.ch; { + case '0' <= ch && ch <= '9': + insertEOL = true + tok, lit = s.scanNumber(false) + case isLetter(ch), ch == '$', ch == '#': + lit = s.scanFieldIdentifier() + if len(lit) > 1 { + // keywords are longer than one letter - avoid lookup otherwise + tok = token.Lookup(lit) + insertEOL = true + break + } + if ch != '#' || (s.ch != '\'' && s.ch != '"' && s.ch != '#') { + tok = token.IDENT + insertEOL = true + break + } + quote.numHash = 1 + ch = s.ch + fallthrough + default: + s.next() // always make progress + switch ch { + case -1: + if s.insertEOL { + s.insertEOL = false // EOF consumed + return s.file.Pos(offset, token.Elided), token.COMMA, "\n" + } + tok = token.EOF + case '_': + if s.ch == '|' { + // Unconditionally require this to be followed by another + // underscore to avoid needing an extra lookahead. + // Note that `_|x` is always equal to _. 
+ s.next() + if s.ch != '_' { + s.errf(s.file.Offset(pos), "illegal token '_|'; expected '_'") + insertEOL = s.insertEOL // preserve insertComma info + tok = token.ILLEGAL + lit = "_|" + break + } + s.next() + tok = token.BOTTOM + lit = "_|_" + } else { + tok = token.IDENT + lit = "_" + s.scanFieldIdentifier() + } + insertEOL = true + case '`': + tok = token.IDENT + lit = s.scanQuotedIdentifier() + insertEOL = true + + case '\n': + // we only reach here if s.insertComma was + // set in the first place and exited early + // from s.skipWhitespace() + s.insertEOL = false // newline consumed + p := s.file.Pos(offset, token.Elided) + s.skipWhitespace(1) + // Don't elide comma before a ',' or ':' to ensure JSON + // conformance. Note that cue fmt should immediately undo those. + if s.ch == ',' || s.ch == ':' { + return s.Scan() + } + return p, token.COMMA, "\n" + + case '#': + for quote.numHash++; s.ch == '#'; quote.numHash++ { + s.next() + } + ch = s.ch + if ch != '\'' && ch != '"' { + break + } + s.next() + fallthrough + case '"', '\'': + insertEOL = true + quote.char = ch + quote.numChar = 1 + offs := s.offset - 1 - quote.numHash + switch _, n := s.consumeQuotes(ch, 2); n { + case 0: + quote.numChar = 1 + tok, lit = s.scanString(offs, quote) + case 1: + s.checkHashCount(offs, quote) + tok, lit = token.STRING, string(s.src[offs:s.offset]) + case 2: + quote.numChar = 3 + switch s.ch { + case '\n': + s.next() + tok, lit = s.scanString(offs, quote) + case '\r': + s.next() + if s.ch == '\n' { + s.next() + tok, lit = s.scanString(offs, quote) + break + } + fallthrough + default: + s.errf(offs, "expected newline after multiline quote %s", + s.src[offs:s.offset]) + tok, lit = token.STRING, string(s.src[offs:s.offset]) + } + } + case '@': + insertEOL = true + tok, lit = s.scanAttribute() + case ':': + if s.ch == ':' { + s.next() + tok = token.ISA + } else { + tok = token.COLON + } + case ';': + tok = token.SEMICOLON + insertEOL = true + case '?': + tok = token.OPTION + 
insertEOL = true + case '.': + if '0' <= s.ch && s.ch <= '9' { + insertEOL = true + tok, lit = s.scanNumber(true) + } else if s.ch == '.' { + s.next() + if s.ch == '.' { + s.next() + tok = token.ELLIPSIS + } else { + s.errf(s.file.Offset(pos), "illegal token '..'; expected '.'") + } + } else { + tok = token.PERIOD + } + case ',': + tok = token.COMMA + lit = "," + case '(': + tok = token.LPAREN + case ')': + insertEOL = true + tok = token.RPAREN + case '[': + tok = token.LBRACK + case ']': + insertEOL = true + tok = token.RBRACK + case '{': + tok = token.LBRACE + case '}': + insertEOL = true + tok = token.RBRACE + case '+': + tok = token.ADD // Consider ++ for list concatenate. + case '-': + tok = token.SUB + case '*': + tok = token.MUL + case '/': + if s.ch == '/' { + // comment + if s.insertEOL && s.findLineEnd() { + // reset position to the beginning of the comment + s.ch = '/' + s.offset = s.file.Offset(pos) + s.rdOffset = s.offset + 1 + s.insertEOL = false // newline consumed + return s.file.Pos(offset, token.Elided), token.COMMA, "\n" + } + comment := s.scanComment() + if s.mode&ScanComments == 0 { + // skip comment + s.insertEOL = false // newline consumed + goto scanAgain + } + tok = token.COMMENT + lit = comment + } else { + tok = token.QUO + } + // We no longer use %, but seems like a useful token to use for + // something else at some point. 
+ // case '%': + case '<': + if s.ch == '-' { + s.next() + tok = token.ARROW + } else { + tok = s.switch2(token.LSS, token.LEQ) + } + case '>': + tok = s.switch2(token.GTR, token.GEQ) + case '=': + if s.ch == '~' { + s.next() + tok = token.MAT + } else { + tok = s.switch2(token.BIND, token.EQL) + } + case '!': + if s.ch == '~' { + s.next() + tok = token.NMAT + } else { + tok = s.switch2(token.NOT, token.NEQ) + } + case '&': + switch s.ch { + case '&': + s.next() + tok = token.LAND + default: + tok = token.AND + } + case '|': + if s.ch == '|' { + s.next() + tok = token.LOR + } else { + tok = token.OR + } + default: + // next reports unexpected BOMs - don't repeat + if ch != bom { + s.errf(s.file.Offset(pos), "illegal character %#U", ch) + } + insertEOL = s.insertEOL // preserve insertSemi info + tok = token.ILLEGAL + lit = string(ch) + } + } + if s.mode&dontInsertCommas == 0 { + s.insertEOL = insertEOL + } + + s.linesSinceLast = 0 + s.spacesSinceLast = 0 + return +} diff --git a/vendor/cuelang.org/go/cue/token/position.go b/vendor/cuelang.org/go/cue/token/position.go new file mode 100644 index 000000000..937108382 --- /dev/null +++ b/vendor/cuelang.org/go/cue/token/position.go @@ -0,0 +1,472 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package token + +import ( + "fmt" + "sort" + "sync" +) + +// ----------------------------------------------------------------------------- +// Positions + +// Position describes an arbitrary source position +// including the file, line, and column location. +// A Position is valid if the line number is > 0. +type Position struct { + Filename string // filename, if any + Offset int // offset, starting at 0 + Line int // line number, starting at 1 + Column int // column number, starting at 1 (byte count) + // RelPos Pos // relative position information +} + +// IsValid reports whether the position is valid. +func (pos *Position) IsValid() bool { return pos.Line > 0 } + +// String returns a string in one of several forms: +// +// file:line:column valid position with file name +// line:column valid position without file name +// file invalid position with file name +// - invalid position without file name +// +func (pos Position) String() string { + s := pos.Filename + if pos.IsValid() { + if s != "" { + s += ":" + } + s += fmt.Sprintf("%d:%d", pos.Line, pos.Column) + } + if s == "" { + s = "-" + } + return s +} + +// Pos is a compact encoding of a source position within a file, as well as +// relative positioning information. It can be converted into a Position for a +// more convenient, but much larger, representation. +// +type Pos struct { + file *File + offset int +} + +// File returns the file that contains the position p or nil if there is no +// such file (for instance for p == NoPos). 
+// +func (p Pos) File() *File { + if p.index() == 0 { + return nil + } + return p.file +} + +func (p Pos) Line() int { + if p.file == nil { + return 0 + } + return p.Position().Line +} + +func (p Pos) Column() int { + if p.file == nil { + return 0 + } + return p.Position().Column +} + +func (p Pos) Filename() string { + if p.file == nil { + return "" + } + return p.Position().Filename +} + +func (p Pos) Position() Position { + if p.file == nil { + return Position{} + } + return p.file.Position(p) +} + +func (p Pos) String() string { + return p.Position().String() +} + +// NoPos is the zero value for Pos; there is no file and line information +// associated with it, and NoPos().IsValid() is false. NoPos is always +// smaller than any other Pos value. The corresponding Position value +// for NoPos is the zero value for Position. +var NoPos = Pos{} + +// RelPos indicates the relative position of token to the previous token. +type RelPos int + +const ( + // NoRelPos indicates no relative position is specified. + NoRelPos RelPos = iota + + // Elided indicates that the token for which this position is defined is + // not rendered at all. + Elided + + // NoSpace indicates there is no whitespace after this token. + NoSpace + + // Blank means there is horizontal space after this token. + Blank + + // Newline means there is a single newline after this token. + Newline + + // NewSection means there are two or more newlines after this token. + NewSection + + relMask = 0xf + relShift = 4 +) + +var relNames = []string{ + "invalid", "elided", "nospace", "blank", "newline", "section", +} + +func (p RelPos) String() string { return relNames[p] } + +func (p RelPos) Pos() Pos { + return Pos{nil, int(p)} +} + +// HasRelPos repors whether p has a relative position. +func (p Pos) HasRelPos() bool { + return p.offset&relMask != 0 + +} + +func (p Pos) Before(q Pos) bool { + return p.file == q.file && p.Offset() < q.Offset() +} + +// Offset reports the byte offset relative to the file. 
+func (p Pos) Offset() int { + return p.Position().Offset +} + +// Add creates a new position relative to the p offset by n. +func (p Pos) Add(n int) Pos { + return Pos{p.file, p.offset + toPos(index(n))} +} + +// IsValid reports whether the position is valid. +func (p Pos) IsValid() bool { + return p != NoPos +} + +// IsNewline reports whether the relative information suggests this node should +// be printed on a new lien. +func (p Pos) IsNewline() bool { + return p.RelPos() >= Newline +} + +func (p Pos) WithRel(rel RelPos) Pos { + return Pos{p.file, p.offset&^relMask | int(rel)} +} + +func (p Pos) RelPos() RelPos { + return RelPos(p.offset & relMask) +} + +func (p Pos) index() index { + return index(p.offset) >> relShift +} + +func toPos(x index) int { + return (int(x) << relShift) +} + +// ----------------------------------------------------------------------------- +// File + +type index int + +// A File has a name, size, and line offset table. +type File struct { + mutex sync.RWMutex + name string // file name as provided to AddFile + base index // Pos index range for this file is [base...base+size] + size index // file size as provided to AddFile + + // lines and infos are protected by set.mutex + lines []index // lines contains the offset of the first character for each line (the first entry is always 0) + infos []lineInfo +} + +// NewFile returns a new file. +func NewFile(filename string, base, size int) *File { + if base < 0 { + base = 1 + } + return &File{sync.RWMutex{}, filename, index(base), index(size), []index{0}, nil} +} + +// Name returns the file name of file f as registered with AddFile. +func (f *File) Name() string { + return f.name +} + +// Base returns the base offset of file f as registered with AddFile. +func (f *File) Base() int { + return int(f.base) +} + +// Size returns the size of file f as registered with AddFile. +func (f *File) Size() int { + return int(f.size) +} + +// LineCount returns the number of lines in file f. 
+func (f *File) LineCount() int { + f.mutex.RLock() + n := len(f.lines) + f.mutex.RUnlock() + return n +} + +// AddLine adds the line offset for a new line. +// The line offset must be larger than the offset for the previous line +// and smaller than the file size; otherwise the line offset is ignored. +// +func (f *File) AddLine(offset int) { + x := index(offset) + f.mutex.Lock() + if i := len(f.lines); (i == 0 || f.lines[i-1] < x) && x < f.size { + f.lines = append(f.lines, x) + } + f.mutex.Unlock() +} + +// MergeLine merges a line with the following line. It is akin to replacing +// the newline character at the end of the line with a space (to not change the +// remaining offsets). To obtain the line number, consult e.g. Position.Line. +// MergeLine will panic if given an invalid line number. +// +func (f *File) MergeLine(line int) { + if line <= 0 { + panic("illegal line number (line numbering starts at 1)") + } + f.mutex.Lock() + defer f.mutex.Unlock() + if line >= len(f.lines) { + panic("illegal line number") + } + // To merge the line numbered with the line numbered , + // we need to remove the entry in lines corresponding to the line + // numbered . The entry in lines corresponding to the line + // numbered is located at index , since indices in lines + // are 0-based and line numbers are 1-based. + copy(f.lines[line:], f.lines[line+1:]) + f.lines = f.lines[:len(f.lines)-1] +} + +// SetLines sets the line offsets for a file and reports whether it succeeded. +// The line offsets are the offsets of the first character of each line; +// for instance for the content "ab\nc\n" the line offsets are {0, 3}. +// An empty file has an empty line offset table. +// Each line offset must be larger than the offset for the previous line +// and smaller than the file size; otherwise SetLines fails and returns +// false. +// Callers must not mutate the provided slice after SetLines returns. 
+// +func (f *File) SetLines(lines []int) bool { + // verify validity of lines table + size := f.size + for i, offset := range lines { + if i > 0 && offset <= lines[i-1] || size <= index(offset) { + return false + } + } + + // set lines table + f.mutex.Lock() + f.lines = f.lines[:0] + for _, l := range lines { + f.lines = append(f.lines, index(l)) + } + f.mutex.Unlock() + return true +} + +// SetLinesForContent sets the line offsets for the given file content. +// It ignores position-altering //line comments. +func (f *File) SetLinesForContent(content []byte) { + var lines []index + line := index(0) + for offset, b := range content { + if line >= 0 { + lines = append(lines, line) + } + line = -1 + if b == '\n' { + line = index(offset) + 1 + } + } + + // set lines table + f.mutex.Lock() + f.lines = lines + f.mutex.Unlock() +} + +// A lineInfo object describes alternative file and line number +// information (such as provided via a //line comment in a .go +// file) for a given file offset. +type lineInfo struct { + // fields are exported to make them accessible to gob + Offset int + Filename string + Line int +} + +// AddLineInfo adds alternative file and line number information for +// a given file offset. The offset must be larger than the offset for +// the previously added alternative line info and smaller than the +// file size; otherwise the information is ignored. +// +// AddLineInfo is typically used to register alternative position +// information for //line filename:line comments in source files. +// +func (f *File) AddLineInfo(offset int, filename string, line int) { + x := index(offset) + f.mutex.Lock() + if i := len(f.infos); i == 0 || index(f.infos[i-1].Offset) < x && x < f.size { + f.infos = append(f.infos, lineInfo{offset, filename, line}) + } + f.mutex.Unlock() +} + +// Pos returns the Pos value for the given file offset; +// the offset must be <= f.Size(). +// f.Pos(f.Offset(p)) == p. 
+// +func (f *File) Pos(offset int, rel RelPos) Pos { + if index(offset) > f.size { + panic("illegal file offset") + } + return Pos{f, toPos(f.base+index(offset)) + int(rel)} +} + +// Offset returns the offset for the given file position p; +// p must be a valid Pos value in that file. +// f.Offset(f.Pos(offset)) == offset. +// +func (f *File) Offset(p Pos) int { + x := p.index() + if x < f.base || x > f.base+index(f.size) { + panic("illegal Pos value") + } + return int(x - f.base) +} + +// Line returns the line number for the given file position p; +// p must be a Pos value in that file or NoPos. +// +func (f *File) Line(p Pos) int { + return f.Position(p).Line +} + +func searchLineInfos(a []lineInfo, x int) int { + return sort.Search(len(a), func(i int) bool { return a[i].Offset > x }) - 1 +} + +// unpack returns the filename and line and column number for a file offset. +// If adjusted is set, unpack will return the filename and line information +// possibly adjusted by //line comments; otherwise those comments are ignored. +// +func (f *File) unpack(offset index, adjusted bool) (filename string, line, column int) { + filename = f.name + if i := searchInts(f.lines, offset); i >= 0 { + line, column = int(i+1), int(offset-f.lines[i]+1) + } + if adjusted && len(f.infos) > 0 { + // almost no files have extra line infos + if i := searchLineInfos(f.infos, int(offset)); i >= 0 { + alt := &f.infos[i] + filename = alt.Filename + if i := searchInts(f.lines, index(alt.Offset)); i >= 0 { + line += alt.Line - i - 1 + } + } + } + return +} + +func (f *File) position(p Pos, adjusted bool) (pos Position) { + offset := p.index() - f.base + pos.Offset = int(offset) + pos.Filename, pos.Line, pos.Column = f.unpack(offset, adjusted) + return +} + +// PositionFor returns the Position value for the given file position p. +// If adjusted is set, the position may be adjusted by position-altering +// //line comments; otherwise those comments are ignored. 
+// p must be a Pos value in f or NoPos. +// +func (f *File) PositionFor(p Pos, adjusted bool) (pos Position) { + x := p.index() + if p != NoPos { + if x < f.base || x > f.base+f.size { + panic("illegal Pos value") + } + pos = f.position(p, adjusted) + } + return +} + +// Position returns the Position value for the given file position p. +// Calling f.Position(p) is equivalent to calling f.PositionFor(p, true). +// +func (f *File) Position(p Pos) (pos Position) { + return f.PositionFor(p, true) +} + +// ----------------------------------------------------------------------------- +// Helper functions + +func searchInts(a []index, x index) int { + // This function body is a manually inlined version of: + // + // return sort.Search(len(a), func(i int) bool { return a[i] > x }) - 1 + // + // With better compiler optimizations, this may not be needed in the + // future, but at the moment this change improves the go/printer + // benchmark performance by ~30%. This has a direct impact on the + // speed of gofmt and thus seems worthwhile (2011-04-29). + // TODO(gri): Remove this when compilers have caught up. + i, j := 0, len(a) + for i < j { + h := i + (j-i)/2 // avoid overflow when computing h + // i ≤ h < j + if a[h] <= x { + i = h + 1 + } else { + j = h + } + } + return i - 1 +} diff --git a/vendor/cuelang.org/go/cue/token/token.go b/vendor/cuelang.org/go/cue/token/token.go new file mode 100644 index 000000000..5e1544344 --- /dev/null +++ b/vendor/cuelang.org/go/cue/token/token.go @@ -0,0 +1,266 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package token defines constants representing the lexical tokens of the Go +// programming language and basic operations on tokens (printing, predicates). +package token // import "cuelang.org/go/cue/token" + +import "strconv" + +// Token is the set of lexical tokens of the CUE configuration language. +type Token int + +// The list of tokens. +const ( + // Special tokens + ILLEGAL Token = iota + EOF + COMMENT + ATTRIBUTE // @foo(bar,baz=4) + + literalBeg + // Identifiers and basic type literals + // (these tokens stand for classes of literals) + IDENT // main, _tmp + INT // 12_345Mi, 0700, 0xdeadbeef, 1.2M + FLOAT // 123.45, + // DURATION // 3m4s TODO + STRING // "abc" + INTERPOLATION // a part of a template string, e.g. `"age: \(` + BOTTOM // _|_ + + literalEnd + + operatorBeg + // Operators and delimiters + ADD // + + SUB // - + MUL // * + POW // ^ + QUO // / + + IQUO // quo + IREM // rem + IDIV // div + IMOD // mod + + AND // & + OR // | + + LAND // && + LOR // || + + BIND // = + EQL // == + LSS // < + GTR // > + NOT // ! + ARROW // <- + + NEQ // != + LEQ // <= + GEQ // >= + + MAT // =~ + NMAT // !~ + + LPAREN // ( + LBRACK // [ + LBRACE // { + COMMA // , + PERIOD // . + ELLIPSIS // ... + + RPAREN // ) + RBRACK // ] + RBRACE // } + SEMICOLON // ; + COLON // : + ISA // :: + OPTION // ? 
+ operatorEnd + + keywordBeg + + IF + FOR + IN + LET + + TRUE + FALSE + NULL + + keywordEnd +) + +var tokens = [...]string{ + ILLEGAL: "ILLEGAL", + + EOF: "EOF", + COMMENT: "COMMENT", + + IDENT: "IDENT", + INT: "INT", + FLOAT: "FLOAT", + STRING: "STRING", + INTERPOLATION: "INTERPOLATION", + ATTRIBUTE: "ATTRIBUTE", + + ADD: "+", + SUB: "-", + MUL: "*", + POW: "^", + QUO: "/", + + IQUO: "quo", + IREM: "rem", + IDIV: "div", + IMOD: "mod", + + AND: "&", + OR: "|", + + LAND: "&&", + LOR: "||", + + BIND: "=", + EQL: "==", + LSS: "<", + GTR: ">", + NOT: "!", + ARROW: "<-", + + NEQ: "!=", + LEQ: "<=", + GEQ: ">=", + + MAT: "=~", + NMAT: "!~", + + LPAREN: "(", + LBRACK: "[", + LBRACE: "{", + COMMA: ",", + PERIOD: ".", + ELLIPSIS: "...", + + RPAREN: ")", + RBRACK: "]", + RBRACE: "}", + SEMICOLON: ";", + COLON: ":", + ISA: "::", + OPTION: "?", + + BOTTOM: "_|_", + + FALSE: "false", + TRUE: "true", + NULL: "null", + + FOR: "for", + IF: "if", + IN: "in", + LET: "let", +} + +// String returns the string corresponding to the token tok. +// For operators, delimiters, and keywords the string is the actual +// token character sequence (e.g., for the token ADD, the string is +// "+"). For all other tokens the string corresponds to the token +// constant name (e.g. for the token IDENT, the string is "IDENT"). +func (tok Token) String() string { + s := "" + if 0 <= tok && tok < Token(len(tokens)) { + s = tokens[tok] + } + if s == "" { + s = "token(" + strconv.Itoa(int(tok)) + ")" + } + return s +} + +// A set of constants for precedence-based expression parsing. +// Non-operators have lowest precedence, followed by operators +// starting with precedence 1 up to unary operators. The highest +// precedence serves as "catch-all" precedence for selector, +// indexing, and other operator and delimiter tokens. 
+const ( + LowestPrec = lowestPrec + UnaryPrec = unaryPrec + HighestPrec = highestPrec +) + +const ( + lowestPrec = 0 // non-operators + unaryPrec = 8 + highestPrec = 9 +) + +// Precedence returns the operator precedence of the binary +// operator op. If op is not a binary operator, the result +// is LowestPrecedence. +// +func (tok Token) Precedence() int { + switch tok { + case OR: + return 1 + case AND: + return 2 + case LOR: + return 3 + case LAND: + return 4 + case EQL, NEQ, LSS, LEQ, GTR, GEQ, MAT, NMAT: + return 5 + case ADD, SUB: + return 6 + case MUL, QUO, IDIV, IMOD, IQUO, IREM: + return 7 + } + return lowestPrec +} + +var keywords map[string]Token + +func init() { + keywords = make(map[string]Token) + for i := keywordBeg + 1; i < keywordEnd; i++ { + keywords[tokens[i]] = i + } +} + +// Lookup maps an identifier to its keyword token or IDENT (if not a keyword). +// +func Lookup(ident string) Token { + if tok, isKeyword := keywords[ident]; isKeyword { + return tok + } + return IDENT +} + +// Predicates + +// IsLiteral returns true for tokens corresponding to identifiers +// and basic type literals; it returns false otherwise. +func (tok Token) IsLiteral() bool { return literalBeg < tok && tok < literalEnd } + +// IsOperator returns true for tokens corresponding to operators and +// delimiters; it returns false otherwise. +func (tok Token) IsOperator() bool { return operatorBeg < tok && tok < operatorEnd } + +// IsKeyword returns true for tokens corresponding to keywords; +// it returns false otherwise. 
+func (tok Token) IsKeyword() bool { return keywordBeg < tok && tok < keywordEnd } diff --git a/vendor/cuelang.org/go/cue/types.go b/vendor/cuelang.org/go/cue/types.go new file mode 100644 index 000000000..b8bec8097 --- /dev/null +++ b/vendor/cuelang.org/go/cue/types.go @@ -0,0 +1,2423 @@ +// Copyright 2018 The CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cue + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "math" + "math/big" + "strconv" + "strings" + + "github.com/cockroachdb/apd/v2" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/ast/astutil" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/format" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + "cuelang.org/go/internal/core/adt" + "cuelang.org/go/internal/core/convert" + "cuelang.org/go/internal/core/eval" + "cuelang.org/go/internal/core/export" + "cuelang.org/go/internal/core/runtime" + "cuelang.org/go/internal/core/subsume" + "cuelang.org/go/internal/core/validate" +) + +// Kind determines the underlying type of a Value. +type Kind = adt.Kind + +const BottomKind Kind = 0 + +const ( + // NullKind indicates a null value. + NullKind Kind = adt.NullKind + + // BoolKind indicates a boolean value. + BoolKind = adt.BoolKind + + // IntKind represents an integral number. + IntKind = adt.IntKind + + // FloatKind represents a decimal float point number that cannot be + // converted to an integer. 
The underlying number may still be integral, + // but resulting from an operation that enforces the float type. + FloatKind = adt.FloatKind + + // StringKind indicates any kind of string. + StringKind = adt.StringKind + + // BytesKind is a blob of data. + BytesKind = adt.BytesKind + + // StructKind is a kev-value map. + StructKind = adt.StructKind + + // ListKind indicates a list of values. + ListKind = adt.ListKind + + // _numberKind is used as a implementation detail inside + // Kind.String to indicate NumberKind. + + // NumberKind represents any kind of number. + NumberKind = IntKind | FloatKind + + TopKind = adt.TopKind +) + +// An structValue represents a JSON object. +// +// TODO: remove +type structValue struct { + ctx *context + v Value + obj *adt.Vertex + features []adt.Feature +} + +// Len reports the number of fields in this struct. +func (o *structValue) Len() int { + if o.obj == nil { + return 0 + } + return len(o.features) +} + +// At reports the key and value of the ith field, i < o.Len(). +func (o *structValue) At(i int) (key string, v Value) { + f := o.features[i] + return o.ctx.LabelStr(f), newChildValue(o, i) +} + +func (o *structValue) at(i int) (v *adt.Vertex, isOpt bool) { + f := o.features[i] + arc := o.obj.Lookup(f) + if arc == nil { + arc = &adt.Vertex{ + Parent: o.v.v, + Label: f, + } + o.obj.MatchAndInsert(o.ctx.opCtx, arc) + arc.Finalize(o.ctx.opCtx) + isOpt = true + } + return arc, isOpt +} + +// Lookup reports the field for the given key. The returned Value is invalid +// if it does not exist. +func (o *structValue) Lookup(key string) Value { + f := o.ctx.StrLabel(key) + i := 0 + len := o.Len() + for ; i < len; i++ { + if o.features[i] == f { + break + } + } + if i == len { + // TODO: better message. 
+ ctx := o.ctx + x := ctx.mkErr(o.obj, codeNotExist, "value %q not found", key) + return newErrValue(o.v, x) + } + return newChildValue(o, i) +} + +// MarshalJSON returns a valid JSON encoding or reports an error if any of the +// fields is invalid. +func (o *structValue) marshalJSON() (b []byte, err errors.Error) { + b = append(b, '{') + n := o.Len() + for i := 0; i < n; i++ { + k, v := o.At(i) + s, err := json.Marshal(k) + if err != nil { + return nil, unwrapJSONError(err) + } + b = append(b, s...) + b = append(b, ':') + bb, err := json.Marshal(v) + if err != nil { + return nil, unwrapJSONError(err) + } + b = append(b, bb...) + if i < n-1 { + b = append(b, ',') + } + } + b = append(b, '}') + return b, nil +} + +var _ errors.Error = &marshalError{} + +type marshalError struct { + err errors.Error + b *adt.Bottom +} + +func toMarshalErr(v Value, b *adt.Bottom) error { + return &marshalError{v.toErr(b), b} +} + +func marshalErrf(v Value, src adt.Node, code errCode, msg string, args ...interface{}) error { + arguments := append([]interface{}{code, msg}, args...) + b := v.idx.mkErr(src, arguments...) + return toMarshalErr(v, b) +} + +func (e *marshalError) Error() string { + return fmt.Sprintf("cue: marshal error: %v", e.err) +} + +func (e *marshalError) Bottom() *adt.Bottom { return e.b } +func (e *marshalError) Path() []string { return e.err.Path() } +func (e *marshalError) Msg() (string, []interface{}) { return e.err.Msg() } +func (e *marshalError) Position() token.Pos { return e.err.Position() } +func (e *marshalError) InputPositions() []token.Pos { + return e.err.InputPositions() +} + +func unwrapJSONError(err error) errors.Error { + switch x := err.(type) { + case *json.MarshalerError: + return unwrapJSONError(x.Err) + case *marshalError: + return x + case errors.Error: + return &marshalError{x, nil} + default: + return &marshalError{errors.Wrapf(err, token.NoPos, "json error"), nil} + } +} + +// An Iterator iterates over values. 
+// +type Iterator struct { + val Value + ctx *context + arcs []field + p int + cur Value + f adt.Feature + isOpt bool +} + +type field struct { + arc *adt.Vertex + isOptional bool +} + +// Next advances the iterator to the next value and reports whether there was +// any. It must be called before the first call to Value or Key. +func (i *Iterator) Next() bool { + if i.p >= len(i.arcs) { + i.cur = Value{} + return false + } + f := i.arcs[i.p] + f.arc.Finalize(i.ctx.opCtx) + i.cur = makeValue(i.val.idx, f.arc) + i.f = f.arc.Label + i.isOpt = f.isOptional + i.p++ + return true +} + +// Value returns the current value in the list. It will panic if Next advanced +// past the last entry. +func (i *Iterator) Value() Value { + return i.cur +} + +func (i *Iterator) Feature() adt.Feature { + return i.f +} + +// Label reports the label of the value if i iterates over struct fields and +// "" otherwise. +func (i *Iterator) Label() string { + if i.f == 0 { + return "" + } + return i.ctx.LabelStr(i.f) +} + +// IsHidden reports if a field is hidden from the data model. +func (i *Iterator) IsHidden() bool { + return i.f.IsHidden() +} + +// IsOptional reports if a field is optional. +func (i *Iterator) IsOptional() bool { + return i.isOpt +} + +// IsDefinition reports if a field is a definition. +func (i *Iterator) IsDefinition() bool { + return i.f.IsDef() +} + +// marshalJSON iterates over the list and generates JSON output. HasNext +// will return false after this operation. +func marshalList(l *Iterator) (b []byte, err errors.Error) { + b = append(b, '[') + if l.Next() { + for i := 0; ; i++ { + x, err := json.Marshal(l.Value()) + if err != nil { + return nil, unwrapJSONError(err) + } + b = append(b, x...) 
+ if !l.Next() { + break + } + b = append(b, ',') + } + } + b = append(b, ']') + return b, nil +} + +func (v Value) getNum(k adt.Kind) (*adt.Num, errors.Error) { + v, _ = v.Default() + ctx := v.ctx() + if err := v.checkKind(ctx, k); err != nil { + return nil, v.toErr(err) + } + n, _ := v.eval(ctx).(*adt.Num) + return n, nil +} + +// MantExp breaks x into its mantissa and exponent components and returns the +// exponent. If a non-nil mant argument is provided its value is set to the +// mantissa of x. The components satisfy x == mant × 10**exp. It returns an +// error if v is not a number. +// +// The components are not normalized. For instance, 2.00 is represented mant == +// 200 and exp == -2. Calling MantExp with a nil argument is an efficient way to +// get the exponent of the receiver. +func (v Value) MantExp(mant *big.Int) (exp int, err error) { + n, err := v.getNum(adt.NumKind) + if err != nil { + return 0, err + } + if n.X.Form != 0 { + return 0, ErrInfinite + } + if mant != nil { + mant.Set(&n.X.Coeff) + if n.X.Negative { + mant.Neg(mant) + } + } + return int(n.X.Exponent), nil +} + +// Decimal is for internal use only. The Decimal type that is returned is +// subject to change. +func (v Value) Decimal() (d *internal.Decimal, err error) { + n, err := v.getNum(adt.NumKind) + if err != nil { + return nil, err + } + return &n.X, nil +} + +// AppendInt appends the string representation of x in the given base to buf and +// returns the extended buffer, or an error if the underlying number was not +// an integer. +func (v Value) AppendInt(buf []byte, base int) ([]byte, error) { + i, err := v.Int(nil) + if err != nil { + return nil, err + } + return i.Append(buf, base), nil +} + +// AppendFloat appends to buf the string form of the floating-point number x. +// It returns an error if v is not a number. 
+func (v Value) AppendFloat(buf []byte, fmt byte, prec int) ([]byte, error) { + n, err := v.getNum(adt.NumKind) + if err != nil { + return nil, err + } + ctx := apd.BaseContext + nd := int(apd.NumDigits(&n.X.Coeff)) + int(n.X.Exponent) + if n.X.Form == apd.Infinite { + if n.X.Negative { + buf = append(buf, '-') + } + return append(buf, string('∞')...), nil + } + if fmt == 'f' && nd > 0 { + ctx.Precision = uint32(nd + prec) + } else { + ctx.Precision = uint32(prec) + } + var d apd.Decimal + ctx.Round(&d, &n.X) + return d.Append(buf, fmt), nil +} + +var ( + // ErrBelow indicates that a value was rounded down in a conversion. + ErrBelow = errors.New("value was rounded down") + + // ErrAbove indicates that a value was rounded up in a conversion. + ErrAbove = errors.New("value was rounded up") + + // ErrInfinite indicates that a value is infinite. + ErrInfinite = errors.New("infinite") +) + +// Int converts the underlying integral number to an big.Int. It reports an +// error if the underlying value is not an integer type. If a non-nil *Int +// argument z is provided, Int stores the result in z instead of allocating a +// new Int. +func (v Value) Int(z *big.Int) (*big.Int, error) { + n, err := v.getNum(adt.IntKind) + if err != nil { + return nil, err + } + if z == nil { + z = &big.Int{} + } + if n.X.Exponent != 0 { + panic("cue: exponent should always be nil for integer types") + } + z.Set(&n.X.Coeff) + if n.X.Negative { + z.Neg(z) + } + return z, nil +} + +// Int64 converts the underlying integral number to int64. It reports an +// error if the underlying value is not an integer type or cannot be represented +// as an int64. The result is (math.MinInt64, ErrAbove) for x < math.MinInt64, +// and (math.MaxInt64, ErrBelow) for x > math.MaxInt64. 
func (v Value) Int64() (int64, error) {
    n, err := v.getNum(adt.IntKind)
    if err != nil {
        return 0, err
    }
    if !n.X.Coeff.IsInt64() {
        // Out of range: saturate and report on which side.
        if n.X.Negative {
            return math.MinInt64, ErrAbove
        }
        return math.MaxInt64, ErrBelow
    }
    i := n.X.Coeff.Int64()
    if n.X.Negative {
        i = -i
    }
    return i, nil
}

// Uint64 converts the underlying integral number to uint64. It reports an
// error if the underlying value is not an integer type or cannot be represented
// as a uint64. The result is (0, ErrAbove) for x < 0, and
// (math.MaxUint64, ErrBelow) for x > math.MaxUint64.
func (v Value) Uint64() (uint64, error) {
    n, err := v.getNum(adt.IntKind)
    if err != nil {
        return 0, err
    }
    if n.X.Negative {
        return 0, ErrAbove
    }
    if !n.X.Coeff.IsUint64() {
        return math.MaxUint64, ErrBelow
    }
    i := n.X.Coeff.Uint64()
    return i, nil
}

// trimZeros trims 0's for better JSON representations.
func trimZeros(s string) string {
    n1 := len(s)
    s2 := strings.TrimRight(s, "0")
    n2 := len(s2)
    if p := strings.IndexByte(s2, '.'); p != -1 {
        // Keep one digit after the decimal point.
        if p == n2-1 {
            return s[:len(s2)+1]
        }
        return s2
    }
    // Few trailing zeros: keep the plain form; otherwise use exponent notation.
    if n1-n2 <= 4 {
        return s
    }
    return fmt.Sprint(s2, "e+", n1-n2)
}

// Decimal representations of the float64 range limits, initialized in init.
var (
    smallestPosFloat64 *apd.Decimal
    smallestNegFloat64 *apd.Decimal
    maxPosFloat64      *apd.Decimal
    maxNegFloat64      *apd.Decimal
)

func init() {
    const (
        // math.SmallestNonzeroFloat64: 1 / 2**(1023 - 1 + 52)
        smallest = "4.940656458412465441765687928682213723651e-324"
        // math.MaxFloat64: 2**1023 * (2**53 - 1) / 2**52
        max = "1.797693134862315708145274237317043567981e+308"
    )
    ctx := apd.BaseContext
    ctx.Precision = 40

    var err error
    smallestPosFloat64, _, err = ctx.NewFromString(smallest)
    if err != nil {
        panic(err)
    }
    smallestNegFloat64, _, err = ctx.NewFromString("-" + smallest)
    if err != nil {
        panic(err)
    }
    maxPosFloat64, _, err = ctx.NewFromString(max)
    if err != nil {
        panic(err)
    }
    maxNegFloat64, _, err = ctx.NewFromString("-" + max)
    if err != nil {
        panic(err)
    }
}

// Float64 returns the float64 value nearest to x. It reports an error if v is
// not a number. If x is too small to be represented by a float64 (|x| <
// math.SmallestNonzeroFloat64), the result is (0, ErrBelow) or (-0, ErrAbove),
// respectively, depending on the sign of x. If x is too large to be represented
// by a float64 (|x| > math.MaxFloat64), the result is (+Inf, ErrAbove) or
// (-Inf, ErrBelow), depending on the sign of x.
func (v Value) Float64() (float64, error) {
    n, err := v.getNum(adt.NumKind)
    if err != nil {
        return 0, err
    }
    if n.X.Negative {
        if n.X.Cmp(smallestNegFloat64) == 1 {
            return -0, ErrAbove
        }
        if n.X.Cmp(maxNegFloat64) == -1 {
            return math.Inf(-1), ErrBelow
        }
    } else {
        if n.X.Cmp(smallestPosFloat64) == -1 {
            return 0, ErrBelow
        }
        if n.X.Cmp(maxPosFloat64) == 1 {
            return math.Inf(1), ErrAbove
        }
    }
    f, _ := n.X.Float64()
    return f, nil
}

// appendPath appends the string form of each label on v's path to a. String
// labels that are not valid identifiers are quoted; definitions and hidden
// fields are appended verbatim.
func (v Value) appendPath(a []string) []string {
    for _, f := range v.v.Path() {
        switch f.Typ() {
        case adt.IntLabel:
            a = append(a, strconv.FormatInt(int64(f.Index()), 10))

        case adt.StringLabel:
            label := v.idx.LabelStr(f)
            if !f.IsDef() && !f.IsHidden() {
                if !ast.IsValidIdent(label) {
                    label = literal.String.Quote(label)
                }
            }
            a = append(a, label)
        default:
            a = append(a, f.SelectorString(v.idx.Runtime))
        }
    }
    return a
}

// Value holds any value, which may be a Boolean, Error, List, Null, Number,
// Struct, or String.
type Value struct {
    idx *index
    v   *adt.Vertex
}

// newErrValue wraps the error b in a finalized Vertex that mirrors v's label
// and parent (when available), so the error is reported at v's position.
func newErrValue(v Value, b *adt.Bottom) Value {
    node := &adt.Vertex{BaseValue: b}
    if v.v != nil {
        node.Label = v.v.Label
        node.Parent = v.v.Parent
    }
    node.UpdateStatus(adt.Finalized)
    node.AddConjunct(adt.MakeRootConjunct(nil, b))
    return makeValue(v.idx, node)
}

// newVertexRoot finalizes x in ctx (or just marks it finalized when no
// OpContext is available) and wraps it in a Value.
func newVertexRoot(ctx *context, x *adt.Vertex) Value {
    if ctx.opCtx != nil {
        // This is indicative of a zero Value. In some cases this is called
        // with an error value.
        x.Finalize(ctx.opCtx)
    } else {
        x.UpdateStatus(adt.Finalized)
    }
    return makeValue(ctx.index, x)
}

// newValueRoot wraps an arbitrary expression in a root Vertex and finalizes it.
func newValueRoot(ctx *context, x adt.Expr) Value {
    if n, ok := x.(*adt.Vertex); ok {
        return newVertexRoot(ctx, n)
    }
    node := &adt.Vertex{}
    node.AddConjunct(adt.MakeRootConjunct(nil, x))
    return newVertexRoot(ctx, node)
}

// newChildValue returns the ith field of o as a Value.
func newChildValue(o *structValue, i int) Value {
    arc, _ := o.at(i)
    return makeValue(o.v.idx, arc)
}

// Dereference reports the value v refers to if v is a reference or v itself
// otherwise.
func Dereference(v Value) Value {
    n := v.v
    if n == nil || len(n.Conjuncts) != 1 {
        return v
    }

    c := n.Conjuncts[0]
    r, _ := c.Expr().(adt.Resolver)
    if r == nil {
        return v
    }

    ctx := v.ctx()
    n, b := ctx.opCtx.Resolve(c.Env, r)
    if b != nil {
        return newErrValue(v, b)
    }
    n.Finalize(ctx.opCtx)
    return makeValue(v.idx, n)
}

// MakeValue converts an adt.Value and given OpContext to a Value. The context
// must be directly or indirectly obtained from the NewRuntime defined in this
// package and it will panic if this is not the case.
//
// For internal use only.
func MakeValue(ctx *adt.OpContext, v adt.Value) Value {
    runtime := ctx.Impl().(*runtime.Runtime)
    index := runtime.Data.(*index)

    return newValueRoot(index.newContext(), v)
}

// makeValue wraps a Vertex in a Value. It panics when v has not been
// evaluated (zero status or nil BaseValue), guarding the package invariant
// that all exposed Values are at least partially evaluated.
func makeValue(idx *index, v *adt.Vertex) Value {
    if v.Status() == 0 || v.BaseValue == nil {
        panic(fmt.Sprintf("not properly initialized (state: %v, value: %T)",
            v.Status(), v.BaseValue))
    }
    return Value{idx, v}
}

// remakeValue evaluates expression v in env, positioned at base's label and
// parent.
func remakeValue(base Value, env *adt.Environment, v adt.Expr) Value {
    // TODO: right now this is necessary because disjunctions do not have
    // populated conjuncts.
    if v, ok := v.(*adt.Vertex); ok && v.Status() >= adt.Partial {
        return Value{base.idx, v}
    }
    n := &adt.Vertex{Label: base.v.Label}
    n.AddConjunct(adt.MakeRootConjunct(env, v))
    n = base.ctx().manifest(n)
    n.Parent = base.v.Parent
    return makeValue(base.idx, n)
}

// remakeFinal wraps the already-evaluated value v in a finalized Vertex that
// shares base's parent and label.
func remakeFinal(base Value, env *adt.Environment, v adt.Value) Value {
    n := &adt.Vertex{Parent: base.v.Parent, Label: base.v.Label, BaseValue: v}
    n.UpdateStatus(adt.Finalized)
    return makeValue(base.idx, n)
}

// ctx returns a fresh evaluation context for v's runtime.
func (v Value) ctx() *context {
    return v.idx.newContext()
}

// makeChild wraps arc a as a child Value of v.
func (v Value) makeChild(ctx *context, i uint32, a *adt.Vertex) Value {
    a.Parent = v.v
    return makeValue(v.idx, a)
}

// Eval resolves the references of a value and returns the result.
// This method is not necessary to obtain concrete values.
func (v Value) Eval() Value {
    if v.v == nil {
        return v
    }
    x := v.v
    // x = eval.FinalizeValue(v.idx.Runtime, v.v)
    // x.Finalize(v.ctx().opCtx)
    x = x.ToDataSingle()
    return makeValue(v.idx, x)
    // return remakeValue(v, nil, ctx.value(x))
}

// Default reports the default value and whether it existed. It returns the
// normal value if there is no default.
func (v Value) Default() (Value, bool) {
    if v.v == nil {
        return v, false
    }

    d := v.v.Default()
    if d == v.v {
        return v, false
    }
    return makeValue(v.idx, d), true

    // d, ok := v.v.Value.(*adt.Disjunction)
    // if !ok {
    //  return v, false
    // }

    // var w *adt.Vertex

    // switch d.NumDefaults {
    // case 0:
    //  return v, false

    // case 1:
    //  w = d.Values[0]

    // default:
    //  x := *v.v
    //  x.Value = &adt.Disjunction{
    //      Src:         d.Src,
    //      Values:      d.Values[:d.NumDefaults],
    //      NumDefaults: 0,
    //  }
    //  w = &x
    // }

    // w.Conjuncts = nil
    // for _, c := range v.v.Conjuncts {
    //  // TODO: preserve field information.
    //  expr, _ := stripNonDefaults(c.Expr())
    //  w.AddConjunct(adt.MakeConjunct(c.Env, expr))
    // }

    // return makeValue(v.idx, w), true

    // if !stripped {
    //  return v, false
    // }

    // n := *v.v
    // n.Conjuncts = conjuncts
    // return Value{v.idx, &n}, true

    // isDefault := false
    // for _, c := range v.v.Conjuncts {
    //  if hasDisjunction(c.Expr()) {
    //      isDefault = true
    //      break
    //  }
    // }

    // if !isDefault {
    //  return v, false
    // }

    // TODO: record expanded disjunctions in output.
    // - Rename Disjunction to DisjunctionExpr
    // - Introduce Disjuncts with Values.
    // - In Expr introduce Star
    // - Don't pick default by default?

    // Evaluate the value.
    // x := eval.FinalizeValue(v.idx.Runtime, v.v)
    // if b, _ := x.Value.(*adt.Bottom); b != nil { // && b.IsIncomplete() {
    //  return v, false
    // }
    // // Finalize and return here.
    // return Value{v.idx, x}, isDefault
}

// hasDisjunction reports whether expr contains a disjunction anywhere in its
// conjunction/binary-expression structure.
// TODO: this should go: record preexpanded disjunctions in Vertex.
func hasDisjunction(expr adt.Expr) bool {
    switch x := expr.(type) {
    case *adt.DisjunctionExpr:
        return true
    case *adt.Conjunction:
        for _, v := range x.Values {
            if hasDisjunction(v) {
                return true
            }
        }
    case *adt.BinaryExpr:
        switch x.Op {
        case adt.OrOp:
            return true
        case adt.AndOp:
            return hasDisjunction(x.X) || hasDisjunction(x.Y)
        }
    }
    return false
}

// TODO: this should go: record preexpanded disjunctions in Vertex.
+func stripNonDefaults(expr adt.Expr) (r adt.Expr, stripped bool) { + switch x := expr.(type) { + case *adt.DisjunctionExpr: + if !x.HasDefaults { + return x, false + } + d := *x + d.Values = []adt.Disjunct{} + for _, v := range x.Values { + if v.Default { + d.Values = append(d.Values, v) + } + } + if len(d.Values) == 1 { + return d.Values[0].Val, true + } + return &d, true + + case *adt.BinaryExpr: + if x.Op != adt.AndOp { + return x, false + } + a, sa := stripNonDefaults(x.X) + b, sb := stripNonDefaults(x.Y) + if sa || sb { + bin := *x + bin.X = a + bin.Y = b + return &bin, true + } + return x, false + + default: + return x, false + } +} + +// Label reports he label used to obtain this value from the enclosing struct. +// +// TODO: get rid of this somehow. Probably by including a FieldInfo struct +// or the like. +func (v Value) Label() (string, bool) { + if v.v == nil || v.v.Label == 0 { + return "", false + } + return v.idx.LabelStr(v.v.Label), true +} + +// Kind returns the kind of value. It returns BottomKind for atomic values that +// are not concrete. For instance, it will return BottomKind for the bounds +// >=0. +func (v Value) Kind() Kind { + if v.v == nil { + return BottomKind + } + c := v.v.BaseValue + if !v.v.IsConcrete() { + return BottomKind + } + if v.IncompleteKind() == adt.ListKind && !v.IsClosed() { + return BottomKind + } + return c.Kind() +} + +// IncompleteKind returns a mask of all kinds that this value may be. +func (v Value) IncompleteKind() Kind { + if v.v == nil { + return BottomKind + } + return v.v.Kind() +} + +// MarshalJSON marshalls this value into valid JSON. 
func (v Value) MarshalJSON() (b []byte, err error) {
    b, err = v.marshalJSON()
    if err != nil {
        return nil, unwrapJSONError(err)
    }
    return b, nil
}

// marshalJSON does the actual work of MarshalJSON, dispatching on the
// evaluated kind of v. Errors are wrapped for callers by MarshalJSON.
func (v Value) marshalJSON() (b []byte, err error) {
    v, _ = v.Default()
    if v.v == nil {
        return json.Marshal(nil)
    }
    ctx := v.idx.newContext()
    x := v.eval(ctx)

    if _, ok := x.(adt.Resolver); ok {
        return nil, marshalErrf(v, x, codeIncomplete, "value %q contains unresolved references", ctx.str(x))
    }
    if !adt.IsConcrete(x) {
        return nil, marshalErrf(v, x, codeIncomplete, "cannot convert incomplete value %q to JSON", ctx.str(x))
    }

    // TODO: implement marshalers in value.
    switch k := x.Kind(); k {
    case adt.NullKind:
        return json.Marshal(nil)
    case adt.BoolKind:
        return json.Marshal(x.(*adt.Bool).B)
    case adt.IntKind, adt.FloatKind, adt.NumKind:
        b, err := x.(*adt.Num).X.MarshalText()
        // Strip any leading '+': JSON numbers may not carry an explicit
        // plus sign.
        b = bytes.TrimLeft(b, "+")
        return b, err
    case adt.StringKind:
        return json.Marshal(x.(*adt.String).Str)
    case adt.BytesKind:
        return json.Marshal(x.(*adt.Bytes).B)
    case adt.ListKind:
        i, _ := v.List()
        return marshalList(&i)
    case adt.StructKind:
        obj, err := v.structValData(ctx)
        if err != nil {
            return nil, toMarshalErr(v, err)
        }
        return obj.marshalJSON()
    case adt.BottomKind:
        return nil, toMarshalErr(v, x.(*adt.Bottom))
    default:
        return nil, marshalErrf(v, x, 0, "cannot convert value %q of type %T to JSON", ctx.str(x), x)
    }
}

// Syntax converts the possibly partially evaluated value into syntax. This
// can be used to print the value with package format.
func (v Value) Syntax(opts ...Option) ast.Node {
    // TODO: the default should ideally be simplified representation that
    // exactly represents the value. The latter can currently only be
    // ensured with Raw().
    if v.v == nil {
        return nil
    }
    var o options = getOptions(opts)
    // var inst *Instance

    p := export.Profile{
        Simplify:        !o.raw,
        TakeDefaults:    o.final,
        ShowOptional:    !o.omitOptional && !o.concrete,
        ShowDefinitions: !o.omitDefinitions && !o.concrete,
        ShowHidden:      !o.omitHidden && !o.concrete,
        ShowAttributes:  !o.omitAttrs,
        ShowDocs:        o.docs,
    }

    pkgID := v.instance().ID()

    // bad turns an internal export error into a commented BadExpr so the
    // failure surfaces in the printed output rather than panicking.
    bad := func(name string, err error) ast.Node {
        const format = `"%s: internal error
Error: %s

Profile:
%#v

Value:
%v

You could file a bug with the above information at:
    https://github.com/cuelang/cue/issues/new?assignees=&labels=NeedsInvestigation&template=bug_report.md&title=.
`
        cg := &ast.CommentGroup{Doc: true}
        msg := fmt.Sprintf(format, name, err, p, v)
        for _, line := range strings.Split(msg, "\n") {
            cg.List = append(cg.List, &ast.Comment{Text: "// " + line})
        }
        x := &ast.BadExpr{}
        ast.AddComment(x, cg)
        return x
    }

    // var expr ast.Expr
    var err error
    var f *ast.File
    if o.concrete || o.final {
        // inst = v.instance()
        var expr ast.Expr
        expr, err = p.Value(v.idx.Runtime, pkgID, v.v)
        if err != nil {
            return bad(`"cuelang.org/go/internal/core/export".Value`, err)
        }

        // This introduces gratuitous unshadowing!
        f, err = astutil.ToFile(expr)
        if err != nil {
            return bad(`"cuelang.org/go/ast/astutil".ToFile`, err)
        }
        // return expr
    } else {
        f, err = p.Def(v.idx.Runtime, pkgID, v.v)
        if err != nil {
            return bad(`"cuelang.org/go/internal/core/export".Def`, err)
        }
    }

    // A file with package or import declarations must stay a file; a file of
    // only comments/attributes can be reduced below.
outer:
    for _, d := range f.Decls {
        switch d.(type) {
        case *ast.Package, *ast.ImportDecl:
            return f
        case *ast.CommentGroup, *ast.Attribute:
        default:
            break outer
        }
    }

    if len(f.Decls) == 1 {
        if e, ok := f.Decls[0].(*ast.EmbedDecl); ok {
            return e.Expr
        }
    }
    return &ast.StructLit{
        Elts: f.Decls,
    }
}

// Decode initializes x with Value v.
If x is a struct, it will validate the +// constraints specified in the field tags. +func (v Value) Decode(x interface{}) error { + // TODO: optimize + b, err := v.MarshalJSON() + if err != nil { + return err + } + return json.Unmarshal(b, x) +} + +// // EncodeJSON generates JSON for the given value. +// func (v Value) EncodeJSON(w io.Writer, v Value) error { +// return nil +// } + +// Doc returns all documentation comments associated with the field from which +// the current value originates. +func (v Value) Doc() []*ast.CommentGroup { + if v.v == nil { + return nil + } + return export.ExtractDoc(v.v) +} + +// Split returns a list of values from which v originated such that +// the unification of all these values equals v and for all returned values. +// It will also split unchecked unifications (embeddings), so unifying the +// split values may fail if actually unified. +// Source returns a non-nil value. +// +// Deprecated: use Expr. +func (v Value) Split() []Value { + if v.v == nil { + return nil + } + a := []Value{} + for _, x := range v.v.Conjuncts { + a = append(a, remakeValue(v, x.Env, x.Expr())) + } + return a +} + +// Source returns the original node for this value. The return value may not +// be a syntax.Expr. For instance, a struct kind may be represented by a +// struct literal, a field comprehension, or a file. It returns nil for +// computed nodes. Use Split to get all source values that apply to a field. +func (v Value) Source() ast.Node { + if v.v == nil { + return nil + } + if len(v.v.Conjuncts) == 1 { + return v.v.Conjuncts[0].Source() + } + return v.v.Value().Source() +} + +// Err returns the error represented by v or nil v is not an error. +func (v Value) Err() error { + if err := v.checkKind(v.ctx(), adt.BottomKind); err != nil { + return v.toErr(err) + } + return nil +} + +// Pos returns position information. 
+func (v Value) Pos() token.Pos { + if v.v == nil || v.Source() == nil { + return token.NoPos + } + pos := v.Source().Pos() + return pos +} + +// TODO: IsFinal: this value can never be changed. + +// IsClosed reports whether a list of struct is closed. It reports false when +// when the value is not a list or struct. +func (v Value) IsClosed() bool { + if v.v == nil { + return false + } + return v.v.IsClosed(v.ctx().opCtx) +} + +// IsConcrete reports whether the current value is a concrete scalar value +// (not relying on default values), a terminal error, a list, or a struct. +// It does not verify that values of lists or structs are concrete themselves. +// To check whether there is a concrete default, use v.Default().IsConcrete(). +func (v Value) IsConcrete() bool { + if v.v == nil { + return false // any is neither concrete, not a list or struct. + } + if b, ok := v.v.BaseValue.(*adt.Bottom); ok { + return !b.IsIncomplete() + } + if !adt.IsConcrete(v.v) { + return false + } + if v.IncompleteKind() == adt.ListKind && !v.IsClosed() { + return false + } + return true +} + +// // Deprecated: IsIncomplete +// // +// // It indicates that the value cannot be fully evaluated due to +// // insufficient information. +// func (v Value) IsIncomplete() bool { +// panic("deprecated") +// } + +// Exists reports whether this value existed in the configuration. 
+func (v Value) Exists() bool { + if v.v == nil { + return false + } + if err, ok := v.v.BaseValue.(*adt.Bottom); ok { + return err.Code != codeNotExist + } + return true +} + +func (v Value) checkKind(ctx *context, want adt.Kind) *adt.Bottom { + if v.v == nil { + return errNotExists + } + // TODO: use checkKind + x := v.eval(ctx) + if b, ok := x.(*adt.Bottom); ok { + return b + } + k := x.Kind() + if want != adt.BottomKind { + if k&want == adt.BottomKind { + return ctx.mkErr(x, "cannot use value %v (type %s) as %s", + ctx.opCtx.Str(x), k, want) + } + if !adt.IsConcrete(x) { + return ctx.mkErr(x, codeIncomplete, "non-concrete value %v", k) + } + } + return nil +} + +func makeInt(v Value, x int64) Value { + n := &adt.Num{K: adt.IntKind} + n.X.SetInt64(int64(x)) + return remakeFinal(v, nil, n) +} + +// Len returns the number of items of the underlying value. +// For lists it reports the capacity of the list. For structs it indicates the +// number of fields, for bytes the number of bytes. +func (v Value) Len() Value { + if v.v != nil { + switch x := v.eval(v.ctx()).(type) { + case *adt.Vertex: + if x.IsList() { + ctx := v.ctx() + n := &adt.Num{K: adt.IntKind} + n.X.SetInt64(int64(len(x.Elems()))) + if x.IsClosed(ctx.opCtx) { + return remakeFinal(v, nil, n) + } + // Note: this HAS to be a Conjunction value and cannot be + // an adt.BinaryExpr, as the expressions would be considered + // to be self-contained and unresolvable when evaluated + // (can never become concrete). + c := &adt.Conjunction{Values: []adt.Value{ + &adt.BasicType{K: adt.IntKind}, + &adt.BoundValue{Op: adt.GreaterEqualOp, Value: n}, + }} + return remakeFinal(v, nil, c) + + } + case *adt.Bytes: + return makeInt(v, int64(len(x.B))) + case *adt.String: + return makeInt(v, int64(len([]rune(x.Str)))) + } + } + const msg = "len not supported for type %v" + return remakeValue(v, nil, v.ctx().mkErr(v.v, msg, v.Kind())) + +} + +// Elem returns the value of undefined element types of lists and structs. 
+func (v Value) Elem() (Value, bool) { + if v.v == nil { + return Value{}, false + } + ctx := v.ctx().opCtx + x := &adt.Vertex{ + Parent: v.v, + Label: 0, + } + v.v.Finalize(ctx) + v.v.MatchAndInsert(ctx, x) + if len(x.Conjuncts) == 0 { + return Value{}, false + } + x.Finalize(ctx) + return makeValue(v.idx, x), true +} + +// // BulkOptionals returns all bulk optional fields as key-value pairs. +// // See also Elem and Template. +// func (v Value) BulkOptionals() [][2]Value { +// x, ok := v.path.cache.(*structLit) +// if !ok { +// return nil +// } +// return v.appendBulk(nil, x.optionals) +// } + +// func (v Value) appendBulk(a [][2]Value, x *optionals) [][2]Value { +// if x == nil { +// return a +// } +// a = v.appendBulk(a, x.left) +// a = v.appendBulk(a, x.right) +// for _, set := range x.fields { +// if set.key != nil { +// ctx := v.ctx() +// fn, ok := ctx.manifest(set.value).(*lambdaExpr) +// if !ok { +// // create error +// continue +// } +// x := fn.call(ctx, set.value, &basicType{K: stringKind}) + +// a = append(a, [2]Value{v.makeElem(set.key), v.makeElem(x)}) +// } +// } +// return a +// } + +// List creates an iterator over the values of a list or reports an error if +// v is not a list. +func (v Value) List() (Iterator, error) { + v, _ = v.Default() + ctx := v.ctx() + if err := v.checkKind(ctx, adt.ListKind); err != nil { + return Iterator{ctx: ctx}, v.toErr(err) + } + arcs := []field{} + for _, a := range v.v.Elems() { + if a.Label.IsInt() { + arcs = append(arcs, field{arc: a}) + } + } + return Iterator{ctx: ctx, val: v, arcs: arcs}, nil +} + +// Null reports an error if v is not null. +func (v Value) Null() error { + v, _ = v.Default() + if err := v.checkKind(v.ctx(), adt.NullKind); err != nil { + return v.toErr(err) + } + return nil +} + +// // IsNull reports whether v is null. +// func (v Value) IsNull() bool { +// return v.Null() == nil +// } + +// Bool returns the bool value of v or false and an error if v is not a boolean. 
+func (v Value) Bool() (bool, error) { + v, _ = v.Default() + ctx := v.ctx() + if err := v.checkKind(ctx, adt.BoolKind); err != nil { + return false, v.toErr(err) + } + return v.eval(ctx).(*adt.Bool).B, nil +} + +// String returns the string value if v is a string or an error otherwise. +func (v Value) String() (string, error) { + v, _ = v.Default() + ctx := v.ctx() + if err := v.checkKind(ctx, adt.StringKind); err != nil { + return "", v.toErr(err) + } + return v.eval(ctx).(*adt.String).Str, nil +} + +// Bytes returns a byte slice if v represents a list of bytes or an error +// otherwise. +func (v Value) Bytes() ([]byte, error) { + v, _ = v.Default() + ctx := v.ctx() + switch x := v.eval(ctx).(type) { + case *adt.Bytes: + return append([]byte(nil), x.B...), nil + case *adt.String: + return []byte(x.Str), nil + } + return nil, v.toErr(v.checkKind(ctx, adt.BytesKind|adt.StringKind)) +} + +// Reader returns a new Reader if v is a string or bytes type and an error +// otherwise. +func (v Value) Reader() (io.Reader, error) { + v, _ = v.Default() + ctx := v.ctx() + switch x := v.eval(ctx).(type) { + case *adt.Bytes: + return bytes.NewReader(x.B), nil + case *adt.String: + return strings.NewReader(x.Str), nil + } + return nil, v.toErr(v.checkKind(ctx, adt.StringKind|adt.BytesKind)) +} + +// TODO: distinguish between optional, hidden, etc. Probably the best approach +// is to mark options in context and have a single function for creating +// a structVal. + +// structVal returns an structVal or an error if v is not a struct. +func (v Value) structValData(ctx *context) (structValue, *adt.Bottom) { + return v.structValOpts(ctx, options{ + omitHidden: true, + omitDefinitions: true, + omitOptional: true, + }) +} + +func (v Value) structValFull(ctx *context) (structValue, *adt.Bottom) { + return v.structValOpts(ctx, options{allowScalar: true}) +} + +// structVal returns an structVal or an error if v is not a struct. 
+func (v Value) structValOpts(ctx *context, o options) (s structValue, err *adt.Bottom) { + v, _ = v.Default() + + obj := v.v + + if !o.allowScalar { + obj, err = v.getStruct() + if err != nil { + return structValue{}, err + } + } + + features := export.VertexFeatures(obj) + + k := 0 + for _, f := range features { + if f.IsDef() && (o.omitDefinitions || o.concrete) { + continue + } + if f.IsHidden() && o.omitHidden { + continue + } + if arc := obj.Lookup(f); arc == nil { + if o.omitOptional { + continue + } + // ensure it really exists. + v := adt.Vertex{ + Parent: obj, + Label: f, + } + obj.MatchAndInsert(ctx.opCtx, &v) + if len(v.Conjuncts) == 0 { + continue + } + } + features[k] = f + k++ + } + features = features[:k] + return structValue{ctx, v, obj, features}, nil +} + +// Struct returns the underlying struct of a value or an error if the value +// is not a struct. +func (v Value) Struct() (*Struct, error) { + ctx := v.ctx() + obj, err := v.structValOpts(ctx, options{}) + if err != nil { + return nil, v.toErr(err) + } + return &Struct{obj}, nil +} + +func (v Value) getStruct() (*adt.Vertex, *adt.Bottom) { + ctx := v.ctx() + if err := v.checkKind(ctx, adt.StructKind); err != nil { + if !err.ChildError { + return nil, err + } + } + return v.v, nil +} + +// Struct represents a CUE struct value. +type Struct struct { + structValue +} + +// FieldInfo contains information about a struct field. +type FieldInfo struct { + Selector string + Name string // Deprecated: use Selector + Pos int + Value Value + + IsDefinition bool + IsOptional bool + IsHidden bool +} + +func (s *Struct) Len() int { + return s.structValue.Len() +} + +// field reports information about the ith field, i < o.Len(). 
+func (s *Struct) Field(i int) FieldInfo { + a, opt := s.at(i) + ctx := s.v.ctx() + + v := makeValue(s.v.idx, a) + name := ctx.LabelStr(a.Label) + str := a.Label.SelectorString(ctx.opCtx) + return FieldInfo{str, name, i, v, a.Label.IsDef(), opt, a.Label.IsHidden()} +} + +// FieldByName looks up a field for the given name. If isIdent is true, it will +// look up a definition or hidden field (starting with `_` or `_#`). Otherwise +// it interprets name as an arbitrary string for a regular field. +func (s *Struct) FieldByName(name string, isIdent bool) (FieldInfo, error) { + f := s.v.ctx().Label(name, isIdent) + for i, a := range s.features { + if a == f { + return s.Field(i), nil + } + } + return FieldInfo{}, errNotFound +} + +// Fields creates an iterator over the Struct's fields. +func (s *Struct) Fields(opts ...Option) *Iterator { + iter, _ := s.v.Fields(opts...) + return iter +} + +// Fields creates an iterator over v's fields if v is a struct or an error +// otherwise. +func (v Value) Fields(opts ...Option) (*Iterator, error) { + o := options{omitDefinitions: true, omitHidden: true, omitOptional: true} + o.updateOptions(opts) + ctx := v.ctx() + obj, err := v.structValOpts(ctx, o) + if err != nil { + return &Iterator{ctx: ctx}, v.toErr(err) + } + + arcs := []field{} + for i := range obj.features { + arc, isOpt := obj.at(i) + arcs = append(arcs, field{arc: arc, isOptional: isOpt}) + } + return &Iterator{ctx: ctx, val: v, arcs: arcs}, nil +} + +// Lookup reports the value at a path starting from v. The empty path returns v +// itself. Use LookupDef for definitions or LookupField for any kind of field. +// +// The Exists() method can be used to verify if the returned value existed. +// Lookup cannot be used to look up hidden or optional fields or definitions. +// +// Deprecated: use LookupPath. At some point before v1.0.0, this method will +// be removed to be reused eventually for looking up a selector. 
+func (v Value) Lookup(path ...string) Value { + ctx := v.ctx() + for _, k := range path { + // TODO(eval) TODO(error): always search in full data and change error + // message if a field is found but is of the incorrect type. + obj, err := v.structValData(ctx) + if err != nil { + // TODO: return a Value at the same location and a new error? + return newErrValue(v, err) + } + v = obj.Lookup(k) + } + return v +} + +// Path returns the path to this value from the root of an Instance. +// +// This is currently only defined for values that have a fixed path within +// a configuration, and thus not those that are derived from Elem, Template, +// or programmatically generated values such as those returned by Unify. +func (v Value) Path() Path { + if v.v == nil { + return Path{} + } + p := v.v.Path() + a := make([]Selector, len(p)) + for i, f := range p { + switch f.Typ() { + case adt.IntLabel: + a[i] = Selector{indexSelector(f)} + + case adt.DefinitionLabel, adt.HiddenDefinitionLabel, adt.HiddenLabel: + a[i] = Selector{definitionSelector(f.SelectorString(v.idx.Runtime))} + + case adt.StringLabel: + a[i] = Selector{stringSelector(f.StringValue(v.idx.Runtime))} + } + } + return Path{path: a} +} + +// LookupPath reports the value for path p relative to v. +func (v Value) LookupPath(p Path) Value { + n := v.v +outer: + for _, sel := range p.path { + f := sel.sel.feature(v.idx.Runtime) + for _, a := range n.Arcs { + if a.Label == f { + n = a + continue outer + } + } + var x *adt.Bottom + if err, ok := sel.sel.(pathError); ok { + x = &adt.Bottom{Err: err.Error} + } else { + // TODO: better message. + x = v.idx.mkErr(n, codeNotExist, "value %q not found", sel.sel) + } + v := makeValue(v.idx, n) + return newErrValue(v, x) + } + return makeValue(v.idx, n) +} + +// LookupDef reports the definition with the given name within struct v. The +// Exists method of the returned value will report false if the definition did +// not exist. 
The Err method reports if any error occurred during evaluation. +func (v Value) LookupDef(name string) Value { + ctx := v.ctx() + o, err := v.structValFull(ctx) + if err != nil { + return newErrValue(v, err) + } + + f := v.ctx().Label(name, true) + for i, a := range o.features { + if a == f { + if f.IsHidden() || !f.IsDef() { // optional not possible for now + break + } + return newChildValue(&o, i) + } + } + if !strings.HasPrefix(name, "#") { + alt := v.LookupDef("#" + name) + // Use the original error message if this resulted in an error as well. + if alt.Err() == nil { + return alt + } + } + return newErrValue(v, ctx.mkErr(v.v, "definition %q not found", name)) +} + +var errNotFound = errors.Newf(token.NoPos, "field not found") + +// FieldByName looks up a field for the given name. If isIdent is true, it will +// look up a definition or hidden field (starting with `_` or `_#`). Otherwise +// it interprets name as an arbitrary string for a regular field. +func (v Value) FieldByName(name string, isIdent bool) (f FieldInfo, err error) { + s, err := v.Struct() + if err != nil { + return f, err + } + return s.FieldByName(name, isIdent) +} + +// LookupField reports information about a field of v. +// +// Deprecated: this API does not work with new-style definitions. Use FieldByName. +func (v Value) LookupField(name string) (FieldInfo, error) { + s, err := v.Struct() + if err != nil { + // TODO: return a Value at the same location and a new error? + return FieldInfo{}, err + } + f, err := s.FieldByName(name, true) + if err != nil { + return f, err + } + if f.IsHidden { + return f, errNotFound + } + return f, err +} + +// TODO: expose this API? +// +// // EvalExpr evaluates an expression within the scope of v, which must be +// // a struct. +// // +// // Expressions may refer to builtin packages if they can be uniquely identified. 
+// func (v Value) EvalExpr(expr ast.Expr) Value { +// ctx := v.ctx() +// result := evalExpr(ctx, v.eval(ctx), expr) +// return newValueRoot(ctx, result) +// } + +// Fill creates a new value by unifying v with the value of x at the given path. +// +// Values may be any Go value that can be converted to CUE, an ast.Expr or +// a Value. In the latter case, it will panic if the Value is not from the same +// Runtime. +// +// Any reference in v referring to the value at the given path will resolve +// to x in the newly created value. The resulting value is not validated. +func (v Value) Fill(x interface{}, path ...string) Value { + if v.v == nil { + return v + } + ctx := v.ctx() + for i := len(path) - 1; i >= 0; i-- { + x = map[string]interface{}{path[i]: x} + } + var value = convert.GoValueToValue(ctx.opCtx, x, true) + n, _ := value.(*adt.Vertex) + if n == nil { + n = &adt.Vertex{} + n.AddConjunct(adt.MakeRootConjunct(nil, value)) + } + n.Finalize(ctx.opCtx) + w := makeValue(v.idx, n) + return v.Unify(w) +} + +// Template returns a function that represents the template definition for a +// struct in a configuration file. It returns nil if v is not a struct kind or +// if there is no template associated with the struct. +// +// The returned function returns the value that would be unified with field +// given its name. +func (v Value) Template() func(label string) Value { + // TODO: rename to optional. + if v.v == nil { + return nil + } + + types := v.v.OptionalTypes() + if types&(adt.HasAdditional|adt.HasPattern) == 0 { + return nil + } + + parent := v.v + ctx := v.ctx().opCtx + return func(label string) Value { + f := ctx.StringLabel(label) + arc := &adt.Vertex{Parent: parent, Label: f} + v.v.MatchAndInsert(ctx, arc) + if len(arc.Conjuncts) == 0 { + return Value{} + } + arc.Finalize(ctx) + return makeValue(v.idx, arc) + } +} + +// Subsume reports nil when w is an instance of v or an error otherwise. 
+// +// Without options, the entire value is considered for assumption, which means +// Subsume tests whether v is a backwards compatible (newer) API version of w. +// Use the Final() to indicate that the subsumed value is data, and that +// +// Use the Final option to check subsumption if a w is known to be final, +// and should assumed to be closed. +// +// Options are currently ignored and the function will panic if any are passed. +// +// Value v and w must be obtained from the same build. +// TODO: remove this requirement. +func (v Value) Subsume(w Value, opts ...Option) error { + o := getOptions(opts) + p := subsume.CUE + switch { + case o.final && o.ignoreClosedness: + p = subsume.FinalOpen + case o.final: + p = subsume.Final + case o.ignoreClosedness: + p = subsume.API + } + p.Defaults = true + ctx := v.ctx().opCtx + return p.Value(ctx, v.v, w.v) +} + +// Deprecated: use Subsume. +// +// Subsumes reports whether w is an instance of v. +// +// Without options, Subsumes checks whether v is a backwards compatbile schema +// of w. +// +// By default, Subsumes tests whether two values are compatible +// Value v and w must be obtained from the same build. +// TODO: remove this requirement. 
+func (v Value) Subsumes(w Value) bool { + ctx := v.ctx().opCtx + p := subsume.Profile{Defaults: true} + return p.Check(ctx, v.v, w.v) +} + +func isDef(v *adt.Vertex) bool { + for ; v != nil; v = v.Parent { + if v.Label.IsDef() { + return true + } + } + return false +} + +func allowed(ctx *adt.OpContext, parent, n *adt.Vertex) *adt.Bottom { + if !parent.IsClosed(ctx) && !isDef(parent) { + return nil + } + + for _, a := range n.Arcs { + if !parent.Accept(ctx, a.Label) { + defer ctx.PopArc(ctx.PushArc(parent)) + label := a.Label.SelectorString(ctx) + parent.Accept(ctx, a.Label) + return ctx.NewErrf("field `%s` not allowed", label) + } + } + return nil +} + +func addConjuncts(dst, src *adt.Vertex) { + c := adt.MakeRootConjunct(nil, src) + if src.Closed { + var root adt.CloseInfo + c.CloseInfo = root.SpawnRef(src, src.Closed, nil) + } + dst.AddConjunct(c) +} + +// Unify reports the greatest lower bound of v and w. +// +// Value v and w must be obtained from the same build. +// TODO: remove this requirement. +func (v Value) Unify(w Value) Value { + if v.v == nil { + return w + } + if w.v == nil { + return v + } + + n := &adt.Vertex{} + addConjuncts(n, v.v) + addConjuncts(n, w.v) + + ctx := v.idx.newContext().opCtx + n.Finalize(ctx) + + n.Parent = v.v.Parent + n.Label = v.v.Label + n.Closed = v.v.Closed || w.v.Closed + + if err := allowed(ctx, v.v, n); err != nil { + return newErrValue(w, err) + } + if err := allowed(ctx, w.v, n); err != nil { + return newErrValue(v, err) + } + + return makeValue(v.idx, n) +} + +// UnifyAccept is as v.Unify(w), but will disregard any field that is allowed +// in the Value accept. 
+func (v Value) UnifyAccept(w Value, accept Value) Value { + if v.v == nil { + return w + } + if w.v == nil { + return v + } + if accept.v == nil { + panic("accept must exist") + } + + n := &adt.Vertex{} + n.AddConjunct(adt.MakeRootConjunct(nil, v.v)) + n.AddConjunct(adt.MakeRootConjunct(nil, w.v)) + + ctx := v.idx.newContext().opCtx + n.Finalize(ctx) + + n.Parent = v.v.Parent + n.Label = v.v.Label + + if err := allowed(ctx, accept.v, n); err != nil { + return newErrValue(accept, err) + } + + return makeValue(v.idx, n) +} + +// Equals reports whether two values are equal, ignoring optional fields. +// The result is undefined for incomplete values. +func (v Value) Equals(other Value) bool { + if v.v == nil || other.v == nil { + return false + } + return adt.Equal(v.ctx().opCtx, v.v, other.v, 0) +} + +// Format prints a debug version of a value. +func (v Value) Format(state fmt.State, verb rune) { + ctx := v.ctx() + if v.v == nil { + fmt.Fprint(state, "") + return + } + switch { + case state.Flag('#'): + _, _ = io.WriteString(state, ctx.str(v.v)) + case state.Flag('+'): + _, _ = io.WriteString(state, ctx.opCtx.Str(v.v)) + default: + n, _ := export.Raw.Expr(v.idx.Runtime, v.instance().ID(), v.v) + b, _ := format.Node(n) + _, _ = state.Write(b) + } +} + +func (v Value) instance() *Instance { + if v.v == nil { + return nil + } + return v.idx.getImportFromNode(v.v) +} + +// Reference returns the instance and path referred to by this value such that +// inst.Lookup(path) resolves to the same value, or no path if this value is not +// a reference. If a reference contains index selection (foo[bar]), it will +// only return a reference if the index resolves to a concrete value. +func (v Value) Reference() (inst *Instance, path []string) { + // TODO: don't include references to hidden fields. 
+ if v.v == nil || len(v.v.Conjuncts) != 1 { + return nil, nil + } + ctx := v.ctx() + c := v.v.Conjuncts[0] + + return reference(ctx, c.Env, c.Expr()) +} + +func reference(c *context, env *adt.Environment, r adt.Expr) (inst *Instance, path []string) { + ctx := c.opCtx + defer ctx.PopState(ctx.PushState(env, r.Source())) + + switch x := r.(type) { + // TODO: do we need to handle Vertex as well, in case this is hard-wired? + // Probably not, as this results from dynamic content. + + case *adt.NodeLink: + // TODO: consider getting rid of NodeLink. + inst, path = mkPath(c, nil, x.Node) + + case *adt.FieldReference: + env := ctx.Env(x.UpCount) + inst, path = mkPath(c, nil, env.Vertex) + path = append(path, x.Label.SelectorString(c.Runtime)) + + case *adt.LabelReference: + env := ctx.Env(x.UpCount) + return mkPath(c, nil, env.Vertex) + + case *adt.DynamicReference: + env := ctx.Env(x.UpCount) + inst, path = mkPath(c, nil, env.Vertex) + v, _ := ctx.Evaluate(env, x.Label) + str := ctx.StringValue(v) + path = append(path, str) + + case *adt.ImportReference: + imp := x.ImportPath.StringValue(ctx) + inst = c.index.getImportFromPath(imp) + + case *adt.SelectorExpr: + inst, path = reference(c, env, x.X) + path = append(path, x.Sel.SelectorString(ctx)) + + case *adt.IndexExpr: + inst, path = reference(c, env, x.X) + v, _ := ctx.Evaluate(env, x.Index) + str := ctx.StringValue(v) + path = append(path, str) + } + if inst == nil { + return nil, nil + } + return inst, path +} + +func mkPath(ctx *context, a []string, v *adt.Vertex) (inst *Instance, path []string) { + if v.Parent == nil { + return ctx.index.getImportFromNode(v), a + } + inst, path = mkPath(ctx, a, v.Parent) + path = append(path, v.Label.SelectorString(ctx.opCtx)) + return inst, path +} + +// // References reports all references used to evaluate this value. It does not +// // report references for sub fields if v is a struct. +// // +// // Deprecated: can be implemented in terms of Reference and Expr. 
+// func (v Value) References() [][]string { +// panic("deprecated") +// } + +type options struct { + concrete bool // enforce that values are concrete + raw bool // show original values + hasHidden bool + omitHidden bool + omitDefinitions bool + omitOptional bool + omitAttrs bool + resolveReferences bool + final bool + ignoreClosedness bool // used for comparing APIs + docs bool + disallowCycles bool // implied by concrete + allowScalar bool +} + +// An Option defines modes of evaluation. +type Option option + +type option func(p *options) + +// Final indicates a value is final. It implicitly closes all structs and lists +// in a value and selects defaults. +func Final() Option { + return func(o *options) { + o.final = true + o.omitDefinitions = true + o.omitOptional = true + o.omitHidden = true + } +} + +// Schema specifies the input is a Schema. Used by Subsume. +func Schema() Option { + return func(o *options) { + o.ignoreClosedness = true + } +} + +// Concrete ensures that all values are concrete. +// +// For Validate this means it returns an error if this is not the case. +// In other cases a non-concrete value will be replaced with an error. +func Concrete(concrete bool) Option { + return func(p *options) { + if concrete { + p.concrete = true + p.final = true + if !p.hasHidden { + p.omitHidden = true + p.omitDefinitions = true + } + } + } +} + +// DisallowCycles forces validation in the precense of cycles, even if +// non-concrete values are allowed. This is implied by Concrete(true). +func DisallowCycles(disallow bool) Option { + return func(p *options) { p.disallowCycles = disallow } +} + +// ResolveReferences forces the evaluation of references when outputting. +// This implies the input cannot have cycles. +func ResolveReferences(resolve bool) Option { + return func(p *options) { p.resolveReferences = resolve } +} + +// Raw tells Syntax to generate the value as is without any simplifications. 
+func Raw() Option { + return func(p *options) { p.raw = true } +} + +// All indicates that all fields and values should be included in processing +// even if they can be elided or omitted. +func All() Option { + return func(p *options) { + p.omitAttrs = false + p.omitHidden = false + p.omitDefinitions = false + p.omitOptional = false + } +} + +// Docs indicates whether docs should be included. +func Docs(include bool) Option { + return func(p *options) { p.docs = true } +} + +// Definitions indicates whether definitions should be included. +// +// Definitions may still be included for certain functions if they are referred +// to by other other values. +func Definitions(include bool) Option { + return func(p *options) { + p.hasHidden = true + p.omitDefinitions = !include + } +} + +// Hidden indicates that definitions and hidden fields should be included. +// +// Deprecated: Hidden fields are deprecated. +func Hidden(include bool) Option { + return func(p *options) { + p.hasHidden = true + p.omitHidden = !include + p.omitDefinitions = !include + } +} + +// Optional indicates that optional fields should be included. +func Optional(include bool) Option { + return func(p *options) { p.omitOptional = !include } +} + +// Attributes indicates that attributes should be included. +func Attributes(include bool) Option { + return func(p *options) { p.omitAttrs = !include } +} + +func getOptions(opts []Option) (o options) { + o.updateOptions(opts) + return +} + +func (o *options) updateOptions(opts []Option) { + for _, fn := range opts { + fn(o) + } +} + +// Validate reports any errors, recursively. The returned error may represent +// more than one error, retrievable with errors.Errors, if more than one +// exists. 
+func (v Value) Validate(opts ...Option) error { + o := options{} + o.updateOptions(opts) + + cfg := &validate.Config{ + Concrete: o.concrete, + DisallowCycles: o.disallowCycles, + AllErrors: true, + } + + b := validate.Validate(v.ctx().opCtx, v.v, cfg) + if b != nil { + return b.Err + } + return nil +} + +// Walk descends into all values of v, calling f. If f returns false, Walk +// will not descent further. It only visits values that are part of the data +// model, so this excludes optional fields, hidden fields, and definitions. +func (v Value) Walk(before func(Value) bool, after func(Value)) { + ctx := v.ctx() + switch v.Kind() { + case StructKind: + if before != nil && !before(v) { + return + } + obj, _ := v.structValData(ctx) + for i := 0; i < obj.Len(); i++ { + _, v := obj.At(i) + v.Walk(before, after) + } + case ListKind: + if before != nil && !before(v) { + return + } + list, _ := v.List() + for list.Next() { + list.Value().Walk(before, after) + } + default: + if before != nil { + before(v) + } + } + if after != nil { + after(v) + } +} + +// Attribute returns the attribute data for the given key. +// The returned attribute will return an error for any of its methods if there +// is no attribute for the requested key. +func (v Value) Attribute(key string) Attribute { + // look up the attributes + if v.v == nil { + return Attribute{internal.NewNonExisting(key)} + } + // look up the attributes + for _, a := range export.ExtractFieldAttrs(v.v.Conjuncts) { + k, body := a.Split() + if key != k { + continue + } + return Attribute{internal.ParseAttrBody(token.NoPos, body)} + } + + return Attribute{internal.NewNonExisting(key)} +} + +// An Attribute contains meta data about a field. +type Attribute struct { + attr internal.Attr +} + +// Err returns the error associated with this Attribute or nil if this +// attribute is valid. 
+func (a *Attribute) Err() error { + return a.attr.Err +} + +// String reports the possibly empty string value at the given position or +// an error the attribute is invalid or if the position does not exist. +func (a *Attribute) String(pos int) (string, error) { + return a.attr.String(pos) +} + +// Int reports the integer at the given position or an error if the attribute is +// invalid, the position does not exist, or the value at the given position is +// not an integer. +func (a *Attribute) Int(pos int) (int64, error) { + return a.attr.Int(pos) +} + +// Flag reports whether an entry with the given name exists at position pos or +// onwards or an error if the attribute is invalid or if the first pos-1 entries +// are not defined. +func (a *Attribute) Flag(pos int, key string) (bool, error) { + return a.attr.Flag(pos, key) +} + +// Lookup searches for an entry of the form key=value from position pos onwards +// and reports the value if found. It reports an error if the attribute is +// invalid or if the first pos-1 entries are not defined. +func (a *Attribute) Lookup(pos int, key string) (val string, found bool, err error) { + return a.attr.Lookup(pos, key) +} + +// Expr reports the operation of the underlying expression and the values it +// operates on. +// +// For unary expressions, it returns the single value of the expression. +// +// For binary expressions it returns first the left and right value, in that +// order. For associative operations however, (for instance '&' and '|'), it may +// return more than two values, where the operation is to be applied in +// sequence. +// +// For selector and index expressions it returns the subject and then the index. +// For selectors, the index is the string value of the identifier. +// +// For interpolations it returns a sequence of values to be concatenated, some +// of which will be literal strings and some unevaluated expressions. 
+// +// A builtin call expression returns the value of the builtin followed by the +// args of the call. +func (v Value) Expr() (Op, []Value) { + // TODO: return v if this is complete? Yes for now + if v.v == nil { + return NoOp, nil + } + + var expr adt.Expr + var env *adt.Environment + + if v.v.IsData() { + expr = v.v.Value() + + } else { + switch len(v.v.Conjuncts) { + case 0: + if v.v.BaseValue == nil { + return NoOp, []Value{makeValue(v.idx, v.v)} + } + expr = v.v.Value() + + case 1: + // the default case, processed below. + c := v.v.Conjuncts[0] + env = c.Env + expr = c.Expr() + if w, ok := expr.(*adt.Vertex); ok { + return Value{v.idx, w}.Expr() + } + + default: + a := []Value{} + ctx := v.ctx().opCtx + for _, c := range v.v.Conjuncts { + // Keep parent here. TODO: do we need remove the requirement + // from other conjuncts? + n := &adt.Vertex{ + Parent: v.v.Parent, + Label: v.v.Label, + } + n.AddConjunct(c) + n.Finalize(ctx) + a = append(a, makeValue(v.idx, n)) + } + return adt.AndOp, a + } + } + + // TODO: replace appends with []Value{}. For not leave. 
+ a := []Value{} + op := NoOp + switch x := expr.(type) { + case *adt.BinaryExpr: + a = append(a, remakeValue(v, env, x.X)) + a = append(a, remakeValue(v, env, x.Y)) + op = x.Op + case *adt.UnaryExpr: + a = append(a, remakeValue(v, env, x.X)) + op = x.Op + case *adt.BoundExpr: + a = append(a, remakeValue(v, env, x.Expr)) + op = x.Op + case *adt.BoundValue: + a = append(a, remakeValue(v, env, x.Value)) + op = x.Op + case *adt.Conjunction: + // pre-expanded unification + for _, conjunct := range x.Values { + a = append(a, remakeValue(v, env, conjunct)) + } + op = AndOp + case *adt.Disjunction: + count := 0 + outer: + for i, disjunct := range x.Values { + if i < x.NumDefaults { + for _, n := range x.Values[x.NumDefaults:] { + if subsume.Value(v.ctx().opCtx, n, disjunct) == nil { + continue outer + } + } + } + count++ + a = append(a, remakeValue(v, env, disjunct)) + } + if count > 1 { + op = OrOp + } + + case *adt.DisjunctionExpr: + // Filter defaults that are subsumed by another value. + count := 0 + outerExpr: + for _, disjunct := range x.Values { + if disjunct.Default { + for _, n := range x.Values { + a := adt.Vertex{ + Label: v.v.Label, + } + b := a + a.AddConjunct(adt.MakeRootConjunct(env, n.Val)) + b.AddConjunct(adt.MakeRootConjunct(env, disjunct.Val)) + + ctx := eval.NewContext(v.idx.Runtime, nil) + ctx.Unify(&a, adt.Finalized) + ctx.Unify(&b, adt.Finalized) + if allowed(ctx, v.v, &b) != nil { + // Everything subsumed bottom + continue outerExpr + } + if allowed(ctx, v.v, &a) != nil { + // An error doesn't subsume anything except another error. 
+ continue + } + a.Parent = v.v.Parent + if !n.Default && subsume.Value(ctx, &a, &b) == nil { + continue outerExpr + } + } + } + count++ + a = append(a, remakeValue(v, env, disjunct.Val)) + } + if count > 1 { + op = adt.OrOp + } + + case *adt.Interpolation: + for _, p := range x.Parts { + a = append(a, remakeValue(v, env, p)) + } + op = InterpolationOp + + case *adt.FieldReference: + // TODO: allow hard link + ctx := v.ctx().opCtx + f := ctx.PushState(env, x.Src) + env := ctx.Env(x.UpCount) + a = append(a, remakeValue(v, nil, &adt.NodeLink{Node: env.Vertex})) + a = append(a, remakeValue(v, nil, ctx.NewString(x.Label.SelectorString(ctx)))) + _ = ctx.PopState(f) + op = SelectorOp + + case *adt.SelectorExpr: + a = append(a, remakeValue(v, env, x.X)) + // A string selector is quoted. + a = append(a, remakeValue(v, env, &adt.String{ + Str: x.Sel.SelectorString(v.idx.Runtime), + })) + op = SelectorOp + + case *adt.IndexExpr: + a = append(a, remakeValue(v, env, x.X)) + a = append(a, remakeValue(v, env, x.Index)) + op = IndexOp + case *adt.SliceExpr: + a = append(a, remakeValue(v, env, x.X)) + a = append(a, remakeValue(v, env, x.Lo)) + a = append(a, remakeValue(v, env, x.Hi)) + op = SliceOp + case *adt.CallExpr: + a = append(a, remakeValue(v, env, x.Fun)) + for _, arg := range x.Args { + a = append(a, remakeValue(v, env, arg)) + } + op = CallOp + case *adt.BuiltinValidator: + a = append(a, remakeValue(v, env, x.Builtin)) + for _, arg := range x.Args { + a = append(a, remakeValue(v, env, arg)) + } + op = CallOp + + case *adt.StructLit: + // Simulate old embeddings. 
+ envEmbed := &adt.Environment{ + Up: env, + Vertex: v.v, + } + fields := []adt.Decl{} + ctx := v.ctx().opCtx + for _, d := range x.Decls { + switch x := d.(type) { + case adt.Expr: + // embedding + n := &adt.Vertex{Label: v.v.Label} + c := adt.MakeRootConjunct(envEmbed, x) + n.AddConjunct(c) + n.Finalize(ctx) + n.Parent = v.v.Parent + a = append(a, makeValue(v.idx, n)) + + default: + fields = append(fields, d) + } + } + if len(a) == 0 { + a = append(a, v) + break + } + + if len(fields) > 0 { + n := &adt.Vertex{ + Label: v.v.Label, + } + c := adt.MakeRootConjunct(env, &adt.StructLit{ + Decls: fields, + }) + n.AddConjunct(c) + n.Finalize(ctx) + n.Parent = v.v.Parent + a = append(a, makeValue(v.idx, n)) + } + + op = adt.AndOp + + default: + a = append(a, v) + } + return op, a +} diff --git a/vendor/cuelang.org/go/encoding/gocode/gocodec/codec.go b/vendor/cuelang.org/go/encoding/gocode/gocodec/codec.go new file mode 100644 index 000000000..bd4b08325 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/gocode/gocodec/codec.go @@ -0,0 +1,181 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package codec converts Go to and from CUE and validates Go values based on +// CUE constraints. +// +// CUE constraints can be used to validate Go types as well as fill out +// missing struct fields that are implied from the constraints and the values +// already defined by the struct value. 
+package gocodec + +import ( + "sync" + + "cuelang.org/go/cue" + "cuelang.org/go/internal" +) + +// Config has no options yet, but is defined for future extensibility. +type Config struct { +} + +// A Codec decodes and encodes CUE from and to Go values and validates and +// completes Go values based on CUE templates. +type Codec struct { + runtime *cue.Runtime + mutex sync.RWMutex +} + +// New creates a new Codec for the given instance. +// +// It is safe to use the methods of Codec concurrently as long as the given +// Runtime is not used elsewhere while using Codec. However, only the concurrent +// use of Decode, Validate, and Complete is efficient. +func New(r *cue.Runtime, c *Config) *Codec { + return &Codec{runtime: r} +} + +// ExtractType extracts a CUE value from a Go type. +// +// The type represented by x is converted as the underlying type. Specific +// values, such as map or slice elements or field values of structs are ignored. +// If x is of type reflect.Type, the type represented by x is extracted. +// +// Fields of structs can be annoted using additional constrains using the 'cue' +// field tag. The value of the tag is a CUE expression, which may contain +// references to the JSON name of other fields in a struct. +// +// type Sum struct { +// A int `cue:"c-b" json:"a,omitempty"` +// B int `cue:"c-a" json:"b,omitempty"` +// C int `cue:"a+b" json:"c,omitempty"` +// } +// +func (c *Codec) ExtractType(x interface{}) (cue.Value, error) { + // ExtractType cannot introduce new fields on repeated calls. We could + // consider optimizing the lock usage based on this property. + c.mutex.Lock() + defer c.mutex.Unlock() + + return fromGoType(c.runtime, x) +} + +// TODO: allow extracting constraints and type info separately? + +// Decode converts x to a CUE value. +// +// If x is of type reflect.Value it will convert the value represented by x. 
+func (c *Codec) Decode(x interface{}) (cue.Value, error) { + c.mutex.Lock() + defer c.mutex.Unlock() + + // Depending on the type, can introduce new labels on repeated calls. + return fromGoValue(c.runtime, x, false) +} + +// Encode converts v to a Go value. +func (c *Codec) Encode(v cue.Value, x interface{}) error { + c.mutex.RLock() + defer c.mutex.RUnlock() + + return v.Decode(x) +} + +var defaultCodec = New(&cue.Runtime{}, nil) + +// Validate calls Validate on a default Codec for the type of x. +func Validate(x interface{}) error { + c := defaultCodec + c.mutex.RLock() + defer c.mutex.RUnlock() + + r := defaultCodec.runtime + v, err := fromGoType(r, x) + if err != nil { + return err + } + w, err := fromGoValue(r, x, false) + if err != nil { + return err + } + v = v.Unify(w) + if err := v.Validate(); err != nil { + return err + } + return nil +} + +// Validate checks whether x satisfies the constraints defined by v. +// +// The given value must be created using the same Runtime with which c was +// initialized. +func (c *Codec) Validate(v cue.Value, x interface{}) error { + c.mutex.RLock() + defer c.mutex.RUnlock() + + r := checkAndForkRuntime(c.runtime, v) + w, err := fromGoValue(r, x, false) + if err != nil { + return err + } + return w.Unify(v).Err() +} + +// Complete sets previously undefined values in x that can be uniquely +// determined form the constraints defined by v if validation passes, or returns +// an error, without modifying anything, otherwise. +// +// Only undefined values are modified. A value is considered undefined if it is +// pointer type and is nil or if it is a field with a zero value that has a json +// tag with the omitempty flag. +// +// The given value must be created using the same Runtime with which c was +// initialized. +// +// Complete does a JSON round trip. This means that data not preserved in such a +// round trip, such as the location name of a time.Time, is lost after a +// successful update. 
+func (c *Codec) Complete(v cue.Value, x interface{}) error { + c.mutex.RLock() + defer c.mutex.RUnlock() + + r := checkAndForkRuntime(c.runtime, v) + w, err := fromGoValue(r, x, true) + if err != nil { + return err + } + + return w.Unify(v).Decode(x) +} + +func fromGoValue(r *cue.Runtime, x interface{}, allowDefault bool) (cue.Value, error) { + v := internal.FromGoValue(r, x, allowDefault).(cue.Value) + if err := v.Err(); err != nil { + return v, err + } + return v, nil +} + +func fromGoType(r *cue.Runtime, x interface{}) (cue.Value, error) { + v := internal.FromGoType(r, x).(cue.Value) + if err := v.Err(); err != nil { + return v, err + } + return v, nil +} + +func checkAndForkRuntime(r *cue.Runtime, v cue.Value) *cue.Runtime { + return internal.CheckAndForkRuntime(r, v).(*cue.Runtime) +} diff --git a/vendor/cuelang.org/go/encoding/json/json.go b/vendor/cuelang.org/go/encoding/json/json.go new file mode 100644 index 000000000..a31922214 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/json/json.go @@ -0,0 +1,246 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package json converts JSON to and from CUE. 
+package json + +import ( + gojson "encoding/json" + "io" + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/ast/astutil" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" + "cuelang.org/go/pkg/encoding/json" +) + +// Valid reports whether data is a valid JSON encoding. +func Valid(b []byte) bool { + return gojson.Valid(b) +} + +// Validate validates JSON and confirms it matches the constraints +// specified by v. +func Validate(b []byte, v cue.Value) error { + _, err := json.Validate(b, v) + return err +} + +// Extract parses JSON-encoded data to a CUE expression, using path for +// position information. +func Extract(path string, data []byte) (ast.Expr, error) { + expr, err := extract(path, data) + if err != nil { + return nil, err + } + patchExpr(expr) + return expr, nil +} + +// Decode parses JSON-encoded data to a CUE value, using path for position +// information. +func Decode(r *cue.Runtime, path string, data []byte) (*cue.Instance, error) { + expr, err := extract(path, data) + if err != nil { + return nil, err + } + return r.CompileExpr(expr) +} + +func extract(path string, b []byte) (ast.Expr, error) { + expr, err := parser.ParseExpr(path, b) + if err != nil || !gojson.Valid(b) { + p := token.NoPos + if pos := errors.Positions(err); len(pos) > 0 { + p = pos[0] + } + var x interface{} + err := gojson.Unmarshal(b, &x) + return nil, errors.Wrapf(err, p, "invalid JSON for file %q", path) + } + return expr, nil +} + +// NewDecoder configures a JSON decoder. The path is used to associate position +// information with each node. The runtime may be nil if the decoder +// is only used to extract to CUE ast objects. +func NewDecoder(r *cue.Runtime, path string, src io.Reader) *Decoder { + return &Decoder{ + r: r, + path: path, + dec: gojson.NewDecoder(src), + offset: 1, + } +} + +// A Decoder converts JSON values to CUE. 
+type Decoder struct { + r *cue.Runtime + path string + dec *gojson.Decoder + offset int +} + +// Extract converts the current JSON value to a CUE ast. It returns io.EOF +// if the input has been exhausted. +func (d *Decoder) Extract() (ast.Expr, error) { + expr, err := d.extract() + if err != nil { + return expr, err + } + patchExpr(expr) + return expr, nil +} + +func (d *Decoder) extract() (ast.Expr, error) { + var raw gojson.RawMessage + err := d.dec.Decode(&raw) + if err == io.EOF { + return nil, err + } + offset := d.offset + d.offset += len(raw) + if err != nil { + pos := token.NewFile(d.path, offset, len(raw)).Pos(0, 0) + return nil, errors.Wrapf(err, pos, "invalid JSON for file %q", d.path) + } + expr, err := parser.ParseExpr(d.path, []byte(raw), parser.FileOffset(offset)) + if err != nil { + return nil, err + } + return expr, nil +} + +// Decode converts the current JSON value to a CUE instance. It returns io.EOF +// if the input has been exhausted. +func (d *Decoder) Decode() (*cue.Instance, error) { + expr, err := d.Extract() + if err != nil { + return nil, err + } + return d.r.CompileExpr(expr) +} + +// patchExpr simplifies the AST parsed from JSON. +// TODO: some of the modifications are already done in format, but are +// a package deal of a more aggressive simplify. Other pieces of modification +// should probably be moved to format. 
+func patchExpr(n ast.Node) { + type info struct { + reflow bool + } + stack := []info{{true}} + + afterFn := func(n ast.Node) { + switch n.(type) { + case *ast.ListLit, *ast.StructLit: + stack = stack[:len(stack)-1] + } + } + + var beforeFn func(n ast.Node) bool + + beforeFn = func(n ast.Node) bool { + isLarge := n.End().Offset()-n.Pos().Offset() > 50 + descent := true + + switch x := n.(type) { + case *ast.ListLit: + reflow := true + if !isLarge { + for _, e := range x.Elts { + if hasSpaces(e) { + reflow = false + break + } + } + } + stack = append(stack, info{reflow}) + if reflow { + x.Lbrack = x.Lbrack.WithRel(token.NoRelPos) + x.Rbrack = x.Rbrack.WithRel(token.NoRelPos) + } + return true + + case *ast.StructLit: + reflow := true + if !isLarge { + for _, e := range x.Elts { + if f, ok := e.(*ast.Field); !ok || hasSpaces(f) || hasSpaces(f.Value) { + reflow = false + break + } + } + } + stack = append(stack, info{reflow}) + if reflow { + x.Lbrace = x.Lbrace.WithRel(token.NoRelPos) + x.Rbrace = x.Rbrace.WithRel(token.NoRelPos) + } + return true + + case *ast.Field: + // label is always a string for JSON. + switch { + case true: + s, ok := x.Label.(*ast.BasicLit) + if !ok || s.Kind != token.STRING { + break // should not happen: implies invalid JSON + } + + u, err := literal.Unquote(s.Value) + if err != nil { + break // should not happen: implies invalid JSON + } + + // TODO(legacy): remove checking for '_' prefix once hidden + // fields are removed. 
+ if !ast.IsValidIdent(u) || strings.HasPrefix(u, "_") { + break // keep string + } + + x.Label = ast.NewIdent(u) + astutil.CopyMeta(x.Label, s) + } + ast.Walk(x.Value, beforeFn, afterFn) + descent = false + + case *ast.BasicLit: + if x.Kind == token.STRING && len(x.Value) > 10 { + s, err := literal.Unquote(x.Value) + if err != nil { + break // should not happen: implies invalid JSON + } + + x.Value = literal.String.WithOptionalTabIndent(len(stack)).Quote(s) + } + } + + if stack[len(stack)-1].reflow { + ast.SetRelPos(n, token.NoRelPos) + } + return descent + } + + ast.Walk(n, beforeFn, afterFn) +} + +func hasSpaces(n ast.Node) bool { + return n.Pos().RelPos() > token.NoSpace +} diff --git a/vendor/cuelang.org/go/encoding/jsonschema/constraints.go b/vendor/cuelang.org/go/encoding/jsonschema/constraints.go new file mode 100644 index 000000000..2b9e1ec26 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/jsonschema/constraints.go @@ -0,0 +1,715 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +import ( + "fmt" + "math/big" + "path" + "regexp" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" +) + +// TODO: skip invalid regexps containing ?! and foes. 
+// alternatively, fall back to https://github.com/dlclark/regexp2 + +type constraint struct { + key string + + // phase indicates on which pass c constraint should be added. This ensures + // that constraints are applied in the correct order. For instance, the + // "required" constraint validates that a listed field is contained in + // "properties". For this to work, "properties" must be processed before + // "required" and thus must have a lower phase number than the latter. + phase int + + // Indicates the draft number in which this constraint is defined. + draft int + fn constraintFunc +} + +// A constraintFunc converts a given JSON Schema constraint (specified in n) +// to a CUE constraint recorded in state. +type constraintFunc func(n cue.Value, s *state) + +func p0(name string, f constraintFunc) *constraint { + return &constraint{key: name, fn: f} +} + +func p1d(name string, draft int, f constraintFunc) *constraint { + return &constraint{key: name, phase: 1, draft: draft, fn: f} +} + +func p1(name string, f constraintFunc) *constraint { + return &constraint{key: name, phase: 1, fn: f} +} + +func p2(name string, f constraintFunc) *constraint { + return &constraint{key: name, phase: 2, fn: f} +} + +func p3(name string, f constraintFunc) *constraint { + return &constraint{key: name, phase: 3, fn: f} +} + +// TODO: +// writeOnly, readOnly + +var constraintMap = map[string]*constraint{} + +func init() { + for _, c := range constraints { + constraintMap[c.key] = c + } +} + +func addDefinitions(n cue.Value, s *state) { + if n.Kind() != cue.StructKind { + s.errf(n, `"definitions" expected an object, found %s`, n.Kind()) + } + + old := s.isSchema + s.isSchema = true + defer func() { s.isSchema = old }() + + s.processMap(n, func(key string, n cue.Value) { + name := key + + var f *ast.Field + + ident := "#" + name + if ast.IsValidIdent(ident) { + f = &ast.Field{Value: s.schema(n, label{ident, true})} + f.Label = ast.NewIdent(ident) + } else { + f = &ast.Field{Value: 
s.schema(n, label{"#", true}, label{name: name})} + f.Label = ast.NewString(name) + ident = "#" + f = &ast.Field{ + Label: ast.NewIdent("#"), + Value: ast.NewStruct(f), + } + } + + ast.SetRelPos(f, token.NewSection) + s.definitions = append(s.definitions, f) + s.setField(label{name: ident, isDef: true}, f) + }) +} + +var constraints = []*constraint{ + // Meta data. + + p0("$schema", func(n cue.Value, s *state) { + // Identifies this as a JSON schema and specifies its version. + // TODO: extract version. + s.jsonschema, _ = s.strValue(n) + }), + + p0("$id", func(n cue.Value, s *state) { + // URL: https://domain.com/schemas/foo.json + // anchors: #identifier + // + // TODO: mark identifiers. + + // Resolution must be relative to parent $id + // https://tools.ietf.org/html/draft-handrews-json-schema-02#section-8.2.2 + u := s.resolveURI(n) + if u == nil { + return + } + + if u.Fragment != "" { + if s.cfg.Strict { + s.errf(n, "$id URI may not contain a fragment") + } + return + } + s.id = u + + obj := s.object(n) + + // TODO: handle the case where this is always defined and we don't want + // to include the default value. + obj.Elts = append(obj.Elts, &ast.Attribute{ + Text: fmt.Sprintf("@jsonschema(id=%q)", u)}) + }), + + // Generic constraint + + p1("type", func(n cue.Value, s *state) { + var types cue.Kind + set := func(n cue.Value) { + str, ok := s.strValue(n) + if !ok { + s.errf(n, "type value should be a string") + } + switch str { + case "null": + types |= cue.NullKind + s.setTypeUsed(n, nullType) + // TODO: handle OpenAPI restrictions. 
+ case "boolean": + types |= cue.BoolKind + s.setTypeUsed(n, boolType) + case "string": + types |= cue.StringKind + s.setTypeUsed(n, stringType) + case "number": + types |= cue.NumberKind + s.setTypeUsed(n, numType) + case "integer": + types |= cue.IntKind + s.setTypeUsed(n, numType) + s.add(n, numType, ast.NewIdent("int")) + case "array": + types |= cue.ListKind + s.setTypeUsed(n, arrayType) + case "object": + types |= cue.StructKind + s.setTypeUsed(n, objectType) + + default: + s.errf(n, "unknown type %q", n) + } + } + + switch n.Kind() { + case cue.StringKind: + set(n) + case cue.ListKind: + for i, _ := n.List(); i.Next(); { + set(i.Value()) + } + default: + s.errf(n, `value of "type" must be a string or list of strings`) + } + + s.allowedTypes &= types + }), + + p1("enum", func(n cue.Value, s *state) { + var a []ast.Expr + for _, x := range s.listItems("enum", n, true) { + a = append(a, s.value(x)) + } + s.all.add(n, ast.NewBinExpr(token.OR, a...)) + }), + + // TODO: only allow for OpenAPI. + p1("nullable", func(n cue.Value, s *state) { + null := ast.NewNull() + setPos(null, n) + s.nullable = null + }), + + p1d("const", 6, func(n cue.Value, s *state) { + s.all.add(n, s.value(n)) + }), + + p1("default", func(n cue.Value, s *state) { + sc := *s + s.default_ = sc.value(n) + // TODO: must validate that the default is subsumed by the normal value, + // as CUE will otherwise broaden the accepted values with the default. + s.examples = append(s.examples, s.default_) + }), + + p1("deprecated", func(n cue.Value, s *state) { + if s.boolValue(n) { + s.deprecated = true + } + }), + + p1("examples", func(n cue.Value, s *state) { + if n.Kind() != cue.ListKind { + s.errf(n, `value of "examples" must be an array, found %v`, n.Kind) + } + // TODO: implement examples properly. 
+ // for _, n := range s.listItems("examples", n, true) { + // if ex := s.value(n); !isAny(ex) { + // s.examples = append(s.examples, ex) + // } + // } + }), + + p1("description", func(n cue.Value, s *state) { + s.description, _ = s.strValue(n) + }), + + p1("title", func(n cue.Value, s *state) { + s.title, _ = s.strValue(n) + }), + + p1d("$comment", 7, func(n cue.Value, s *state) { + }), + + p1("$defs", addDefinitions), + p1("definitions", addDefinitions), + p1("$ref", func(n cue.Value, s *state) { + s.usedTypes = allTypes + + u := s.resolveURI(n) + + if u.Fragment != "" && !path.IsAbs(u.Fragment) { + s.addErr(errors.Newf(n.Pos(), "anchors (%s) not supported", u.Fragment)) + // TODO: support anchors + return + } + + expr := s.makeCUERef(n, u) + + if expr == nil { + expr = &ast.BadExpr{From: n.Pos()} + } + + s.all.add(n, expr) + }), + + // Combinators + + // TODO: work this out in more detail: oneOf and anyOf below have the same + // implementation in CUE. The distinction is that for anyOf a result is + // allowed to be ambiguous at the end, whereas for oneOf a disjunction must + // be fully resolved. There is currently no easy way to set this distinction + // in CUE. + // + // One could correctly write oneOf like this once 'not' is implemented: + // + // oneOf(a, b, c) :- + // anyOf( + // allOf(a, not(b), not(c)), + // allOf(not(a), b, not(c)), + // allOf(not(a), not(b), c), + // )) + // + // This is not necessary if the values are mutually exclusive/ have a + // discriminator. 
+ + p2("allOf", func(n cue.Value, s *state) { + var a []ast.Expr + for _, v := range s.listItems("allOf", n, false) { + x, sub := s.schemaState(v, s.allowedTypes, nil, true) + s.allowedTypes &= sub.allowedTypes + s.usedTypes |= sub.usedTypes + if sub.hasConstraints() { + a = append(a, x) + } + } + if len(a) > 0 { + s.all.add(n, ast.NewBinExpr(token.AND, a...)) + } + }), + + p2("anyOf", func(n cue.Value, s *state) { + var types cue.Kind + var a []ast.Expr + for _, v := range s.listItems("anyOf", n, false) { + x, sub := s.schemaState(v, s.allowedTypes, nil, true) + types |= sub.allowedTypes + a = append(a, x) + } + s.allowedTypes &= types + if len(a) > 0 { + s.all.add(n, ast.NewBinExpr(token.OR, a...)) + } + }), + + p2("oneOf", func(n cue.Value, s *state) { + var types cue.Kind + var a []ast.Expr + hasSome := false + for _, v := range s.listItems("oneOf", n, false) { + x, sub := s.schemaState(v, s.allowedTypes, nil, true) + types |= sub.allowedTypes + + // TODO: make more finegrained by making it two pass. + if sub.hasConstraints() { + hasSome = true + } + + if !isAny(x) { + a = append(a, x) + } + } + s.allowedTypes &= types + if len(a) > 0 && hasSome { + s.usedTypes = allTypes + s.all.add(n, ast.NewBinExpr(token.OR, a...)) + } + + // TODO: oneOf({a:x}, {b:y}, ..., not(anyOf({a:x}, {b:y}, ...))), + // can be translated to {} | {a:x}, {b:y}, ... 
+ }), + + // String constraints + + p1("pattern", func(n cue.Value, s *state) { + str, _ := n.String() + if _, err := regexp.Compile(str); err != nil { + if s.cfg.Strict { + s.errf(n, "unsupported regexp: %v", err) + } + return + } + s.usedTypes |= cue.StringKind + s.add(n, stringType, &ast.UnaryExpr{Op: token.MAT, X: s.string(n)}) + }), + + p1("minLength", func(n cue.Value, s *state) { + s.usedTypes |= cue.StringKind + min := s.number(n) + strings := s.addImport(n, "strings") + s.add(n, stringType, ast.NewCall(ast.NewSel(strings, "MinRunes"), min)) + }), + + p1("maxLength", func(n cue.Value, s *state) { + s.usedTypes |= cue.StringKind + max := s.number(n) + strings := s.addImport(n, "strings") + s.add(n, stringType, ast.NewCall(ast.NewSel(strings, "MaxRunes"), max)) + }), + + p1d("contentMediaType", 7, func(n cue.Value, s *state) { + // TODO: only mark as used if it generates something. + // s.usedTypes |= cue.StringKind + }), + + p1d("contentEncoding", 7, func(n cue.Value, s *state) { + // TODO: only mark as used if it generates something. + // s.usedTypes |= cue.StringKind + // 7bit, 8bit, binary, quoted-printable and base64. + // RFC 2054, part 6.1. + // https://tools.ietf.org/html/rfc2045 + // TODO: at least handle bytes. 
+ }), + + // Number constraints + + p2("minimum", func(n cue.Value, s *state) { + s.usedTypes |= cue.NumberKind + op := token.GEQ + if s.exclusiveMin { + op = token.GTR + } + s.add(n, numType, &ast.UnaryExpr{Op: op, X: s.number(n)}) + }), + + p1("exclusiveMinimum", func(n cue.Value, s *state) { + if n.Kind() == cue.BoolKind { + s.exclusiveMin = true + return + } + s.usedTypes |= cue.NumberKind + s.add(n, numType, &ast.UnaryExpr{Op: token.GTR, X: s.number(n)}) + }), + + p2("maximum", func(n cue.Value, s *state) { + s.usedTypes |= cue.NumberKind + op := token.LEQ + if s.exclusiveMax { + op = token.LSS + } + s.add(n, numType, &ast.UnaryExpr{Op: op, X: s.number(n)}) + }), + + p1("exclusiveMaximum", func(n cue.Value, s *state) { + if n.Kind() == cue.BoolKind { + s.exclusiveMax = true + return + } + s.usedTypes |= cue.NumberKind + s.add(n, numType, &ast.UnaryExpr{Op: token.LSS, X: s.number(n)}) + }), + + p1("multipleOf", func(n cue.Value, s *state) { + s.usedTypes |= cue.NumberKind + multiple := s.number(n) + var x big.Int + _, _ = n.MantExp(&x) + if x.Cmp(big.NewInt(0)) != 1 { + s.errf(n, `"multipleOf" value must be < 0; found %s`, n) + } + math := s.addImport(n, "math") + s.add(n, numType, ast.NewCall(ast.NewSel(math, "MultipleOf"), multiple)) + }), + + // Object constraints + + p1("properties", func(n cue.Value, s *state) { + s.usedTypes |= cue.StructKind + obj := s.object(n) + + if n.Kind() != cue.StructKind { + s.errf(n, `"properties" expected an object, found %v`, n.Kind()) + } + + s.processMap(n, func(key string, n cue.Value) { + // property?: value + name := ast.NewString(key) + expr, state := s.schemaState(n, allTypes, []label{{name: key}}, false) + f := &ast.Field{Label: name, Value: expr} + state.doc(f) + f.Optional = token.Blank.Pos() + if len(obj.Elts) > 0 && len(f.Comments()) > 0 { + // TODO: change formatter such that either a a NewSection on the + // field or doc comment will cause a new section. 
+ ast.SetRelPos(f.Comments()[0], token.NewSection) + } + if state.deprecated { + switch expr.(type) { + case *ast.StructLit: + obj.Elts = append(obj.Elts, addTag(name, "deprecated", "")) + default: + f.Attrs = append(f.Attrs, internal.NewAttr("deprecated", "")) + } + } + obj.Elts = append(obj.Elts, f) + s.setField(label{name: key}, f) + }) + }), + + p2("required", func(n cue.Value, s *state) { + if n.Kind() != cue.ListKind { + s.errf(n, `value of "required" must be list of strings, found %v`, n.Kind) + return + } + + s.usedTypes |= cue.StructKind + + // TODO: detect that properties is defined somewhere. + // s.errf(n, `"required" without a "properties" field`) + obj := s.object(n) + + // Create field map + fields := map[string]*ast.Field{} + for _, d := range obj.Elts { + f, ok := d.(*ast.Field) + if !ok { + continue // Could be embedding? See cirrus.json + } + str, _, err := ast.LabelName(f.Label) + if err == nil { + fields[str] = f + } + } + + for _, n := range s.listItems("required", n, true) { + str, ok := s.strValue(n) + f := fields[str] + if f == nil && ok { + f := &ast.Field{ + Label: ast.NewString(str), + Value: ast.NewIdent("_"), + } + fields[str] = f + obj.Elts = append(obj.Elts, f) + continue + } + if f.Optional == token.NoPos { + s.errf(n, "duplicate required field %q", str) + } + f.Optional = token.NoPos + } + }), + + p1d("propertyNames", 6, func(n cue.Value, s *state) { + // [=~pattern]: _ + if names, _ := s.schemaState(n, cue.StringKind, nil, false); !isAny(names) { + s.usedTypes |= cue.StructKind + x := ast.NewStruct(ast.NewList(names), ast.NewIdent("_")) + s.add(n, objectType, x) + } + }), + + // TODO: reenable when we have proper non-monotonic contraint validation. 
+ // p1("minProperties", func(n cue.Value, s *state) { + // s.usedTypes |= cue.StructKind + + // pkg := s.addImport(n, "struct") + // s.addConjunct(n, ast.NewCall(ast.NewSel(pkg, "MinFields"), s.uint(n))) + // }), + + p1("maxProperties", func(n cue.Value, s *state) { + s.usedTypes |= cue.StructKind + + pkg := s.addImport(n, "struct") + x := ast.NewCall(ast.NewSel(pkg, "MaxFields"), s.uint(n)) + s.add(n, objectType, x) + }), + + p1("dependencies", func(n cue.Value, s *state) { + s.usedTypes |= cue.StructKind + + // Schema and property dependencies. + // TODO: the easiest implementation is with comprehensions. + // The nicer implementation is with disjunctions. This has to be done + // at the very end, replacing properties. + /* + *{ property?: _|_ } | { + property: _ + schema + } + */ + }), + + p2("patternProperties", func(n cue.Value, s *state) { + s.usedTypes |= cue.StructKind + if n.Kind() != cue.StructKind { + s.errf(n, `value of "patternProperties" must be an an object, found %v`, n.Kind) + } + obj := s.object(n) + existing := excludeFields(s.obj.Elts) + s.processMap(n, func(key string, n cue.Value) { + // [!~(properties) & pattern]: schema + s.patterns = append(s.patterns, + &ast.UnaryExpr{Op: token.NMAT, X: ast.NewString(key)}) + f := internal.EmbedStruct(ast.NewStruct(&ast.Field{ + Label: ast.NewList(ast.NewBinExpr(token.AND, + &ast.UnaryExpr{Op: token.MAT, X: ast.NewString(key)}, + existing)), + Value: s.schema(n), + })) + ast.SetRelPos(f, token.NewSection) + obj.Elts = append(obj.Elts, f) + }) + }), + + p3("additionalProperties", func(n cue.Value, s *state) { + switch n.Kind() { + case cue.BoolKind: + s.closeStruct = !s.boolValue(n) + + case cue.StructKind: + s.usedTypes |= cue.StructKind + s.closeStruct = true + obj := s.object(n) + if len(obj.Elts) == 0 { + obj.Elts = append(obj.Elts, &ast.Field{ + Label: ast.NewList(ast.NewIdent("string")), + Value: s.schema(n), + }) + return + } + // [!~(properties|patternProperties)]: schema + existing := 
append(s.patterns, excludeFields(obj.Elts)) + f := internal.EmbedStruct(ast.NewStruct(&ast.Field{ + Label: ast.NewList(ast.NewBinExpr(token.AND, existing...)), + Value: s.schema(n), + })) + obj.Elts = append(obj.Elts, f) + + default: + s.errf(n, `value of "additionalProperties" must be an object or boolean`) + } + }), + + // Array constraints. + + p1("items", func(n cue.Value, s *state) { + s.usedTypes |= cue.ListKind + switch n.Kind() { + case cue.StructKind: + elem := s.schema(n) + ast.SetRelPos(elem, token.NoRelPos) + s.add(n, arrayType, ast.NewList(&ast.Ellipsis{Type: elem})) + + case cue.ListKind: + var a []ast.Expr + for _, n := range s.listItems("items", n, true) { + v := s.schema(n) // TODO: label with number literal. + ast.SetRelPos(v, token.NoRelPos) + a = append(a, v) + } + s.list = ast.NewList(a...) + s.add(n, arrayType, s.list) + + default: + s.errf(n, `value of "items" must be an object or array`) + } + }), + + p1("additionalItems", func(n cue.Value, s *state) { + switch n.Kind() { + case cue.BoolKind: + // TODO: support + + case cue.StructKind: + if s.list != nil { + s.usedTypes |= cue.ListKind + elem := s.schema(n) + s.list.Elts = append(s.list.Elts, &ast.Ellipsis{Type: elem}) + } + + default: + s.errf(n, `value of "additionalItems" must be an object or boolean`) + } + }), + + p1("contains", func(n cue.Value, s *state) { + s.usedTypes |= cue.ListKind + list := s.addImport(n, "list") + // TODO: Passing non-concrete values is not yet supported in CUE. 
+ if x := s.schema(n); !isAny(x) { + x := ast.NewCall(ast.NewSel(list, "Contains"), clearPos(x)) + s.add(n, arrayType, x) + } + }), + + // TODO: min/maxContains + + p1("minItems", func(n cue.Value, s *state) { + s.usedTypes |= cue.ListKind + a := []ast.Expr{} + p, err := n.Uint64() + if err != nil { + s.errf(n, "invalid uint") + } + for ; p > 0; p-- { + a = append(a, ast.NewIdent("_")) + } + s.add(n, arrayType, ast.NewList(append(a, &ast.Ellipsis{})...)) + + // TODO: use this once constraint resolution is properly implemented. + // list := s.addImport(n, "list") + // s.addConjunct(n, ast.NewCall(ast.NewSel(list, "MinItems"), clearPos(s.uint(n)))) + }), + + p1("maxItems", func(n cue.Value, s *state) { + s.usedTypes |= cue.ListKind + list := s.addImport(n, "list") + x := ast.NewCall(ast.NewSel(list, "MaxItems"), clearPos(s.uint(n))) + s.add(n, arrayType, x) + + }), + + p1("uniqueItems", func(n cue.Value, s *state) { + s.usedTypes |= cue.ListKind + if s.boolValue(n) { + list := s.addImport(n, "list") + s.add(n, arrayType, ast.NewCall(ast.NewSel(list, "UniqueItems"))) + } + }), +} + +func clearPos(e ast.Expr) ast.Expr { + ast.SetRelPos(e, token.NoRelPos) + return e +} diff --git a/vendor/cuelang.org/go/encoding/jsonschema/decode.go b/vendor/cuelang.org/go/encoding/jsonschema/decode.go new file mode 100644 index 000000000..54f8fd6a7 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/jsonschema/decode.go @@ -0,0 +1,686 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +// TODO: +// - replace converter from YAML to CUE to CUE (schema) to CUE. +// - define OpenAPI definitions als CUE. + +import ( + "fmt" + "net/url" + "sort" + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/ast/astutil" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" +) + +// rootDefs defines the top-level name of the map of definitions that do not +// have a valid identifier name. +// +// TODO: find something more principled, like allowing #."a-b" or `#a-b`. +const rootDefs = "#" + +// A decoder converts JSON schema to CUE. +type decoder struct { + cfg *Config + errs errors.Error + numID int // for creating unique numbers: increment on each use +} + +// addImport registers +func (d *decoder) addImport(n cue.Value, pkg string) *ast.Ident { + spec := ast.NewImport(nil, pkg) + info, err := astutil.ParseImportSpec(spec) + if err != nil { + d.errf(cue.Value{}, "invalid import %q", pkg) + } + ident := ast.NewIdent(info.Ident) + ident.Node = spec + ast.SetPos(ident, n.Pos()) + + return ident +} + +func (d *decoder) decode(v cue.Value) *ast.File { + f := &ast.File{} + + if pkgName := d.cfg.PkgName; pkgName != "" { + pkg := &ast.Package{Name: ast.NewIdent(pkgName)} + f.Decls = append(f.Decls, pkg) + } + + var a []ast.Decl + + if d.cfg.Root == "" { + a = append(a, d.schema(nil, v)...) + } else { + ref := d.parseRef(token.NoPos, d.cfg.Root) + if ref == nil { + return f + } + i, err := v.Lookup(ref...).Fields() + if err != nil { + d.errs = errors.Append(d.errs, errors.Promote(err, "")) + return nil + } + for i.Next() { + ref := append(ref, i.Label()) + lab := d.mapRef(i.Value().Pos(), "", ref) + if len(lab) == 0 { + return nil + } + decls := d.schema(lab, i.Value()) + a = append(a, decls...) + } + } + + f.Decls = append(f.Decls, a...) 
+ + _ = astutil.Sanitize(f) + + return f +} + +func (d *decoder) schema(ref []ast.Label, v cue.Value) (a []ast.Decl) { + root := state{decoder: d} + + var name ast.Label + inner := len(ref) - 1 + + if inner >= 0 { + name = ref[inner] + root.isSchema = true + } + + expr, state := root.schemaState(v, allTypes, nil, false) + + tags := []string{} + if state.jsonschema != "" { + tags = append(tags, fmt.Sprintf("schema=%q", state.jsonschema)) + } + + if name == nil { + if len(tags) > 0 { + body := strings.Join(tags, ",") + a = append(a, &ast.Attribute{ + Text: fmt.Sprintf("@jsonschema(%s)", body)}) + } + + if state.deprecated { + a = append(a, &ast.Attribute{Text: "@deprecated()"}) + } + } else { + if len(tags) > 0 { + a = append(a, addTag(name, "jsonschema", strings.Join(tags, ","))) + } + + if state.deprecated { + a = append(a, addTag(name, "deprecated", "")) + } + } + + if name != nil { + f := &ast.Field{ + Label: name, + Value: expr, + } + + a = append(a, f) + } else if st, ok := expr.(*ast.StructLit); ok { + a = append(a, st.Elts...) + } else { + a = append(a, &ast.EmbedDecl{Expr: expr}) + } + + state.doc(a[0]) + + for i := inner - 1; i >= 0; i-- { + a = []ast.Decl{&ast.Field{ + Label: ref[i], + Value: &ast.StructLit{Elts: a}, + }} + expr = ast.NewStruct(ref[i], expr) + } + + if root.hasSelfReference { + return []ast.Decl{ + &ast.EmbedDecl{Expr: ast.NewIdent(topSchema)}, + &ast.Field{ + Label: ast.NewIdent(topSchema), + Value: &ast.StructLit{Elts: a}, + }, + } + } + + return a +} + +func (d *decoder) errf(n cue.Value, format string, args ...interface{}) ast.Expr { + d.warnf(n.Pos(), format, args...) 
+ return &ast.BadExpr{From: n.Pos()} +} + +func (d *decoder) warnf(p token.Pos, format string, args ...interface{}) { + d.addErr(errors.Newf(p, format, args...)) +} + +func (d *decoder) addErr(err errors.Error) { + d.errs = errors.Append(d.errs, err) +} + +func (d *decoder) number(n cue.Value) ast.Expr { + return n.Syntax(cue.Final()).(ast.Expr) +} + +func (d *decoder) uint(n cue.Value) ast.Expr { + _, err := n.Uint64() + if err != nil { + d.errf(n, "invalid uint") + } + return n.Syntax(cue.Final()).(ast.Expr) +} + +func (d *decoder) bool(n cue.Value) ast.Expr { + return n.Syntax(cue.Final()).(ast.Expr) +} + +func (d *decoder) boolValue(n cue.Value) bool { + x, err := n.Bool() + if err != nil { + d.errf(n, "invalid bool") + } + return x +} + +func (d *decoder) string(n cue.Value) ast.Expr { + return n.Syntax(cue.Final()).(ast.Expr) +} + +func (d *decoder) strValue(n cue.Value) (s string, ok bool) { + s, err := n.String() + if err != nil { + d.errf(n, "invalid string") + return "", false + } + return s, true +} + +// const draftCutoff = 5 + +type coreType int + +const ( + nullType coreType = iota + boolType + numType + stringType + arrayType + objectType + + numCoreTypes +) + +var coreToCUE = []cue.Kind{ + nullType: cue.NullKind, + boolType: cue.BoolKind, + numType: cue.FloatKind, + stringType: cue.StringKind, + arrayType: cue.ListKind, + objectType: cue.StructKind, +} + +func kindToAST(k cue.Kind) ast.Expr { + switch k { + case cue.NullKind: + // TODO: handle OpenAPI restrictions. 
+ return ast.NewNull() + case cue.BoolKind: + return ast.NewIdent("bool") + case cue.FloatKind: + return ast.NewIdent("number") + case cue.StringKind: + return ast.NewIdent("string") + case cue.ListKind: + return ast.NewList(&ast.Ellipsis{}) + case cue.StructKind: + return ast.NewStruct(&ast.Ellipsis{}) + } + return nil +} + +var coreTypeName = []string{ + nullType: "null", + boolType: "bool", + numType: "number", + stringType: "string", + arrayType: "array", + objectType: "object", +} + +type constraintInfo struct { + // typ is an identifier for the root type, if present. + // This can be omitted if there are constraints. + typ ast.Expr + constraints []ast.Expr +} + +func (c *constraintInfo) setTypeUsed(n cue.Value, t coreType) { + c.typ = kindToAST(coreToCUE[t]) + setPos(c.typ, n) + ast.SetRelPos(c.typ, token.NoRelPos) +} + +func (c *constraintInfo) add(n cue.Value, x ast.Expr) { + if !isAny(x) { + setPos(x, n) + ast.SetRelPos(x, token.NoRelPos) + c.constraints = append(c.constraints, x) + } +} + +func (s *state) add(n cue.Value, t coreType, x ast.Expr) { + s.types[t].add(n, x) +} + +func (s *state) setTypeUsed(n cue.Value, t coreType) { + s.types[t].setTypeUsed(n, t) +} + +type state struct { + *decoder + + isSchema bool // for omitting ellipsis in an ast.File + + up *state + parent *state + + path []string + + // idRef is used to refer to this schema in case it defines an $id. + idRef []label + + pos cue.Value + + // The constraints in types represent disjunctions per type. + types [numCoreTypes]constraintInfo + all constraintInfo // values and oneOf etc. + nullable *ast.BasicLit // nullable + + usedTypes cue.Kind + allowedTypes cue.Kind + + default_ ast.Expr + examples []ast.Expr + title string + description string + deprecated bool + exclusiveMin bool // For OpenAPI and legacy support. + exclusiveMax bool // For OpenAPI and legacy support. 
+ jsonschema string + id *url.URL // base URI for $ref + + definitions []ast.Decl + + // Used for inserting definitions, properties, etc. + hasSelfReference bool + obj *ast.StructLit + // Complete at finalize. + fieldRefs map[label]refs + + closeStruct bool + patterns []ast.Expr + + list *ast.ListLit +} + +type label struct { + name string + isDef bool +} + +type refs struct { + field *ast.Field + ident string + refs []*ast.Ident +} + +func (s *state) object(n cue.Value) *ast.StructLit { + if s.obj == nil { + s.obj = &ast.StructLit{} + s.add(n, objectType, s.obj) + } + return s.obj +} + +func (s *state) hasConstraints() bool { + if len(s.all.constraints) > 0 { + return true + } + for _, t := range s.types { + if len(t.constraints) > 0 { + return true + } + } + return len(s.patterns) > 0 || + s.title != "" || + s.description != "" || + s.obj != nil +} + +const allTypes = cue.NullKind | cue.BoolKind | cue.NumberKind | cue.IntKind | + cue.StringKind | cue.ListKind | cue.StructKind + +// finalize constructs a CUE type from the collected constraints. +func (s *state) finalize() (e ast.Expr) { + conjuncts := []ast.Expr{} + disjuncts := []ast.Expr{} + + types := s.allowedTypes &^ s.usedTypes + if types == allTypes { + disjuncts = append(disjuncts, ast.NewIdent("_")) + types = 0 + } + + // Sort literal structs and list last for nicer formatting. 
+ sort.SliceStable(s.types[arrayType].constraints, func(i, j int) bool { + _, ok := s.types[arrayType].constraints[i].(*ast.ListLit) + return !ok + }) + sort.SliceStable(s.types[objectType].constraints, func(i, j int) bool { + _, ok := s.types[objectType].constraints[i].(*ast.StructLit) + return !ok + }) + + for i, t := range s.types { + k := coreToCUE[i] + isAllowed := s.allowedTypes&k != 0 + if len(t.constraints) > 0 { + if t.typ == nil && !isAllowed { + for _, c := range t.constraints { + s.addErr(errors.Newf(c.Pos(), + "constraint not allowed because type %s is excluded", + coreTypeName[i], + )) + } + continue + } + x := ast.NewBinExpr(token.AND, t.constraints...) + disjuncts = append(disjuncts, x) + } else if s.usedTypes&k != 0 { + continue + } else if t.typ != nil { + if !isAllowed { + s.addErr(errors.Newf(t.typ.Pos(), + "constraint not allowed because type %s is excluded", + coreTypeName[i], + )) + continue + } + disjuncts = append(disjuncts, t.typ) + } else if types&k != 0 { + x := kindToAST(k) + if x != nil { + disjuncts = append(disjuncts, x) + } + } + } + + conjuncts = append(conjuncts, s.all.constraints...) + + obj := s.obj + if obj == nil { + obj, _ = s.types[objectType].typ.(*ast.StructLit) + } + if obj != nil { + // TODO: may need to explicitly close. + if !s.closeStruct { + obj.Elts = append(obj.Elts, &ast.Ellipsis{}) + } + } + + if len(disjuncts) > 0 { + conjuncts = append(conjuncts, ast.NewBinExpr(token.OR, disjuncts...)) + } + + if len(conjuncts) == 0 { + e = &ast.BottomLit{} + } else { + e = ast.NewBinExpr(token.AND, conjuncts...) + } + + a := []ast.Expr{e} + if s.nullable != nil { + a = []ast.Expr{s.nullable, e} + } + +outer: + switch { + case s.default_ != nil: + // check conditions where default can be skipped. 
+ switch x := s.default_.(type) { + case *ast.ListLit: + if s.usedTypes == cue.ListKind && len(x.Elts) == 0 { + break outer + } + } + a = append(a, &ast.UnaryExpr{Op: token.MUL, X: s.default_}) + } + + e = ast.NewBinExpr(token.OR, a...) + + if len(s.definitions) > 0 { + if st, ok := e.(*ast.StructLit); ok { + st.Elts = append(st.Elts, s.definitions...) + } else { + st = ast.NewStruct() + st.Elts = append(st.Elts, &ast.EmbedDecl{Expr: e}) + st.Elts = append(st.Elts, s.definitions...) + e = st + } + } + + s.linkReferences() + + return e +} + +func isAny(s ast.Expr) bool { + i, ok := s.(*ast.Ident) + return ok && i.Name == "_" +} + +func (s *state) comment() *ast.CommentGroup { + // Create documentation. + doc := strings.TrimSpace(s.title) + if s.description != "" { + if doc != "" { + doc += "\n\n" + } + doc += s.description + doc = strings.TrimSpace(doc) + } + // TODO: add examples as well? + if doc == "" { + return nil + } + return internal.NewComment(true, doc) +} + +func (s *state) doc(n ast.Node) { + doc := s.comment() + if doc != nil { + ast.SetComments(n, []*ast.CommentGroup{doc}) + } +} + +func (s *state) schema(n cue.Value, idRef ...label) ast.Expr { + expr, _ := s.schemaState(n, allTypes, idRef, false) + // TODO: report unused doc. + return expr +} + +// schemaState is a low-level API for schema. isLogical specifies whether the +// caller is a logical operator like anyOf, allOf, oneOf, or not. +func (s *state) schemaState(n cue.Value, types cue.Kind, idRef []label, isLogical bool) (ast.Expr, *state) { + state := &state{ + up: s, + isSchema: s.isSchema, + decoder: s.decoder, + allowedTypes: types, + path: s.path, + idRef: idRef, + pos: n, + } + if isLogical { + state.parent = s + } + + if n.Kind() != cue.StructKind { + return s.errf(n, "schema expects mapping node, found %s", n.Kind()), state + } + + // do multiple passes over the constraints to ensure they are done in order. 
+ for pass := 0; pass < 4; pass++ { + state.processMap(n, func(key string, value cue.Value) { + // Convert each constraint into a either a value or a functor. + c := constraintMap[key] + if c == nil { + if pass == 0 && s.cfg.Strict { + // TODO: value is not the correct position, albeit close. Fix this. + s.warnf(value.Pos(), "unsupported constraint %q", key) + } + return + } + if c.phase == pass { + c.fn(value, state) + } + }) + } + + return state.finalize(), state +} + +func (s *state) value(n cue.Value) ast.Expr { + k := n.Kind() + s.usedTypes |= k + s.allowedTypes &= k + switch k { + case cue.ListKind: + a := []ast.Expr{} + for i, _ := n.List(); i.Next(); { + a = append(a, s.value(i.Value())) + } + return setPos(ast.NewList(a...), n) + + case cue.StructKind: + a := []ast.Decl{} + s.processMap(n, func(key string, n cue.Value) { + a = append(a, &ast.Field{ + Label: ast.NewString(key), + Value: s.value(n), + }) + }) + // TODO: only open when s.isSchema? + a = append(a, &ast.Ellipsis{}) + return setPos(&ast.StructLit{Elts: a}, n) + + default: + if !n.IsConcrete() { + s.errf(n, "invalid non-concrete value") + } + return n.Syntax(cue.Final()).(ast.Expr) + } +} + +// processMap processes a yaml node, expanding merges. +// +// TODO: in some cases we can translate merges into CUE embeddings. +// This may also prevent exponential blow-up (as may happen when +// converting YAML to JSON). +func (s *state) processMap(n cue.Value, f func(key string, n cue.Value)) { + saved := s.path + defer func() { s.path = saved }() + + // TODO: intercept references to allow for optimized performance. 
+ for i, _ := n.Fields(); i.Next(); { + key := i.Label() + s.path = append(saved, key) + f(key, i.Value()) + } +} + +func (s *state) listItems(name string, n cue.Value, allowEmpty bool) (a []cue.Value) { + if n.Kind() != cue.ListKind { + s.errf(n, `value of %q must be an array, found %v`, name, n.Kind()) + } + for i, _ := n.List(); i.Next(); { + a = append(a, i.Value()) + } + if !allowEmpty && len(a) == 0 { + s.errf(n, `array for %q must be non-empty`, name) + } + return a +} + +// excludeFields returns a CUE expression that can be used to exclude the +// fields of the given declaration in a label expression. For instance, for +// +// { foo: 1, bar: int } +// +// it creates +// +// "^(foo|bar)$" +// +// which can be used in a label expression to define types for all fields but +// those existing: +// +// [!~"^(foo|bar)$"]: string +// +func excludeFields(decls []ast.Decl) ast.Expr { + var a []string + for _, d := range decls { + f, ok := d.(*ast.Field) + if !ok { + continue + } + str, _, _ := ast.LabelName(f.Label) + if str != "" { + a = append(a, str) + } + } + re := fmt.Sprintf("^(%s)$", strings.Join(a, "|")) + return &ast.UnaryExpr{Op: token.NMAT, X: ast.NewString(re)} +} + +func addTag(field ast.Label, tag, value string) *ast.Field { + return &ast.Field{ + Label: field, + Value: ast.NewIdent("_"), + Attrs: []*ast.Attribute{ + {Text: fmt.Sprintf("@%s(%s)", tag, value)}, + }, + } +} + +func setPos(e ast.Expr, v cue.Value) ast.Expr { + ast.SetPos(e, v.Pos()) + return e +} diff --git a/vendor/cuelang.org/go/encoding/jsonschema/doc.go b/vendor/cuelang.org/go/encoding/jsonschema/doc.go new file mode 100644 index 000000000..c0b813ce2 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/jsonschema/doc.go @@ -0,0 +1,19 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package jsonschema converts JSON Schema to CUE +// +// JSON Schema data is presented in CUE, so any of the supported encodings +// that can represent JSON Schema data can be used as a source. +package jsonschema diff --git a/vendor/cuelang.org/go/encoding/jsonschema/jsonschema.go b/vendor/cuelang.org/go/encoding/jsonschema/jsonschema.go new file mode 100644 index 000000000..9e81b7355 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/jsonschema/jsonschema.go @@ -0,0 +1,88 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package jsonschema implements the JSON schema standard. +// +// Mapping and Linking +// +// JSON Schema are often defined in a single file. CUE, on the other hand +// idiomatically defines schema as a definition. +// +// CUE: +// $schema: which schema is used for validation. +// $id: which validation does this schema provide. +// +// Foo: _ @jsonschema(sc) +// @source(https://...) // What schema is used to validate. 
+// +// NOTE: JSON Schema is a draft standard and may undergo backwards incompatible +// changes. +package jsonschema + +import ( + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/token" +) + +// Extract converts JSON Schema data into an equivalent CUE representation. +// +// The generated CUE schema is guaranteed to deem valid any value that is +// a valid instance of the source JSON schema. +func Extract(data *cue.Instance, cfg *Config) (f *ast.File, err error) { + d := &decoder{cfg: cfg} + + f = d.decode(data.Value()) + if d.errs != nil { + return nil, d.errs + } + return f, nil +} + +// A Config configures a JSON Schema encoding or decoding. +type Config struct { + PkgName string + + // ID sets the URL of the original source, corresponding to the $id field. + ID string + + // JSON reference of location containing schema. The empty string indicates + // that there is a single schema at the root. + // + // Examples: + // "#/" top-level fields are schemas. + // "#/components/schemas" the canonical OpenAPI location. + Root string + + // Map maps the locations of schemas and definitions to a new location. + // References are updated accordingly. A returned label must be + // an identifier or string literal. + // + // The default mapping is + // {} {} + // {"definitions", foo} {#foo} or {#, foo} + // {"$defs", foo} {#foo} or {#, foo} + Map func(pos token.Pos, path []string) ([]ast.Label, error) + + // TODO: configurability to make it compatible with OpenAPI, such as + // - locations of definitions: #/components/schemas, for instance. + // - selection and definition of formats + // - documentation hooks. + + // Strict reports an error for unsupported features, rather than ignoring + // them. + Strict bool + + _ struct{} // prohibit casting from different type. 
+} diff --git a/vendor/cuelang.org/go/encoding/jsonschema/ref.go b/vendor/cuelang.org/go/encoding/jsonschema/ref.go new file mode 100644 index 000000000..26443fe1c --- /dev/null +++ b/vendor/cuelang.org/go/encoding/jsonschema/ref.go @@ -0,0 +1,442 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +import ( + "net/url" + "path" + "strconv" + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" +) + +func (d *decoder) parseRef(p token.Pos, str string) []string { + u, err := url.Parse(str) + if err != nil { + d.addErr(errors.Newf(p, "invalid JSON reference: %s", err)) + return nil + } + + if u.Host != "" || u.Path != "" { + d.addErr(errors.Newf(p, "external references (%s) not supported", str)) + // TODO: handle + // host: + // If the host corresponds to a package known to cue, + // load it from there. It would prefer schema converted to + // CUE, although we could consider loading raw JSON schema + // if present. + // If not present, advise the user to run cue get. + // path: + // Look up on file system or relatively to authority location. + return nil + } + + if !path.IsAbs(u.Fragment) { + d.addErr(errors.Newf(p, "anchors (%s) not supported", u.Fragment)) + // TODO: support anchors + return nil + } + + // NOTE: Go bug?: url.URL has no raw representation of the fragment. 
This + // means that %2F gets translated to `/` before it can be split. This, in + // turn, means that field names cannot have a `/` as name. + + return splitFragment(u) +} + +// resolveURI parses a URI from n and resolves it in the current context. +// To resolve it in the current context, it looks for the closest URI from +// an $id in the parent scopes and the uses the URI resolution to get the +// new URI. +// +// This method is used to resolve any URI, including those from $id and $ref. +func (s *state) resolveURI(n cue.Value) *url.URL { + str, ok := s.strValue(n) + if !ok { + return nil + } + + u, err := url.Parse(str) + if err != nil { + s.addErr(errors.Newf(n.Pos(), "invalid JSON reference: %s", err)) + return nil + } + + for { + if s.id != nil { + u = s.id.ResolveReference(u) + break + } + if s.up == nil { + break + } + s = s.up + } + + return u +} + +const topSchema = "_schema" + +// makeCUERef converts a URI into a CUE reference for the current location. +// The returned identifier (or first expression in a selection chain), is +// hardwired to point to the resolved value. This will allow astutil.Sanitize +// to automatically unshadow any shadowed variables. +func (s *state) makeCUERef(n cue.Value, u *url.URL) ast.Expr { + a := splitFragment(u) + + switch fn := s.cfg.Map; { + case fn != nil: + // TODO: This block is only used in case s.cfg.Map is set, which is + // currently only used for OpenAPI. Handling should be brought more in + // line with JSON schema. + a, err := fn(n.Pos(), a) + if err != nil { + s.addErr(errors.Newf(n.Pos(), "invalid reference %q: %v", u, err)) + return nil + } + if len(a) == 0 { + // TODO: should we allow inserting at root level? 
+ s.addErr(errors.Newf(n.Pos(), + "invalid empty reference returned by map for %q", u)) + return nil + } + sel, ok := a[0].(ast.Expr) + if !ok { + sel = &ast.BadExpr{} + } + for _, l := range a[1:] { + switch x := l.(type) { + case *ast.Ident: + sel = &ast.SelectorExpr{X: sel, Sel: x} + + case *ast.BasicLit: + sel = &ast.IndexExpr{X: sel, Index: x} + } + } + return sel + } + + var ident *ast.Ident + + for ; ; s = s.up { + if s.up == nil { + switch { + case u.Host == "" && u.Path == "", + s.id != nil && s.id.Host == u.Host && s.id.Path == u.Path: + if len(a) == 0 { + // refers to the top of the file. We will allow this by + // creating a helper schema as such: + // _schema: {...} + // _schema + // This is created at the finalization stage if + // hasSelfReference is set. + s.hasSelfReference = true + + ident = ast.NewIdent(topSchema) + ident.Node = s.obj + return ident + } + + ident, a = s.getNextIdent(n, a) + + case u.Host != "": + // Reference not found within scope. Create an import reference. + + // TODO: allow the configuration to specify a map from + // URI domain+paths to CUE packages. + + // TODO: currently only $ids that are in scope can be + // referenced. We could consider doing an extra pass to record + // all '$id's in a file to be able to link to them even if they + // are not in scope. + p := u.Path + + base := path.Base(p) + if !ast.IsValidIdent(base) { + if strings.HasSuffix(base, ".json") { + base = base[:len(base)-len(".json")] + } + if !ast.IsValidIdent(base) { + // Find something more clever to do there. For now just + // pick "schema" as the package name. + base = "schema" + } + p += ":" + base + } + + ident = ast.NewIdent(base) + ident.Node = &ast.ImportSpec{Path: ast.NewString(u.Host + p)} + + default: + // Just a path, not sure what that means. 
+ s.errf(n, "unknown domain for reference %q", u) + return nil + } + break + } + + if s.id == nil { + continue + } + + if s.id.Host == u.Host && s.id.Path == u.Path { + if len(a) == 0 { + if len(s.idRef) == 0 { + // This is a reference to either root or a schema for which + // we do not yet support references. See Issue #386. + if s.up.up != nil { + s.errf(n, "cannot refer to internal schema %q", u) + return nil + } + + // This is referring to the root scope. There is a dummy + // state above the root state that we need to update. + s = s.up + + // refers to the top of the file. We will allow this by + // creating a helper schema as such: + // _schema: {...} + // _schema + // This is created at the finalization stage if + // hasSelfReference is set. + s.hasSelfReference = true + ident = ast.NewIdent(topSchema) + ident.Node = s.obj + return ident + } + + x := s.idRef[0] + if !x.isDef && !ast.IsValidIdent(x.name) { + s.errf(n, "referring to field %q not supported", x.name) + return nil + } + e := ast.NewIdent(x.name) + if len(s.idRef) == 1 { + return e + } + return newSel(e, s.idRef[1]) + } + ident, a = s.getNextIdent(n, a) + ident.Node = s.obj + break + } + } + + return s.newSel(ident, n, a) +} + +// getNextSelector translates a JSON Reference path into a CUE path by consuming +// the first path elements and returning the corresponding CUE label. 
+func (s *state) getNextSelector(v cue.Value, a []string) (l label, tail []string) { + switch elem := a[0]; elem { + case "$defs", "definitions": + if len(a) == 1 { + s.errf(v, "cannot refer to %s section: must refer to one of its elements", a[0]) + return label{}, nil + } + + if name := "#" + a[1]; ast.IsValidIdent(name) { + return label{name, true}, a[2:] + } + + return label{"#", true}, a[1:] + + case "properties": + if len(a) == 1 { + s.errf(v, "cannot refer to %s section: must refer to one of its elements", a[0]) + return label{}, nil + } + + return label{a[1], false}, a[2:] + + default: + return label{elem, false}, a[1:] + + case "additionalProperties", + "patternProperties", + "items", + "additionalItems": + // TODO: as a temporary workaround, include the schema verbatim. + // TODO: provide definitions for these in CUE. + s.errf(v, "referring to field %q not yet supported", elem) + + // Other known fields cannot be supported. + return label{}, nil + } +} + +// newSel converts a JSON Reference path and initial CUE identifier to +// a CUE selection path. +func (s *state) newSel(e ast.Expr, v cue.Value, a []string) ast.Expr { + for len(a) > 0 { + var label label + label, a = s.getNextSelector(v, a) + e = newSel(e, label) + } + return e +} + +// newSel converts label to a CUE index and creates an expression to index +// into e. 
+func newSel(e ast.Expr, label label) ast.Expr { + if label.isDef { + return ast.NewSel(e, label.name) + + } + if ast.IsValidIdent(label.name) && !internal.IsDefOrHidden(label.name) { + return ast.NewSel(e, label.name) + } + return &ast.IndexExpr{X: e, Index: ast.NewString(label.name)} +} + +func (s *state) setField(lab label, f *ast.Field) { + x := s.getRef(lab) + x.field = f + s.setRef(lab, x) + x = s.getRef(lab) +} + +func (s *state) getRef(lab label) refs { + if s.fieldRefs == nil { + s.fieldRefs = make(map[label]refs) + } + x, ok := s.fieldRefs[lab] + if !ok { + if lab.isDef || + (ast.IsValidIdent(lab.name) && !internal.IsDefOrHidden(lab.name)) { + x.ident = lab.name + } else { + x.ident = "_X" + strconv.Itoa(s.decoder.numID) + s.decoder.numID++ + } + s.fieldRefs[lab] = x + } + return x +} + +func (s *state) setRef(lab label, r refs) { + s.fieldRefs[lab] = r +} + +// getNextIdent gets the first CUE reference from a JSON Reference path and +// converts it to a CUE identifier. +func (s *state) getNextIdent(v cue.Value, a []string) (resolved *ast.Ident, tail []string) { + lab, a := s.getNextSelector(v, a) + + x := s.getRef(lab) + ident := ast.NewIdent(x.ident) + x.refs = append(x.refs, ident) + s.setRef(lab, x) + + return ident, a +} + +// linkReferences resolves identifiers to relevant nodes. This allows +// astutil.Sanitize to unshadow nodes if necessary. +func (s *state) linkReferences() { + for _, r := range s.fieldRefs { + if r.field == nil { + // TODO: improve error message. + s.errf(cue.Value{}, "reference to non-existing value %q", r.ident) + continue + } + + // link resembles the link value. See astutil.Resolve. 
+ var link ast.Node + + ident, ok := r.field.Label.(*ast.Ident) + if ok && ident.Name == r.ident { + link = r.field.Value + } else if len(r.refs) > 0 { + r.field.Label = &ast.Alias{ + Ident: ast.NewIdent(r.ident), + Expr: r.field.Label.(ast.Expr), + } + link = r.field + } + + for _, i := range r.refs { + i.Node = link + } + } +} + +// splitFragment splits the fragment part of a URI into path components. The +// result may be an empty slice. +// +// TODO: this requires RawFragment introduced in go1.15 to function properly. +// As for now, CUE still uses go1.12. +func splitFragment(u *url.URL) []string { + if u.Fragment == "" { + return nil + } + s := strings.TrimRight(u.Fragment[1:], "/") + if s == "" { + return nil + } + return strings.Split(s, "/") +} + +func (d *decoder) mapRef(p token.Pos, str string, ref []string) []ast.Label { + fn := d.cfg.Map + if fn == nil { + fn = jsonSchemaRef + } + a, err := fn(p, ref) + if err != nil { + if str == "" { + str = "#/" + strings.Join(ref, "/") + } + d.addErr(errors.Newf(p, "invalid reference %q: %v", str, err)) + return nil + } + if len(a) == 0 { + // TODO: should we allow inserting at root level? + if str == "" { + str = "#/" + strings.Join(ref, "/") + } + d.addErr(errors.Newf(p, + "invalid empty reference returned by map for %q", str)) + return nil + } + return a +} + +func jsonSchemaRef(p token.Pos, a []string) ([]ast.Label, error) { + // TODO: technically, references could reference a + // non-definition. We disallow this case for the standard + // JSON Schema interpretation. We could detect cases that + // are not definitions and then resolve those as literal + // values. + if len(a) != 2 || (a[0] != "definitions" && a[0] != "$defs") { + return nil, errors.Newf(p, + // Don't mention the ability to use $defs, as this definition seems + // to already have been withdrawn from the JSON Schema spec. 
+ "$ref must be of the form #/definitions/...") + } + name := a[1] + if ast.IsValidIdent(name) && + name != rootDefs[1:] && + !internal.IsDefOrHidden(name) { + return []ast.Label{ast.NewIdent("#" + name)}, nil + } + return []ast.Label{ast.NewIdent(rootDefs), ast.NewString(name)}, nil +} diff --git a/vendor/cuelang.org/go/encoding/openapi/build.go b/vendor/cuelang.org/go/encoding/openapi/build.go new file mode 100644 index 000000000..d9d186530 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/openapi/build.go @@ -0,0 +1,1293 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "fmt" + "math" + "path" + "regexp" + "sort" + "strconv" + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" +) + +type buildContext struct { + inst *cue.Instance + instExt *cue.Instance + refPrefix string + path []string + + expandRefs bool + structural bool + exclusiveBool bool + nameFunc func(inst *cue.Instance, path []string) string + descFunc func(v cue.Value) string + fieldFilter *regexp.Regexp + evalDepth int // detect cycles when resolving references + + schemas *OrderedMap + + // Track external schemas. 
+ externalRefs map[string]*externalType +} + +type externalType struct { + ref string + inst *cue.Instance + path []string + value cue.Value +} + +type oaSchema = OrderedMap + +type typeFunc func(b *builder, a cue.Value) + +func schemas(g *Generator, inst *cue.Instance) (schemas *ast.StructLit, err error) { + var fieldFilter *regexp.Regexp + if g.FieldFilter != "" { + fieldFilter, err = regexp.Compile(g.FieldFilter) + if err != nil { + return nil, errors.Newf(token.NoPos, "invalid field filter: %v", err) + } + + // verify that certain elements are still passed. + for _, f := range strings.Split( + "version,title,allOf,anyOf,not,enum,Schema/properties,Schema/items"+ + "nullable,type", ",") { + if fieldFilter.MatchString(f) { + return nil, errors.Newf(token.NoPos, "field filter may not exclude %q", f) + } + } + } + + if g.Version == "" { + g.Version = "3.0.0" + } + + c := buildContext{ + inst: inst, + instExt: inst, + refPrefix: "components/schemas", + expandRefs: g.ExpandReferences, + structural: g.ExpandReferences, + nameFunc: g.ReferenceFunc, + descFunc: g.DescriptionFunc, + schemas: &OrderedMap{}, + externalRefs: map[string]*externalType{}, + fieldFilter: fieldFilter, + } + + switch g.Version { + case "3.0.0": + c.exclusiveBool = true + case "3.1.0": + default: + return nil, errors.Newf(token.NoPos, "unsupported version %s", g.Version) + } + + defer func() { + switch x := recover().(type) { + case nil: + case *openapiError: + err = x + default: + panic(x) + } + }() + + // Although paths is empty for now, it makes it valid OpenAPI spec. + + i, err := inst.Value().Fields(cue.Definitions(true)) + if err != nil { + return nil, err + } + for i.Next() { + if !i.IsDefinition() { + continue + } + // message, enum, or constant. 
+ label := i.Label() + if c.isInternal(label) { + continue + } + if i.IsDefinition() && strings.HasPrefix(label, "#") { + label = label[1:] + } + ref := c.makeRef(inst, []string{label}) + if ref == "" { + continue + } + c.schemas.Set(ref, c.build(label, i.Value())) + } + + // keep looping until a fixed point is reached. + for done := 0; len(c.externalRefs) != done; { + done = len(c.externalRefs) + + // From now on, all references need to be expanded + external := []string{} + for k := range c.externalRefs { + external = append(external, k) + } + sort.Strings(external) + + for _, k := range external { + ext := c.externalRefs[k] + c.instExt = ext.inst + last := len(ext.path) - 1 + c.path = ext.path[:last] + name := ext.path[last] + c.schemas.Set(ext.ref, c.build(name, cue.Dereference(ext.value))) + } + } + + a := c.schemas.Elts + sort.Slice(a, func(i, j int) bool { + x, _, _ := ast.LabelName(a[i].(*ast.Field).Label) + y, _, _ := ast.LabelName(a[j].(*ast.Field).Label) + return x < y + }) + + return (*ast.StructLit)(c.schemas), nil +} + +func (c *buildContext) build(name string, v cue.Value) *ast.StructLit { + return newCoreBuilder(c).schema(nil, name, v) +} + +// isInternal reports whether or not to include this type. +func (c *buildContext) isInternal(name string) bool { + // TODO: allow a regexp filter in Config. If we have closed structs and + // definitions, this will likely be unnecessary. + return strings.HasSuffix(name, "_value") +} + +func (b *builder) failf(v cue.Value, format string, args ...interface{}) { + panic(&openapiError{ + errors.NewMessage(format, args), + b.ctx.path, + v.Pos(), + }) +} + +func (b *builder) unsupported(v cue.Value) { + if b.format == "" { + // Not strictly an error, but consider listing it as a warning + // in strict mode. 
+ } +} + +func (b *builder) checkArgs(a []cue.Value, n int) { + if len(a)-1 != n { + b.failf(a[0], "%v must be used with %d arguments", a[0], len(a)-1) + } +} + +func (b *builder) schema(core *builder, name string, v cue.Value) *ast.StructLit { + oldPath := b.ctx.path + b.ctx.path = append(b.ctx.path, name) + defer func() { b.ctx.path = oldPath }() + + var c *builder + if core == nil && b.ctx.structural { + c = newCoreBuilder(b.ctx) + c.buildCore(v) // initialize core structure + c.coreSchema() + } else { + c = newRootBuilder(b.ctx) + c.core = core + } + + return c.fillSchema(v) +} + +func (b *builder) getDoc(v cue.Value) { + doc := []string{} + if b.ctx.descFunc != nil { + if str := b.ctx.descFunc(v); str != "" { + doc = append(doc, str) + } + } else { + for _, d := range v.Doc() { + doc = append(doc, d.Text()) + } + } + if len(doc) > 0 { + str := strings.TrimSpace(strings.Join(doc, "\n\n")) + b.setSingle("description", ast.NewString(str), true) + } +} + +func (b *builder) fillSchema(v cue.Value) *ast.StructLit { + if b.filled != nil { + return b.filled + } + + b.setValueType(v) + b.format = extractFormat(v) + b.deprecated = getDeprecated(v) + + if b.core == nil || len(b.core.values) > 1 { + isRef := b.value(v, nil) + if isRef { + b.typ = "" + } + + if !isRef && !b.ctx.structural { + b.getDoc(v) + } + } + + schema := b.finish() + s := (*ast.StructLit)(schema) + + simplify(b, s) + + sortSchema(s) + + b.filled = s + return s +} + +func label(d ast.Decl) string { + f := d.(*ast.Field) + s, _, _ := ast.LabelName(f.Label) + return s +} + +func value(d ast.Decl) ast.Expr { + return d.(*ast.Field).Value +} + +func sortSchema(s *ast.StructLit) { + sort.Slice(s.Elts, func(i, j int) bool { + iName := label(s.Elts[i]) + jName := label(s.Elts[j]) + pi := fieldOrder[iName] + pj := fieldOrder[jName] + if pi != pj { + return pi > pj + } + return iName < jName + }) +} + +var fieldOrder = map[string]int{ + "description": 31, + "type": 30, + "format": 29, + "required": 28, + 
"properties": 27, + "minProperties": 26, + "maxProperties": 25, + "minimum": 24, + "exclusiveMinimum": 23, + "maximum": 22, + "exclusiveMaximum": 21, + "minItems": 18, + "maxItems": 17, + "minLength": 16, + "maxLength": 15, + "items": 14, + "enum": 13, + "default": 12, +} + +func (b *builder) value(v cue.Value, f typeFunc) (isRef bool) { + count := 0 + disallowDefault := false + var values cue.Value + if b.ctx.expandRefs || b.format != "" { + values = cue.Dereference(v) + count = 1 + } else { + dedup := map[string]bool{} + hasNoRef := false + accept := v + conjuncts := appendSplit(nil, cue.AndOp, v) + for _, v := range conjuncts { + // This may be a reference to an enum. So we need to check references before + // dissecting them. + switch p, r := v.Reference(); { + case len(r) > 0: + ref := b.ctx.makeRef(p, r) + if ref == "" { + v = cue.Dereference(v) + break + } + if dedup[ref] { + continue + } + dedup[ref] = true + + b.addRef(v, p, r) + disallowDefault = true + continue + } + hasNoRef = true + count++ + values = values.UnifyAccept(v, accept) + } + isRef = !hasNoRef && len(dedup) == 1 + } + + if count > 0 { // TODO: implement IsAny. + // TODO: perhaps find optimal representation. For now we assume the + // representation as is is already optimized for human consumption. + if values.IncompleteKind()&cue.StructKind != cue.StructKind && !isRef { + values = values.Eval() + } + + conjuncts := appendSplit(nil, cue.AndOp, values) + for i, v := range conjuncts { + switch { + case isConcrete(v): + b.dispatch(f, v) + if !b.isNonCore() { + b.set("enum", ast.NewList(b.decode(v))) + } + default: + a := appendSplit(nil, cue.OrOp, v) + for i, v := range a { + if _, r := v.Reference(); len(r) == 0 { + a[i] = v.Eval() + } + } + + _ = i + // TODO: it matters here whether a conjunct is obtained + // from embedding or normal unification. Fix this at some + // point. 
+ // + // if len(a) > 1 { + // // Filter disjuncts that cannot unify with other conjuncts, + // // and thus can never be satisfied. + // // TODO: there should be generalized simplification logic + // // in CUE (outside of the usual implicit simplifications). + // k := 0 + // outer: + // for _, d := range a { + // for j, w := range conjuncts { + // if i == j { + // continue + // } + // if d.Unify(w).Err() != nil { + // continue outer + // } + // } + // a[k] = d + // k++ + // } + // a = a[:k] + // } + switch len(a) { + case 0: + // Conjunct entirely eliminated. + case 1: + v = a[0] + if err := v.Err(); err != nil { + b.failf(v, "openapi: %v", err) + return + } + b.dispatch(f, v) + default: + b.disjunction(a, f) + } + } + } + } + + if v, ok := v.Default(); ok && v.IsConcrete() && !disallowDefault { + // TODO: should we show the empty list default? This would be correct + // but perhaps a bit too pedantic and noisy. + switch { + case v.Kind() == cue.ListKind: + iter, _ := v.List() + if !iter.Next() { + // Don't show default for empty list. + break + } + fallthrough + default: + if !b.isNonCore() { + e := v.Syntax(cue.Concrete(true)).(ast.Expr) + b.setFilter("Schema", "default", e) + } + } + } + return isRef +} + +func appendSplit(a []cue.Value, splitBy cue.Op, v cue.Value) []cue.Value { + op, args := v.Expr() + if op == cue.NoOp && len(args) == 1 { + // TODO: this is to deal with default value removal. This may change + // whe we completely separate default values from values. + a = append(a, args...) 
+ } else if op != splitBy { + a = append(a, v) + } else { + for _, v := range args { + a = appendSplit(a, splitBy, v) + } + } + return a +} + +func countNodes(v cue.Value) (n int) { + switch op, a := v.Expr(); op { + case cue.OrOp, cue.AndOp: + for _, v := range a { + n += countNodes(v) + } + n += len(a) - 1 + default: + switch v.Kind() { + case cue.ListKind: + for i, _ := v.List(); i.Next(); { + n += countNodes(i.Value()) + } + case cue.StructKind: + for i, _ := v.Fields(); i.Next(); { + n += countNodes(i.Value()) + 1 + } + } + } + return n + 1 +} + +// isConcrete reports whether v is concrete and not a struct (recursively). +// structs are not supported as the result of a struct enum depends on how +// conjunctions and disjunctions are distributed. We could consider still doing +// this if we define a normal form. +func isConcrete(v cue.Value) bool { + if !v.IsConcrete() { + return false + } + if v.Kind() == cue.StructKind { + return false // TODO: handle struct kinds + } + for list, _ := v.List(); list.Next(); { + if !isConcrete(list.Value()) { + return false + } + } + return true +} + +func (b *builder) disjunction(a []cue.Value, f typeFunc) { + disjuncts := []cue.Value{} + enums := []ast.Expr{} // TODO: unique the enums + nullable := false // Only supported in OpenAPI, not JSON schema + + for _, v := range a { + switch { + case v.Null() == nil: + // TODO: for JSON schema, we need to fall through. + nullable = true + + case isConcrete(v): + enums = append(enums, b.decode(v)) + + default: + disjuncts = append(disjuncts, v) + } + } + + // Only one conjunct? 
+ if len(disjuncts) == 0 || (len(disjuncts) == 1 && len(enums) == 0) { + if len(disjuncts) == 1 { + b.value(disjuncts[0], f) + } + if len(enums) > 0 && !b.isNonCore() { + b.set("enum", ast.NewList(enums...)) + } + if nullable { + b.setSingle("nullable", ast.NewBool(true), true) // allowed in Structural + } + return + } + + anyOf := []ast.Expr{} + if len(enums) > 0 { + anyOf = append(anyOf, b.kv("enum", ast.NewList(enums...))) + } + + if nullable { + b.setSingle("nullable", ast.NewBool(true), true) + } + + schemas := make([]*ast.StructLit, len(disjuncts)) + for i, v := range disjuncts { + c := newOASBuilder(b) + c.value(v, f) + t := c.finish() + schemas[i] = (*ast.StructLit)(t) + if len(t.Elts) == 0 { + if c.typ == "" { + return + } + } + } + + for i, v := range disjuncts { + // In OpenAPI schema are open by default. To ensure forward compatibility, + // we do not represent closed structs with additionalProperties: false + // (this is discouraged and often disallowed by implementions), but + // rather enforce this by ensuring uniqueness of the disjuncts. + // + // TODO: subsumption may currently give false negatives. We are extra + // conservative in these instances. + subsumed := []ast.Expr{} + for j, w := range disjuncts { + if i == j { + continue + } + err := v.Subsume(w, cue.Schema()) + if err == nil || errors.Is(err, internal.ErrInexact) { + subsumed = append(subsumed, schemas[j]) + } + } + + t := schemas[i] + if len(subsumed) > 0 { + // TODO: elide anyOf if there is only one element. This should be + // rare if originating from oneOf. 
+ exclude := ast.NewStruct("not", + ast.NewStruct("anyOf", ast.NewList(subsumed...))) + if len(t.Elts) == 0 { + t = exclude + } else { + t = ast.NewStruct("allOf", ast.NewList(t, exclude)) + } + } + anyOf = append(anyOf, t) + } + + b.set("oneOf", ast.NewList(anyOf...)) +} + +func (b *builder) setValueType(v cue.Value) { + if b.core != nil { + return + } + + switch v.IncompleteKind() { + case cue.BoolKind: + b.typ = "boolean" + case cue.FloatKind, cue.NumberKind: + b.typ = "number" + case cue.IntKind: + b.typ = "integer" + case cue.BytesKind: + b.typ = "string" + case cue.StringKind: + b.typ = "string" + case cue.StructKind: + b.typ = "object" + case cue.ListKind: + b.typ = "array" + } +} + +func (b *builder) dispatch(f typeFunc, v cue.Value) { + if f != nil { + f(b, v) + return + } + + switch v.IncompleteKind() { + case cue.NullKind: + // TODO: for JSON schema we would set the type here. For OpenAPI, + // it must be nullable. + b.setSingle("nullable", ast.NewBool(true), true) + + case cue.BoolKind: + b.setType("boolean", "") + // No need to call. + + case cue.FloatKind, cue.NumberKind: + // TODO: + // Common Name type format Comments + // float number float + // double number double + b.setType("number", "") // may be overridden to integer + b.number(v) + + case cue.IntKind: + // integer integer int32 signed 32 bits + // long integer int64 signed 64 bits + b.setType("integer", "") // may be overridden to integer + b.number(v) + + // TODO: for JSON schema, consider adding multipleOf: 1. 
+ + case cue.BytesKind: + // byte string byte base64 encoded characters + // binary string binary any sequence of octets + b.setType("string", "byte") + b.bytes(v) + case cue.StringKind: + // date string date As defined by full-date - RFC3339 + // dateTime string date-time As defined by date-time - RFC3339 + // password string password A hint to UIs to obscure input + b.setType("string", "") + b.string(v) + case cue.StructKind: + b.setType("object", "") + b.object(v) + case cue.ListKind: + b.setType("array", "") + b.array(v) + } +} + +// object supports the following +// - maxProperties: maximum allowed fields in this struct. +// - minProperties: minimum required fields in this struct. +// - patternProperties: [regexp]: schema +// TODO: we can support this once .kv(key, value) allow +// foo [=~"pattern"]: type +// An instance field must match all schemas for which a regexp matches. +// Even though it is not supported in OpenAPI, we should still accept it +// when receiving from OpenAPI. We could possibly use disjunctions to encode +// this. +// - dependencies: what? +// - propertyNames: schema +// every property name in the enclosed schema matches that of +func (b *builder) object(v cue.Value) { + // TODO: discriminator objects: we could theoretically derive discriminator + // objects automatically: for every object in a oneOf/allOf/anyOf, or any + // object composed of the same type, if a property is required and set to a + // constant value for each type, it is a discriminator. + + switch op, a := v.Expr(); op { + case cue.CallOp: + name := fmt.Sprint(a[0]) + switch name { + case "struct.MinFields": + b.checkArgs(a, 1) + b.setFilter("Schema", "minProperties", b.int(a[1])) + return + + case "struct.MaxFields": + b.checkArgs(a, 1) + b.setFilter("Schema", "maxProperties", b.int(a[1])) + return + + default: + b.unsupported(a[0]) + return + } + + case cue.NoOp: + // TODO: extract format from specific type. 
+ + default: + b.failf(v, "unsupported op %v for object type (%v)", op, v) + return + } + + required := []ast.Expr{} + for i, _ := v.Fields(); i.Next(); { + required = append(required, ast.NewString(i.Label())) + } + if len(required) > 0 { + b.setFilter("Schema", "required", ast.NewList(required...)) + } + + var properties *OrderedMap + if b.singleFields != nil { + properties = b.singleFields.getMap("properties") + } + hasProps := properties != nil + if !hasProps { + properties = &OrderedMap{} + } + + for i, _ := v.Fields(cue.Optional(true), cue.Definitions(true)); i.Next(); { + label := i.Label() + if b.ctx.isInternal(label) { + continue + } + if i.IsDefinition() && strings.HasPrefix(label, "#") { + label = label[1:] + } + var core *builder + if b.core != nil { + core = b.core.properties[label] + } + schema := b.schema(core, label, i.Value()) + switch { + case i.IsDefinition(): + ref := b.ctx.makeRef(b.ctx.instExt, append(b.ctx.path, label)) + if ref == "" { + continue + } + b.ctx.schemas.Set(ref, schema) + case !b.isNonCore() || len(schema.Elts) > 0: + properties.Set(label, schema) + } + } + + if !hasProps && properties.len() > 0 { + b.setSingle("properties", (*ast.StructLit)(properties), false) + } + + if t, ok := v.Elem(); ok && (b.core == nil || b.core.items == nil) { + schema := b.schema(nil, "*", t) + if len(schema.Elts) > 0 { + b.setSingle("additionalProperties", schema, true) // Not allowed in structural. + } + } + + // TODO: maxProperties, minProperties: can be done once we allow cap to + // unify with structs. +} + +// List constraints: +// +// Max and min items. +// - maxItems: int (inclusive) +// - minItems: int (inclusive) +// - items (item type) +// schema: applies to all items +// array of schemas: +// schema at pos must match if both value and items are defined. 
+// - additional items: +// schema: where items must be an array of schemas, intstance elements +// succeed for if they match this value for any value at a position +// greater than that covered by items. +// - uniqueItems: bool +// TODO: support with list.Unique() unique() or comprehensions. +// For the latter, we need equality for all values, which is doable, +// but not done yet. +// +// NOT SUPPORTED IN OpenAPI: +// - contains: +// schema: an array instance is valid if at least one element matches +// this schema. +func (b *builder) array(v cue.Value) { + + switch op, a := v.Expr(); op { + case cue.CallOp: + name := fmt.Sprint(a[0]) + switch name { + case "list.UniqueItems", "list.UniqueItems()": + b.checkArgs(a, 0) + b.setFilter("Schema", "uniqueItems", ast.NewBool(true)) + return + + case "list.MinItems": + b.checkArgs(a, 1) + b.setFilter("Schema", "minItems", b.int(a[1])) + return + + case "list.MaxItems": + b.checkArgs(a, 1) + b.setFilter("Schema", "maxItems", b.int(a[1])) + return + + default: + b.unsupported(a[0]) + return + } + + case cue.NoOp: + // TODO: extract format from specific type. + + default: + b.failf(v, "unsupported op %v for array type", op) + return + } + + // Possible conjuncts: + // - one list (CUE guarantees merging all conjuncts) + // - no cap: is unified with list + // - unique items: at most one, but idempotent if multiple. + // There is never a need for allOf or anyOf. Note that a CUE list + // corresponds almost one-to-one to OpenAPI lists. + items := []ast.Expr{} + count := 0 + for i, _ := v.List(); i.Next(); count++ { + items = append(items, b.schema(nil, strconv.Itoa(count), i.Value())) + } + if len(items) > 0 { + // TODO: per-item schema are not allowed in OpenAPI, only in JSON Schema. + // Perhaps we should turn this into an OR after first normalizing + // the entries. 
+ b.set("items", ast.NewList(items...)) + // panic("per-item types not supported in OpenAPI") + } + + // TODO: + // A CUE cap can be a set of discontinuous ranges. If we encounter this, + // we can create an allOf(list type, anyOf(ranges)). + cap := v.Len() + hasMax := false + maxLength := int64(math.MaxInt64) + + if n, capErr := cap.Int64(); capErr == nil { + maxLength = n + hasMax = true + } else { + b.value(cap, (*builder).listCap) + } + + if !hasMax || int64(len(items)) < maxLength { + if typ, ok := v.Elem(); ok { + var core *builder + if b.core != nil { + core = b.core.items + } + t := b.schema(core, "*", typ) + if len(items) > 0 { + b.setFilter("Schema", "additionalItems", t) // Not allowed in structural. + } else if !b.isNonCore() || len(t.Elts) > 0 { + b.setSingle("items", t, true) + } + } + } +} + +func (b *builder) listCap(v cue.Value) { + switch op, a := v.Expr(); op { + case cue.LessThanOp: + b.setFilter("Schema", "maxItems", b.inta(a[0], -1)) + case cue.LessThanEqualOp: + b.setFilter("Schema", "maxItems", b.inta(a[0], 0)) + case cue.GreaterThanOp: + b.setFilter("Schema", "minItems", b.inta(a[0], 1)) + case cue.GreaterThanEqualOp: + if b.int64(a[0]) > 0 { + b.setFilter("Schema", "minItems", b.inta(a[0], 0)) + } + case cue.NoOp: + // must be type, so okay. + case cue.NotEqualOp: + i := b.int(a[0]) + b.setNot("allOff", ast.NewList( + b.kv("minItems", i), + b.kv("maxItems", i), + )) + + default: + b.failf(v, "unsupported op for list capacity %v", op) + return + } +} + +func (b *builder) number(v cue.Value) { + // Multiple conjuncts mostly means just additive constraints. + // Type may be number of float. 
+ + switch op, a := v.Expr(); op { + case cue.LessThanOp: + if b.ctx.exclusiveBool { + b.setFilter("Schema", "exclusiveMaximum", ast.NewBool(true)) + b.setFilter("Schema", "maximum", b.big(a[0])) + } else { + b.setFilter("Schema", "exclusiveMaximum", b.big(a[0])) + } + + case cue.LessThanEqualOp: + b.setFilter("Schema", "maximum", b.big(a[0])) + + case cue.GreaterThanOp: + if b.ctx.exclusiveBool { + b.setFilter("Schema", "exclusiveMinimum", ast.NewBool(true)) + b.setFilter("Schema", "minimum", b.big(a[0])) + } else { + b.setFilter("Schema", "exclusiveMinimum", b.big(a[0])) + } + + case cue.GreaterThanEqualOp: + b.setFilter("Schema", "minimum", b.big(a[0])) + + case cue.NotEqualOp: + i := b.big(a[0]) + b.setNot("allOff", ast.NewList( + b.kv("minimum", i), + b.kv("maximum", i), + )) + + case cue.CallOp: + name := fmt.Sprint(a[0]) + switch name { + case "math.MultipleOf": + b.checkArgs(a, 1) + b.setFilter("Schema", "multipleOf", b.int(a[1])) + + default: + b.unsupported(a[0]) + return + } + + case cue.NoOp: + // TODO: extract format from specific type. + + default: + b.failf(v, "unsupported op for number %v", op) + } +} + +// Multiple Regexp conjuncts are represented as allOf all other +// constraints can be combined unless in the even of discontinuous +// lengths. + +// string supports the following options: +// +// - maxLength (Unicode codepoints) +// - minLength (Unicode codepoints) +// - pattern (a regexp) +// +// The regexp pattern is as follows, and is limited to be a strict subset of RE2: +// Ref: https://tools.ietf.org/html/draft-wright-json-schema-validation-01#section-3.3 +// +// JSON schema requires ECMA 262 regular expressions, but +// limited to the following constructs: +// - simple character classes: [abc] +// - range character classes: [a-z] +// - complement character classes: [^abc], [^a-z] +// - simple quantifiers: +, *, ?, and lazy versions +? *? ?? +// - range quantifiers: {x}, {x,y}, {x,}, {x}?, {x,y}?, {x,}? 
+// - begin and end anchors: ^ and $ +// - simple grouping: (...) +// - alteration: | +// This is a subset of RE2 used by CUE. +// +// Most notably absent: +// - the '.' for any character (not sure if that is a doc bug) +// - character classes \d \D [[::]] \pN \p{Name} \PN \P{Name} +// - word boundaries +// - capturing directives. +// - flag setting +// - comments +// +// The capturing directives and comments can be removed without +// compromising the meaning of the regexp (TODO). Removing +// flag setting will be tricky. Unicode character classes, +// boundaries, etc can be compiled into simple character classes, +// although the resulting regexp will look cumbersome. +// +func (b *builder) string(v cue.Value) { + switch op, a := v.Expr(); op { + + case cue.RegexMatchOp, cue.NotRegexMatchOp: + s, err := a[0].String() + if err != nil { + // TODO: this may be an unresolved interpolation or expression. Consider + // whether it is reasonable to treat unevaluated operands as wholes and + // generate a compound regular expression. + b.failf(v, "regexp value must be a string: %v", err) + return + } + if op == cue.RegexMatchOp { + b.setFilter("Schema", "pattern", ast.NewString(s)) + } else { + b.setNot("pattern", ast.NewString(s)) + } + + case cue.NoOp, cue.SelectorOp: + + case cue.CallOp: + name := fmt.Sprint(a[0]) + switch name { + case "strings.MinRunes": + b.checkArgs(a, 1) + b.setFilter("Schema", "minLength", b.int(a[1])) + return + + case "strings.MaxRunes": + b.checkArgs(a, 1) + b.setFilter("Schema", "maxLength", b.int(a[1])) + return + + default: + b.unsupported(a[0]) + return + } + + default: + b.failf(v, "unsupported op %v for string type", op) + } +} + +func (b *builder) bytes(v cue.Value) { + switch op, a := v.Expr(); op { + + case cue.RegexMatchOp, cue.NotRegexMatchOp: + s, err := a[0].Bytes() + if err != nil { + // TODO: this may be an unresolved interpolation or expression. 
Consider + // whether it is reasonable to treat unevaluated operands as wholes and + // generate a compound regular expression. + b.failf(v, "regexp value must be of type bytes: %v", err) + return + } + + e := ast.NewString(string(s)) + if op == cue.RegexMatchOp { + b.setFilter("Schema", "pattern", e) + } else { + b.setNot("pattern", e) + } + + // TODO: support the following JSON schema constraints + // - maxLength + // - minLength + + case cue.NoOp, cue.SelectorOp: + + default: + b.failf(v, "unsupported op %v for bytes type", op) + } +} + +type builder struct { + ctx *buildContext + typ string + format string + singleFields *oaSchema + current *oaSchema + allOf []*ast.StructLit + deprecated bool + + // Building structural schema + core *builder + kind cue.Kind + filled *ast.StructLit + values []cue.Value // in structural mode, all values of not and *Of. + keys []string + properties map[string]*builder + items *builder +} + +func newRootBuilder(c *buildContext) *builder { + return &builder{ctx: c} +} + +func newOASBuilder(parent *builder) *builder { + core := parent + if parent.core != nil { + core = parent.core + } + b := &builder{ + core: core, + ctx: parent.ctx, + typ: parent.typ, + format: parent.format, + } + return b +} + +func (b *builder) isNonCore() bool { + return b.core != nil +} + +func (b *builder) setType(t, format string) { + if b.typ == "" { + b.typ = t + if format != "" { + b.format = format + } + } +} + +func setType(t *oaSchema, b *builder) { + if b.typ != "" { + if b.core == nil || (b.core.typ != b.typ && !b.ctx.structural) { + if !t.exists("type") { + t.Set("type", ast.NewString(b.typ)) + } + } + } + if b.format != "" { + if b.core == nil || b.core.format != b.format { + t.Set("format", ast.NewString(b.format)) + } + } +} + +// setFilter is like set, but allows the key-value pair to be filtered. 
+func (b *builder) setFilter(schema, key string, v ast.Expr) { + if re := b.ctx.fieldFilter; re != nil && re.MatchString(path.Join(schema, key)) { + return + } + b.set(key, v) +} + +// setSingle sets a value of which there should only be one. +func (b *builder) setSingle(key string, v ast.Expr, drop bool) { + if b.singleFields == nil { + b.singleFields = &OrderedMap{} + } + if b.singleFields.exists(key) { + if !drop { + b.failf(cue.Value{}, "more than one value added for key %q", key) + } + } + b.singleFields.Set(key, v) +} + +func (b *builder) set(key string, v ast.Expr) { + if b.current == nil { + b.current = &OrderedMap{} + b.allOf = append(b.allOf, (*ast.StructLit)(b.current)) + } else if b.current.exists(key) { + b.current = &OrderedMap{} + b.allOf = append(b.allOf, (*ast.StructLit)(b.current)) + } + b.current.Set(key, v) +} + +func (b *builder) kv(key string, value ast.Expr) *ast.StructLit { + return ast.NewStruct(key, value) +} + +func (b *builder) setNot(key string, value ast.Expr) { + b.add(ast.NewStruct("not", b.kv(key, value))) +} + +func (b *builder) finish() *ast.StructLit { + var t *OrderedMap + + if b.filled != nil { + return b.filled + } + switch len(b.allOf) { + case 0: + t = &OrderedMap{} + + case 1: + hasRef := false + for _, e := range b.allOf[0].Elts { + if f, ok := e.(*ast.Field); ok { + name, _, _ := ast.LabelName(f.Label) + hasRef = hasRef || name == "$ref" + } + } + if !hasRef || b.singleFields == nil { + t = (*OrderedMap)(b.allOf[0]) + break + } + fallthrough + + default: + exprs := []ast.Expr{} + if t != nil { + exprs = append(exprs, (*ast.StructLit)(t)) + } + for _, s := range b.allOf { + exprs = append(exprs, s) + } + t = &OrderedMap{} + t.Set("allOf", ast.NewList(exprs...)) + } + if b.singleFields != nil { + b.singleFields.Elts = append(b.singleFields.Elts, t.Elts...) 
+ t = b.singleFields + } + if b.deprecated { + t.Set("deprecated", ast.NewBool(true)) + } + setType(t, b) + sortSchema((*ast.StructLit)(t)) + return (*ast.StructLit)(t) +} + +func (b *builder) add(t *ast.StructLit) { + b.allOf = append(b.allOf, t) +} + +func (b *builder) addConjunct(f func(*builder)) { + c := newOASBuilder(b) + f(c) + b.add((*ast.StructLit)(c.finish())) +} + +func (b *builder) addRef(v cue.Value, inst *cue.Instance, ref []string) { + name := b.ctx.makeRef(inst, ref) + b.addConjunct(func(b *builder) { + b.allOf = append(b.allOf, ast.NewStruct( + "$ref", ast.NewString(path.Join("#", b.ctx.refPrefix, name)))) + }) + + if b.ctx.inst != inst { + b.ctx.externalRefs[name] = &externalType{ + ref: name, + inst: inst, + path: ref, + value: v, + } + } +} + +func (b *buildContext) makeRef(inst *cue.Instance, ref []string) string { + ref = append([]string{}, ref...) + for i, s := range ref { + if strings.HasPrefix(s, "#") { + ref[i] = s[1:] + } + } + a := make([]string, 0, len(ref)+3) + if b.nameFunc != nil { + a = append(a, b.nameFunc(inst, ref)) + } else { + a = append(a, ref...) 
+ } + return strings.Join(a, ".") +} + +func (b *builder) int64(v cue.Value) int64 { + v, _ = v.Default() + i, err := v.Int64() + if err != nil { + b.failf(v, "could not retrieve int: %v", err) + } + return i +} + +func (b *builder) intExpr(i int64) ast.Expr { + return &ast.BasicLit{ + Kind: token.INT, + Value: fmt.Sprint(i), + } +} + +func (b *builder) int(v cue.Value) ast.Expr { + return b.intExpr(b.int64(v)) +} + +func (b *builder) inta(v cue.Value, offset int64) ast.Expr { + return b.intExpr(b.int64(v) + offset) +} + +func (b *builder) decode(v cue.Value) ast.Expr { + v, _ = v.Default() + return v.Syntax().(ast.Expr) +} + +func (b *builder) big(v cue.Value) ast.Expr { + v, _ = v.Default() + return v.Syntax().(ast.Expr) +} diff --git a/vendor/cuelang.org/go/encoding/openapi/crd.go b/vendor/cuelang.org/go/encoding/openapi/crd.go new file mode 100644 index 000000000..7251b43e4 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/openapi/crd.go @@ -0,0 +1,172 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +// This file contains functionality for structural schema, a subset of OpenAPI +// used for CRDs. +// +// See https://kubernetes.io/blog/2019/06/20/crd-structural-schema/ for details. +// +// Insofar definitions are compatible, openapi normalizes to structural whenever +// possible. 
+// +// A core structural schema is only made out of the following fields: +// +// - properties +// - items +// - additionalProperties +// - type +// - nullable +// - title +// - descriptions. +// +// Where the types must be defined for all fields. +// +// In addition, the value validations constraints may be used as defined in +// OpenAPI, with the restriction that +// - within the logical constraints anyOf, allOf, oneOf, and not +// additionalProperties, type, nullable, title, and description may not be used. +// - all mentioned fields must be defined in the core schema. +// +// It appears that CRDs do not allow references. +// + +import ( + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" +) + +// newCoreBuilder returns a builder that represents a structural schema. +func newCoreBuilder(c *buildContext) *builder { + b := newRootBuilder(c) + b.properties = map[string]*builder{} + return b +} + +func (b *builder) coreSchemaWithName(name string) *ast.StructLit { + oldPath := b.ctx.path + b.ctx.path = append(b.ctx.path, name) + s := b.coreSchema() + b.ctx.path = oldPath + return s +} + +// coreSchema creates the core part of a structural OpenAPI. +func (b *builder) coreSchema() *ast.StructLit { + switch b.kind { + case cue.ListKind: + if b.items != nil { + b.setType("array", "") + schema := b.items.coreSchemaWithName("*") + b.setSingle("items", schema, false) + } + + case cue.StructKind: + p := &OrderedMap{} + for _, k := range b.keys { + sub := b.properties[k] + p.Set(k, sub.coreSchemaWithName(k)) + } + if p.len() > 0 || b.items != nil { + b.setType("object", "") + } + if p.len() > 0 { + b.setSingle("properties", (*ast.StructLit)(p), false) + } + // TODO: in Structural schema only one of these is allowed. + if b.items != nil { + schema := b.items.coreSchemaWithName("*") + b.setSingle("additionalProperties", schema, false) + } + } + + // If there was only a single value associated with this node, we can + // safely assume there were no disjunctions etc. 
In structural mode this + // is the only chance we get to set certain properties. + if len(b.values) == 1 { + return b.fillSchema(b.values[0]) + } + + // TODO: do type analysis if we have multiple values and piece out more + // information that applies to all possible instances. + + return b.finish() +} + +// buildCore collects the CUE values for the structural OpenAPI tree. +// To this extent, all fields of both conjunctions and disjunctions are +// collected in a single properties map. +func (b *builder) buildCore(v cue.Value) { + if !b.ctx.expandRefs { + _, r := v.Reference() + if len(r) > 0 { + return + } + } + b.getDoc(v) + format := extractFormat(v) + if format != "" { + b.format = format + } else { + v = v.Eval() + b.kind = v.IncompleteKind() + + switch b.kind { + case cue.StructKind: + if typ, ok := v.Elem(); ok { + if b.items == nil { + b.items = newCoreBuilder(b.ctx) + } + b.items.buildCore(typ) + } + b.buildCoreStruct(v) + + case cue.ListKind: + if typ, ok := v.Elem(); ok { + if b.items == nil { + b.items = newCoreBuilder(b.ctx) + } + b.items.buildCore(typ) + } + } + } + + for _, bv := range b.values { + if bv.Equals(v) { + return + } + } + b.values = append(b.values, v) +} + +func (b *builder) buildCoreStruct(v cue.Value) { + op, args := v.Expr() + switch op { + case cue.OrOp, cue.AndOp: + for _, v := range args { + b.buildCore(v) + } + } + for i, _ := v.Fields(cue.Optional(true), cue.Hidden(false)); i.Next(); { + label := i.Label() + sub, ok := b.properties[label] + if !ok { + sub = newCoreBuilder(b.ctx) + b.properties[label] = sub + b.keys = append(b.keys, label) + } + sub.buildCore(i.Value()) + } +} diff --git a/vendor/cuelang.org/go/encoding/openapi/decode.go b/vendor/cuelang.org/go/encoding/openapi/decode.go new file mode 100644 index 000000000..a90efd316 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/openapi/decode.go @@ -0,0 +1,141 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// 
you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + "cuelang.org/go/encoding/jsonschema" + "cuelang.org/go/internal" +) + +// Extract converts OpenAPI definitions to an equivalent CUE representation. +// +// It currently only converts entries in #/components/schema and extracts some +// meta data. +func Extract(data *cue.Instance, c *Config) (*ast.File, error) { + // TODO: find a good OpenAPI validator. Both go-openapi and kin-openapi + // seem outdated. The k8s one might be good, but avoid pulling in massive + // amounts of dependencies. 
+ + f := &ast.File{} + add := func(d ast.Decl) { + if d != nil { + f.Decls = append(f.Decls, d) + } + } + + js, err := jsonschema.Extract(data, &jsonschema.Config{ + Root: oapiSchemas, + Map: openAPIMapping, + }) + if err != nil { + return nil, err + } + + v := data.Value() + + doc, _ := v.Lookup("info", "title").String() // Required + if s, _ := v.Lookup("info", "description").String(); s != "" { + doc += "\n\n" + s + } + cg := internal.NewComment(true, doc) + + if c.PkgName != "" { + p := &ast.Package{Name: ast.NewIdent(c.PkgName)} + p.AddComment(cg) + add(p) + } else { + add(cg) + } + + preamble := js.Preamble() + body := js.Decls[len(preamble):] + for _, d := range preamble { + switch x := d.(type) { + case *ast.Package: + return nil, errors.Newf(x.Pos(), "unexpected package %q", x.Name.Name) + + default: + add(x) + } + } + + // TODO: allow attributes before imports? Would be easier. + + // TODO: do we want to store the OpenAPI version? + // if version, _ := v.Lookup("openapi").String(); version != "" { + // add(internal.NewAttr("openapi", "version="+ version)) + // } + + if info := v.Lookup("info"); info.Exists() { + decls := []interface{}{} + if st, ok := info.Syntax().(*ast.StructLit); ok { + // Remove title. + for _, d := range st.Elts { + if f, ok := d.(*ast.Field); ok { + switch name, _, _ := ast.LabelName(f.Label); name { + case "title", "version": + // title: *"title" | string + decls = append(decls, &ast.Field{ + Label: f.Label, + Value: ast.NewBinExpr(token.OR, + &ast.UnaryExpr{Op: token.MUL, X: f.Value}, + ast.NewIdent("string")), + }) + continue + } + } + decls = append(decls, d) + } + add(&ast.Field{ + Label: ast.NewIdent("info"), + Value: ast.NewStruct(decls...), + }) + } + } + + if len(body) > 0 { + ast.SetRelPos(body[0], token.NewSection) + f.Decls = append(f.Decls, body...) + } + + return f, nil +} + +const oapiSchemas = "#/components/schemas/" + +// rootDefs is the fallback for schemas that are not valid identifiers. 
+// TODO: find something more principled. +const rootDefs = "#SchemaMap" + +func openAPIMapping(pos token.Pos, a []string) ([]ast.Label, error) { + if len(a) != 3 || a[0] != "components" || a[1] != "schemas" { + return nil, errors.Newf(pos, + `openapi: reference must be of the form %q; found "#/%s"`, + oapiSchemas, strings.Join(a, "/")) + } + name := a[2] + if ast.IsValidIdent(name) && + name != rootDefs[1:] && + !internal.IsDefOrHidden(name) { + return []ast.Label{ast.NewIdent("#" + name)}, nil + } + return []ast.Label{ast.NewIdent(rootDefs), ast.NewString(name)}, nil +} diff --git a/vendor/cuelang.org/go/encoding/openapi/doc.go b/vendor/cuelang.org/go/encoding/openapi/doc.go new file mode 100644 index 000000000..3c388befa --- /dev/null +++ b/vendor/cuelang.org/go/encoding/openapi/doc.go @@ -0,0 +1,21 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package openapi provides functionality for mapping CUE to and from +// OpenAPI v3.0.0. +// +// It currently handles OpenAPI Schema components only. +// +// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md#schemaObject. 
+package openapi diff --git a/vendor/cuelang.org/go/encoding/openapi/errors.go b/vendor/cuelang.org/go/encoding/openapi/errors.go new file mode 100644 index 000000000..620161348 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/openapi/errors.go @@ -0,0 +1,41 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +var _ errors.Error = &openapiError{} + +// implements cue/Error +type openapiError struct { + errors.Message + path []string + pos token.Pos +} + +func (e *openapiError) Position() token.Pos { + return e.pos +} + +func (e *openapiError) InputPositions() []token.Pos { + return nil +} + +func (e *openapiError) Path() []string { + return e.path +} diff --git a/vendor/cuelang.org/go/encoding/openapi/openapi.go b/vendor/cuelang.org/go/encoding/openapi/openapi.go new file mode 100644 index 000000000..6eaebb8ca --- /dev/null +++ b/vendor/cuelang.org/go/encoding/openapi/openapi.go @@ -0,0 +1,237 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi + +import ( + "encoding/json" + "fmt" + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" + cuejson "cuelang.org/go/encoding/json" +) + +// A Config defines options for converting CUE to and from OpenAPI. +type Config struct { + // PkgName defines to package name for a generated CUE package. + PkgName string + + // Info specifies the info section of the OpenAPI document. To be a valid + // OpenAPI document, it must include at least the title and version fields. + // Info may be a *ast.StructLit or any type that marshals to JSON. + Info interface{} + + // ReferenceFunc allows users to specify an alternative representation + // for references. An empty string tells the generator to expand the type + // in place and, if applicable, not generate a schema for that entity. + ReferenceFunc func(inst *cue.Instance, path []string) string + + // DescriptionFunc allows rewriting a description associated with a certain + // field. A typical implementation compiles the description from the + // comments obtains from the Doc method. No description field is added if + // the empty string is returned. + DescriptionFunc func(v cue.Value) string + + // SelfContained causes all non-expanded external references to be included + // in this document. + SelfContained bool + + // OpenAPI version to use. Supported as of v3.0.0. + Version string + + // FieldFilter defines a regular expression of all fields to omit from the + // output. 
It is only allowed to filter fields that add additional + // constraints. Fields that indicate basic types cannot be removed. It is + // an error for such fields to be excluded by this filter. + // Fields are qualified by their Object type. For instance, the + // minimum field of the schema object is qualified as Schema/minimum. + FieldFilter string + + // ExpandReferences replaces references with actual objects when generating + // OpenAPI Schema. It is an error for an CUE value to refer to itself + // if this option is used. + ExpandReferences bool +} + +type Generator = Config + +// Gen generates the set OpenAPI schema for all top-level types of the +// given instance. +func Gen(inst *cue.Instance, c *Config) ([]byte, error) { + if c == nil { + c = defaultConfig + } + all, err := c.All(inst) + if err != nil { + return nil, err + } + return json.Marshal(all) +} + +// Generate generates the set of OpenAPI schema for all top-level types of the +// given instance. +// +// Note: only a limited number of top-level types are supported so far. +func Generate(inst *cue.Instance, c *Config) (*ast.File, error) { + all, err := schemas(c, inst) + if err != nil { + return nil, err + } + top, err := c.compose(inst, all) + if err != nil { + return nil, err + } + return &ast.File{Decls: top.Elts}, nil +} + +// All generates an OpenAPI definition from the given instance. +// +// Note: only a limited number of top-level types are supported so far. 
// ---- vendor/cuelang.org/go/encoding/openapi/openapi.go (tail) ----

// All generates the complete OpenAPI structure for inst as an OrderedMap.
//
// Deprecated: use Generate
func (g *Generator) All(inst *cue.Instance) (*OrderedMap, error) {
	all, err := schemas(g, inst)
	if err != nil {
		return nil, err
	}
	top, err := g.compose(inst, all)
	return (*OrderedMap)(top), err
}

// toCUE converts an arbitrary Go value to a CUE expression by round-tripping
// it through JSON. name is used for error reporting only.
func toCUE(name string, x interface{}) (v ast.Expr, err error) {
	b, err := json.Marshal(x)
	if err == nil {
		v, err = cuejson.Extract(name, b)
	}
	if err != nil {
		return nil, errors.Wrapf(err, token.NoPos,
			"openapi: could not encode %s", name)
	}
	return v, nil
}

// compose wraps the generated component schemas in the top-level OpenAPI
// document structure (openapi/info/paths/components), deriving the info
// section from c.Info, the instance's "info" field, or generated defaults.
func (c *Config) compose(inst *cue.Instance, schemas *ast.StructLit) (x *ast.StructLit, err error) {

	var errs errors.Error

	var title, version string
	var info *ast.StructLit

	// Walk the regular top-level fields; definitions are handled by the
	// schema generator. NOTE(review): the error returned by Fields is
	// deliberately discarded here — presumably invalid instances surface
	// errors during schema generation; confirm upstream.
	for i, _ := inst.Value().Fields(cue.Definitions(true)); i.Next(); {
		if i.IsDefinition() {
			continue
		}
		label := i.Label()
		attr := i.Value().Attribute("openapi")
		if s, _ := attr.String(0); s != "" {
			label = s
		}
		switch label {
		case "$version":
		case "-":
		case "info":
			info, _ = i.Value().Syntax().(*ast.StructLit)
			if info == nil {
				errs = errors.Append(errs, errors.Newf(i.Value().Pos(),
					"info must be a struct"))
			}
			title, _ = i.Value().Lookup("title").String()
			version, _ = i.Value().Lookup("version").String()

		default:
			errs = errors.Append(errs, errors.Newf(i.Value().Pos(),
				"openapi: unsupported top-level field %q", label))
		}
	}

	// Support of OrderedMap is mostly for backwards compatibility.
	switch x := c.Info.(type) {
	case nil:
		if title == "" {
			title = "Generated by cue."
			// Prefer the package doc comment as the title, if present.
			for _, d := range inst.Doc() {
				title = strings.TrimSpace(d.Text())
				break
			}
			if p := inst.ImportPath; title == "" && p != "" {
				title = fmt.Sprintf("Generated by cue from package %q", p)
			}
		}

		if version == "" {
			version, _ = inst.Lookup("$version").String()
			if version == "" {
				version = "no version"
			}
		}

		if info == nil {
			info = ast.NewStruct(
				"title", ast.NewString(title),
				"version", ast.NewString(version),
			)
		} else {
			// Overwrite title/version on the user-supplied info struct.
			m := (*OrderedMap)(info)
			m.Set("title", ast.NewString(title))
			m.Set("version", ast.NewString(version))
		}

	case *ast.StructLit:
		info = x
	case *OrderedMap:
		info = (*ast.StructLit)(x)
	case OrderedMap:
		info = (*ast.StructLit)(&x)
	default:
		x, err := toCUE("info section", x)
		if err != nil {
			return nil, err
		}
		info, _ = x.(*ast.StructLit)
		errs = errors.Append(errs, errors.Newf(token.NoPos,
			"Info field supplied must be an *ast.StructLit"))
	}

	return ast.NewStruct(
		"openapi", ast.NewString(c.Version),
		"info", info,
		"paths", ast.NewStruct(),
		"components", ast.NewStruct("schemas", schemas),
	), errs
}

// Schemas extracts component/schemas from the CUE top-level types.
func (g *Generator) Schemas(inst *cue.Instance) (*OrderedMap, error) {
	comps, err := schemas(g, inst)
	if err != nil {
		return nil, err
	}
	return (*OrderedMap)(comps), err
}

var defaultConfig = &Config{}

// TODO
// The conversion interprets @openapi( {, }) attributes as follows:
//
//      readOnly        sets the readOnly flag for a property in the schema
//                      only one of readOnly and writeOnly may be set.
//      writeOnly       sets the writeOnly flag for a property in the schema
//                      only one of readOnly and writeOnly may be set.
//      discriminator   explicitly sets a field as the discriminator field
//

// ---- vendor/cuelang.org/go/encoding/openapi/orderedmap.go ----

// Copyright 2019 CUE Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package openapi

import (
	"fmt"

	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/literal"
	"cuelang.org/go/cue/token"
	"cuelang.org/go/internal/encoding/json"
)

// An OrderedMap is a set of key-value pairs that preserves the order in which
// items were added. It marshals to JSON as an object.
//
// Deprecated: the API now returns an ast.File. This allows OpenAPI to be
// represented as JSON, YAML, or CUE data, in addition to being able to use
// all the ast-related tooling.
type OrderedMap ast.StructLit

// KeyValue associates a value with a key.
type KeyValue struct {
	Key   string
	Value interface{}
}

// TODO: these functions are here to support backwards compatibility with Istio.
// At some point, once this is removed from Istio, this can be removed.

// fromLegacy converts a legacy OrderedMap-based value to its ast.Expr
// equivalent. It panics on unsupported types.
func fromLegacy(x interface{}) ast.Expr {
	switch x := x.(type) {
	case *OrderedMap:
		return (*ast.StructLit)(x)
	case []*OrderedMap:
		a := make([]ast.Expr, len(x))
		for i, v := range x {
			a[i] = fromLegacy(v)
		}
		return ast.NewList(a...)
	case string:
		return ast.NewString(x)
	case ast.Expr:
		return x
	default:
		panic(fmt.Sprintf("unsupported type %T", x))
	}
}

// toLegacy converts an ast.Expr back to its legacy OrderedMap-based
// representation where possible, returning the expression unchanged otherwise.
func toLegacy(x ast.Expr) interface{} {
	switch x := x.(type) {
	case *ast.StructLit:
		return (*OrderedMap)(x)
	case *ast.ListLit:
		a := make([]*OrderedMap, len(x.Elts))
		for i, v := range x.Elts {
			e, ok := v.(*ast.StructLit)
			if !ok {
				// Mixed-element lists cannot be represented; return as is.
				return x
			}
			a[i] = (*OrderedMap)(e)
		}
		return a
	case *ast.BasicLit:
		if x.Kind == token.STRING {
			str, err := literal.Unquote(x.Value)
			if err != nil {
				return x
			}
			return str
		}
	}
	return x
}

// len reports the number of elements in m.
func (m *OrderedMap) len() int {
	return len(m.Elts)
}

// Pairs returns the KeyValue pairs associated with m.
func (m *OrderedMap) Pairs() []KeyValue {
	kvs := make([]KeyValue, len(m.Elts))
	for i, e := range m.Elts {
		kvs[i].Key = label(e)
		kvs[i].Value = toLegacy(e.(*ast.Field).Value)
	}
	return kvs
}

// find returns the field with the given key, or nil if absent.
func (m *OrderedMap) find(key string) *ast.Field {
	for _, v := range m.Elts {
		f, ok := v.(*ast.Field)
		if !ok {
			continue
		}
		s, _, err := ast.LabelName(f.Label)
		if err == nil && s == key {
			return f
		}
	}
	return nil
}

// Set sets a key value pair. If a pair with the same key already existed, it
// will be replaced with the new value. Otherwise, the new value is added to
// the end. The value must be of type string, ast.Expr, or *OrderedMap.
//
// Deprecated: use cuelang.org/go/cue/ast to manipulate ASTs.
func (m *OrderedMap) Set(key string, x interface{}) {
	switch x := x.(type) {
	case *OrderedMap:
		m.setExpr(key, (*ast.StructLit)(x))
	case string:
		m.setExpr(key, ast.NewString(x))
	case ast.Expr:
		m.setExpr(key, x)
	default:
		v, err := toCUE("Set", x)
		if err != nil {
			panic(err)
		}
		m.setExpr(key, v)
	}
}

// setExpr sets key to expr, replacing an existing field with the same key.
func (m *OrderedMap) setExpr(key string, expr ast.Expr) {
	if f := m.find(key); f != nil {
		f.Value = expr
		return
	}
	m.Elts = append(m.Elts, &ast.Field{
		Label: ast.NewString(key),
		Value: expr,
	})
}

// SetAll replaces existing key-value pairs with the given ones. The keys must
// be unique.
func (m *OrderedMap) SetAll(kvs []KeyValue) {
	var a []ast.Decl
	for _, kv := range kvs {
		a = append(a, &ast.Field{
			Label: ast.NewString(kv.Key),
			Value: fromLegacy(kv.Value),
		})
	}
	m.Elts = a
}

// exists reports whether a key-value pair exists for the given key.
func (m *OrderedMap) exists(key string) bool {
	return m.find(key) != nil
}

// getMap returns the map stored under the given key, or nil if the key is
// absent. It panics if the value stored under key is not a struct.
func (m *OrderedMap) getMap(key string) *OrderedMap {
	f := m.find(key)
	if f == nil {
		return nil
	}
	return (*OrderedMap)(f.Value.(*ast.StructLit))
}

// MarshalJSON implements json.Marshaler.
func (m *OrderedMap) MarshalJSON() (b []byte, err error) {
	// This is a pointer receiver to enforce that we only store pointers to
	// OrderedMap in the output.
	return json.Encode((*ast.StructLit)(m))
}

// ---- vendor/cuelang.org/go/encoding/openapi/types.go ----

// Copyright 2019 CUE Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package openapi

import (
	"fmt"
	"strings"

	"github.com/cockroachdb/apd/v2"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/literal"
	"cuelang.org/go/cue/token"
)

// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md#data-types
var cueToOpenAPI = map[string]string{
	"int32": "int32",
	"int64": "int64",

	"float64": "double",
	"float32": "float",

	"bytes": "binary",

	"time.Time()":                  "date-time",
	"time.Time":                    "date-time",
	`time.Format ("2006-01-02")`:   "date",

	// TODO: if a format is more strict (e.g. using zeros instead of nines
	// for fractional seconds), we could still use this as an approximation.
	`time.Format ("2006-01-02T15:04:05.999999999Z07:00")`: "date-time",

	// TODO: password.

	">=-2147483648 & <=2147483647 & int":                   "int32",
	">=-9223372036854775808 & <=9223372036854775807 & int": "int64",
	">=-340282346638528859811704183484516925440 & <=340282346638528859811704183484516925440":                 "float",
	">=-1.797693134862315708145274237317043567981e+308 & <=1.797693134862315708145274237317043567981e+308":   "double",
}

// extractFormat maps a CUE value's type expression to an OpenAPI "format"
// string, or returns "" if no mapping applies.
func extractFormat(v cue.Value) string {
	switch k := v.IncompleteKind(); {
	case k&cue.NumberKind != 0, k&cue.StringKind != 0, k&cue.BytesKind != 0:
	default:
		return ""
	}
	var expr, arg string
	op, a := v.Expr()
	if op == cue.CallOp {
		v = a[0]
		if len(a) == 2 {
			arg = fmt.Sprintf(" (%s)", a[1].Eval())
		}
	}
	if inst, ref := v.Reference(); len(ref) > 0 {
		expr = inst.ImportPath + "." + strings.Join(ref, ".")
		expr += arg
	} else {
		expr = fmt.Sprint(v.Eval())
		expr += arg
	}
	if s, ok := cueToOpenAPI[expr]; ok {
		return s
	}
	// Fall back to matching the printed form of the unevaluated value.
	s := fmt.Sprint(v)
	return cueToOpenAPI[s]
}

// getDeprecated reports whether v is marked deprecated.
func getDeprecated(v cue.Value) bool {
	// only looking at protobuf attribute for now.
	a := v.Attribute("protobuf")
	r, _ := a.Flag(1, "deprecated")
	return r
}

// simplify removes redundant bounds from a numeric schema whose format
// already implies them.
func simplify(b *builder, t *ast.StructLit) {
	if b.format == "" {
		return
	}
	switch b.typ {
	case "number", "integer":
		simplifyNumber(t, b.format)
	}
}

// simplifyNumber drops minimum/maximum fields that merely restate the
// natural bounds of format, compacting t in place.
func simplifyNumber(t *ast.StructLit, format string) string {
	fields := t.Elts
	k := 0
	for i, d := range fields {
		switch label(d) {
		case "minimum":
			if decimalEqual(minMap[format], value(d)) {
				continue
			}
		case "maximum":
			if decimalEqual(maxMap[format], value(d)) {
				continue
			}
		}
		fields[k] = fields[i]
		k++
	}
	t.Elts = fields[:k]
	return format
}

// decimalEqual reports whether the numeric literal v equals d.
func decimalEqual(d *apd.Decimal, v ast.Expr) bool {
	if d == nil {
		return false
	}
	lit, ok := v.(*ast.BasicLit)
	if !ok || (lit.Kind != token.INT && lit.Kind != token.FLOAT) {
		return false
	}
	n := literal.NumInfo{}
	if literal.ParseNum(lit.Value, &n) != nil {
		return false
	}
	var b apd.Decimal
	if n.Decimal(&b) != nil {
		return false
	}
	return d.Cmp(&b) == 0
}

// mustDecimal parses s as a decimal, panicking on malformed input; for use
// with the compile-time constants below only.
func mustDecimal(s string) *apd.Decimal {
	d, _, err := apd.NewFromString(s)
	if err != nil {
		panic(err)
	}
	return d
}

// Natural bounds of the OpenAPI numeric formats, used by simplifyNumber.
var (
	minMap = map[string]*apd.Decimal{
		"int32":  mustDecimal("-2147483648"),
		"int64":  mustDecimal("-9223372036854775808"),
		"float":  mustDecimal("-3.40282346638528859811704183484516925440e+38"),
		"double": mustDecimal("-1.797693134862315708145274237317043567981e+308"),
	}
	maxMap = map[string]*apd.Decimal{
		"int32":  mustDecimal("2147483647"),
		"int64":  mustDecimal("9223372036854775807"),
		"float":  mustDecimal("+3.40282346638528859811704183484516925440e+38"),
		"double": mustDecimal("+1.797693134862315708145274237317043567981e+308"),
	}
)
diff --git a/vendor/cuelang.org/go/encoding/protobuf/errors.go b/vendor/cuelang.org/go/encoding/protobuf/errors.go new file mode 100644 index 000000000..0c6756098 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/protobuf/errors.go @@ -0,0 +1,53 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package protobuf + +import ( + "fmt" + "strings" + + "cuelang.org/go/cue/token" +) + +// protobufError implements cue/Error +type protobufError struct { + path []string + pos token.Pos + err error +} + +func (e *protobufError) Position() token.Pos { + return e.pos +} + +func (e *protobufError) InputPositions() []token.Pos { + return nil +} + +func (e *protobufError) Error() string { + if e.path == nil { + return fmt.Sprintf("protobuf: %s: %v", e.pos, e.err) + } + path := strings.Join(e.path, ".") + return fmt.Sprintf("protobuf: %s:%s: %v", e.pos, path, e.err) +} + +func (e *protobufError) Path() []string { + return e.path +} + +func (e *protobufError) Msg() (format string, args []interface{}) { + return "error parsing protobuf: %v", []interface{}{e.err} +} diff --git a/vendor/cuelang.org/go/encoding/protobuf/parse.go b/vendor/cuelang.org/go/encoding/protobuf/parse.go new file mode 100644 index 000000000..276baed78 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/protobuf/parse.go @@ -0,0 +1,807 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file 
except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package protobuf + +import ( + "bytes" + "fmt" + "os" + "path" + "path/filepath" + "strconv" + "strings" + "text/scanner" + "unicode" + + "github.com/emicklei/proto" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/ast/astutil" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/format" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal/source" +) + +func (s *Extractor) parse(filename string, src interface{}) (p *protoConverter, err error) { + if filename == "" { + return nil, errors.Newf(token.NoPos, "empty filename") + } + if r, ok := s.fileCache[filename]; ok { + return r.p, r.err + } + defer func() { + s.fileCache[filename] = result{p, err} + }() + + b, err := source.Read(filename, src) + if err != nil { + return nil, err + } + + parser := proto.NewParser(bytes.NewReader(b)) + if filename != "" { + parser.Filename(filename) + } + d, err := parser.Parse() + if err != nil { + return nil, errors.Newf(token.NoPos, "protobuf: %v", err) + } + + tfile := token.NewFile(filename, 0, len(b)) + tfile.SetLinesForContent(b) + + p = &protoConverter{ + id: filename, + state: s, + tfile: tfile, + imported: map[string]bool{}, + symbols: map[string]bool{}, + } + + defer func() { + switch x := recover().(type) { + case nil: + case protoError: + err = &protobufError{ + path: p.path, + pos: p.toCUEPos(x.pos), + err: x.error, + } + default: + panic(x) + } + }() + + p.file = &ast.File{Filename: filename} + + p.addNames(d.Elements) + + // Parse package 
definitions. + for _, e := range d.Elements { + switch x := e.(type) { + case *proto.Package: + p.protoPkg = x.Name + case *proto.Option: + if x.Name == "go_package" { + str, err := strconv.Unquote(x.Constant.SourceRepresentation()) + if err != nil { + failf(x.Position, "unquoting package filed: %v", err) + } + split := strings.Split(str, ";") + switch { + case strings.Contains(split[0], "."): + p.cuePkgPath = split[0] + switch len(split) { + case 1: + p.shortPkgName = path.Base(str) + case 2: + p.shortPkgName = split[1] + default: + failf(x.Position, "unexpected ';' in %q", str) + } + + case len(split) == 1: + p.shortPkgName = split[0] + + default: + failf(x.Position, "malformed go_package clause %s", str) + } + // name.AddComment(comment(x.Comment, true)) + // name.AddComment(comment(x.InlineComment, false)) + } + } + } + + if name := p.shortName(); name != "" { + p.file.Decls = append(p.file.Decls, &ast.Package{Name: ast.NewIdent(name)}) + } + + for _, e := range d.Elements { + switch x := e.(type) { + case *proto.Import: + if err := p.doImport(x); err != nil { + return nil, err + } + } + } + + for _, e := range d.Elements { + p.topElement(e) + } + + err = astutil.Sanitize(p.file) + + return p, err +} + +// A protoConverter converts a proto definition to CUE. Proto files map to +// CUE files one to one. +type protoConverter struct { + state *Extractor + tfile *token.File + + proto3 bool + + id string + protoPkg string + shortPkgName string + cuePkgPath string + + file *ast.File + current *ast.StructLit + + imported map[string]bool + + path []string + scope []map[string]mapping // for symbols resolution within package. 
+ symbols map[string]bool // symbols provided by package +} + +type mapping struct { + cue func() ast.Expr // needs to be a new copy as position changes + pkg *protoConverter +} + +func (p *protoConverter) qualifiedImportPath() string { + s := p.importPath() + if short := p.shortPkgName; short != "" && short != path.Base(s) { + s += ":" + short + } + return s +} + +func (p *protoConverter) importPath() string { + if p.cuePkgPath == "" && p.protoPkg != "" { + dir := strings.Replace(p.protoPkg, ".", "/", -1) + p.cuePkgPath = path.Join("googleapis.com", dir) + } + return p.cuePkgPath +} + +func (p *protoConverter) shortName() string { + if p.state.pkgName != "" { + return p.state.pkgName + } + if p.shortPkgName == "" && p.protoPkg != "" { + split := strings.Split(p.protoPkg, ".") + p.shortPkgName = split[len(split)-1] + } + return p.shortPkgName +} + +func (p *protoConverter) toCUEPos(pos scanner.Position) token.Pos { + return p.tfile.Pos(pos.Offset, 0) +} + +func (p *protoConverter) addRef(pos scanner.Position, name string, cue func() ast.Expr) { + top := p.scope[len(p.scope)-1] + if _, ok := top[name]; ok { + failf(pos, "entity %q already defined", name) + } + top[name] = mapping{cue: cue} +} + +func (p *protoConverter) addNames(elems []proto.Visitee) { + p.scope = append(p.scope, map[string]mapping{}) + for _, e := range elems { + var pos scanner.Position + var name string + switch x := e.(type) { + case *proto.Message: + if x.IsExtend { + continue + } + name = x.Name + pos = x.Position + case *proto.Enum: + name = x.Name + pos = x.Position + case *proto.NormalField: + name = x.Name + pos = x.Position + case *proto.MapField: + name = x.Name + pos = x.Position + case *proto.Oneof: + name = x.Name + pos = x.Position + default: + continue + } + sym := strings.Join(append(p.path, name), ".") + p.symbols[sym] = true + p.addRef(pos, name, func() ast.Expr { return ast.NewIdent("#" + name) }) + } +} + +func (p *protoConverter) popNames() { + p.scope = 
p.scope[:len(p.scope)-1] +} + +func (p *protoConverter) resolve(pos scanner.Position, name string, options []*proto.Option) ast.Expr { + if expr := protoToCUE(name, options); expr != nil { + ast.SetPos(expr, p.toCUEPos(pos)) + return expr + } + if strings.HasPrefix(name, ".") { + return p.resolveTopScope(pos, name[1:], options) + } + for i := len(p.scope) - 1; i > 0; i-- { + if m, ok := p.scope[i][name]; ok { + return m.cue() + } + } + expr := p.resolveTopScope(pos, name, options) + return expr +} + +func (p *protoConverter) resolveTopScope(pos scanner.Position, name string, options []*proto.Option) ast.Expr { + for i := 0; i < len(name); i++ { + k := strings.IndexByte(name[i:], '.') + i += k + if k == -1 { + i = len(name) + } + if m, ok := p.scope[0][name[:i]]; ok { + if m.pkg != nil { + p.imported[m.pkg.qualifiedImportPath()] = true + } + expr := m.cue() + for i < len(name) { + name = name[i+1:] + if i = strings.IndexByte(name, '.'); i == -1 { + i = len(name) + } + expr = ast.NewSel(expr, "#"+name[:i]) + } + ast.SetPos(expr, p.toCUEPos(pos)) + return expr + } + } + failf(pos, "name %q not found", name) + return nil +} + +func (p *protoConverter) doImport(v *proto.Import) error { + if v.Filename == "cue/cue.proto" { + return nil + } + + filename := "" + for _, p := range p.state.paths { + name := filepath.Join(p, v.Filename) + _, err := os.Stat(name) + if err != nil { + continue + } + filename = name + break + } + + if filename == "" { + err := errors.Newf(p.toCUEPos(v.Position), "could not find import %q", v.Filename) + p.state.addErr(err) + return err + } + + if !p.mapBuiltinPackage(v.Position, v.Filename, filename == "") { + return nil + } + + imp, err := p.state.parse(filename, nil) + if err != nil { + fail(v.Position, err) + } + + pkgNamespace := strings.Split(imp.protoPkg, ".") + curNamespace := strings.Split(p.protoPkg, ".") + for { + for k := range imp.symbols { + ref := k + if len(pkgNamespace) > 0 { + ref = strings.Join(append(pkgNamespace, k), ".") + } 
+ if _, ok := p.scope[0][ref]; !ok { + pkg := imp + a := toCue(k) + + var f func() ast.Expr + + if imp.qualifiedImportPath() == p.qualifiedImportPath() { + pkg = nil + f = func() ast.Expr { return ast.NewIdent(a[0]) } + } else { + f = func() ast.Expr { + ident := &ast.Ident{ + Name: imp.shortName(), + Node: ast.NewImport(nil, imp.qualifiedImportPath()), + } + return ast.NewSel(ident, a[0]) + } + } + p.scope[0][ref] = mapping{f, pkg} + } + } + if len(pkgNamespace) == 0 { + break + } + if len(curNamespace) == 0 || pkgNamespace[0] != curNamespace[0] { + break + } + pkgNamespace = pkgNamespace[1:] + curNamespace = curNamespace[1:] + } + return nil +} + +// TODO: this doesn't work. Do something more principled. +func toCue(name string) []string { + a := strings.Split(name, ".") + for i, s := range a { + a[i] = "#" + s + } + return a +} + +func (p *protoConverter) stringLit(pos scanner.Position, s string) *ast.BasicLit { + return &ast.BasicLit{ + ValuePos: p.toCUEPos(pos), + Kind: token.STRING, + Value: literal.String.Quote(s)} +} + +func (p *protoConverter) ident(pos scanner.Position, name string) *ast.Ident { + return &ast.Ident{NamePos: p.toCUEPos(pos), Name: labelName(name)} +} + +func (p *protoConverter) ref(pos scanner.Position) *ast.Ident { + name := "#" + p.path[len(p.path)-1] + return &ast.Ident{NamePos: p.toCUEPos(pos), Name: name} +} + +func (p *protoConverter) subref(pos scanner.Position, name string) *ast.Ident { + return &ast.Ident{ + NamePos: p.toCUEPos(pos), + Name: "#" + name, + } +} + +func (p *protoConverter) addTag(f *ast.Field, body string) { + tag := "@protobuf(" + body + ")" + f.Attrs = append(f.Attrs, &ast.Attribute{Text: tag}) +} + +func (p *protoConverter) topElement(v proto.Visitee) { + switch x := v.(type) { + case *proto.Syntax: + p.proto3 = x.Value == "proto3" + + case *proto.Comment: + addComments(p.file, 0, x, nil) + + case *proto.Enum: + p.enum(x) + + case *proto.Package: + if doc := x.Doc(); doc != nil { + addComments(p.file, 0, doc, 
nil) + } + + case *proto.Message: + p.message(x) + + case *proto.Option: + case *proto.Import: + // already handled. + + case *proto.Service: + // TODO: handle services. + + case *proto.Extensions, *proto.Reserved: + // no need to handle + + default: + failf(scanner.Position{}, "unsupported type %T", x) + } +} + +func (p *protoConverter) message(v *proto.Message) { + if v.IsExtend { + // TODO: we are not handling extensions as for now. + return + } + + defer func(saved []string) { p.path = saved }(p.path) + p.path = append(p.path, v.Name) + + p.addNames(v.Elements) + defer p.popNames() + + // TODO: handle IsExtend/ proto2 + + s := &ast.StructLit{ + Lbrace: p.toCUEPos(v.Position), + // TODO: set proto file position. + Rbrace: token.Newline.Pos(), + } + + ref := p.ref(v.Position) + if v.Comment == nil { + ref.NamePos = newSection + } + f := &ast.Field{Label: ref, Value: s} + addComments(f, 1, v.Comment, nil) + + p.addDecl(f) + defer func(current *ast.StructLit) { + p.current = current + }(p.current) + p.current = s + + for i, e := range v.Elements { + p.messageField(s, i, e) + } +} + +func (p *protoConverter) addDecl(d ast.Decl) { + if p.current == nil { + p.file.Decls = append(p.file.Decls, d) + } else { + p.current.Elts = append(p.current.Elts, d) + } +} + +func (p *protoConverter) messageField(s *ast.StructLit, i int, v proto.Visitee) { + switch x := v.(type) { + case *proto.Comment: + s.Elts = append(s.Elts, comment(x, true)) + + case *proto.NormalField: + f := p.parseField(s, i, x.Field) + + if x.Repeated { + f.Value = &ast.ListLit{ + Lbrack: p.toCUEPos(x.Position), + Elts: []ast.Expr{&ast.Ellipsis{Type: f.Value}}, + } + } + + case *proto.MapField: + defer func(saved []string) { p.path = saved }(p.path) + p.path = append(p.path, x.Name) + + f := &ast.Field{} + + // All keys are converted to strings. + // TODO: support integer keys. 
+ f.Label = ast.NewList(ast.NewIdent("string")) + f.Value = p.resolve(x.Position, x.Type, x.Options) + + name := p.ident(x.Position, x.Name) + f = &ast.Field{ + Label: name, + Value: ast.NewStruct(f), + } + addComments(f, i, x.Comment, x.InlineComment) + + o := optionParser{message: s, field: f} + o.tags = fmt.Sprintf("%d,type=map<%s,%s>", x.Sequence, x.KeyType, x.Type) + if x.Name != name.Name { + o.tags += "," + x.Name + } + s.Elts = append(s.Elts, f) + o.parse(x.Options) + p.addTag(f, o.tags) + + if !o.required { + f.Optional = token.NoSpace.Pos() + } + + case *proto.Enum: + p.enum(x) + + case *proto.Message: + p.message(x) + + case *proto.Oneof: + p.oneOf(x) + + case *proto.Extensions, *proto.Reserved: + // no need to handle + + case *proto.Option: + opt := fmt.Sprintf("@protobuf(option %s=%s)", x.Name, x.Constant.Source) + attr := &ast.Attribute{ + At: p.toCUEPos(x.Position), + Text: opt, + } + addComments(attr, i, x.Doc(), x.InlineComment) + s.Elts = append(s.Elts, attr) + + default: + failf(scanner.Position{}, "unsupported field type %T", v) + } +} + +// enum converts a proto enum definition to CUE. +// +// An enum will generate two top-level definitions: +// +// Enum: +// "Value1" | +// "Value2" | +// "Value3" +// +// and +// +// Enum_value: { +// "Value1": 0 +// "Value2": 1 +// } +// +// Enums are always defined at the top level. The name of a nested enum +// will be prefixed with the name of its parent and an underscore. +func (p *protoConverter) enum(x *proto.Enum) { + + if len(x.Elements) == 0 { + failf(x.Position, "empty enum") + } + + name := p.subref(x.Position, x.Name) + + defer func(saved []string) { p.path = saved }(p.path) + p.path = append(p.path, x.Name) + + p.addNames(x.Elements) + + if len(p.path) == 0 { + defer func() { p.path = p.path[:0] }() + p.path = append(p.path, x.Name) + } + + // Top-level enum entry. + enum := &ast.Field{Label: name} + addComments(enum, 1, x.Comment, nil) + + // Top-level enum values entry. 
+ valueName := ast.NewIdent(name.Name + "_value") + valueName.NamePos = newSection + valueMap := &ast.StructLit{} + d := &ast.Field{Label: valueName, Value: valueMap} + // addComments(valueMap, 1, x.Comment, nil) + + if strings.Contains(name.Name, "google") { + panic(name.Name) + } + p.addDecl(enum) + p.addDecl(d) + + numEnums := 0 + for _, v := range x.Elements { + if _, ok := v.(*proto.EnumField); ok { + numEnums++ + } + } + + // The line comments for an enum field need to attach after the '|', which + // is only known at the next iteration. + var lastComment *proto.Comment + for i, v := range x.Elements { + switch y := v.(type) { + case *proto.EnumField: + // Add enum value to map + f := &ast.Field{ + Label: p.stringLit(y.Position, y.Name), + Value: ast.NewLit(token.INT, strconv.Itoa(y.Integer)), + } + valueMap.Elts = append(valueMap.Elts, f) + + // add to enum disjunction + value := p.stringLit(y.Position, y.Name) + + var e ast.Expr = value + // Make the first value the default value. + if i > 0 { + value.ValuePos = newline + } + addComments(e, i, y.Comment, nil) + if enum.Value != nil { + e = &ast.BinaryExpr{X: enum.Value, Op: token.OR, Y: e} + if cg := comment(lastComment, false); cg != nil { + cg.Position = 2 + e.AddComment(cg) + } + } + enum.Value = e + + if y.Comment != nil { + lastComment = nil + addComments(f, 0, nil, y.InlineComment) + } else { + lastComment = y.InlineComment + } + + // a := fmt.Sprintf("@protobuf(enum,name=%s)", y.Name) + // f.Attrs = append(f.Attrs, &ast.Attribute{Text: a}) + } + } + addComments(enum.Value, 1, nil, lastComment) +} + +// oneOf converts a Proto OneOf field to CUE. Note that Protobuf defines +// a oneOf to be at most one of the fields. Rather than making each field +// optional, we define oneOfs as all required fields, but add one more +// disjunction allowing no fields. This makes it easier to constrain the +// result to include at least one of the values. 
+func (p *protoConverter) oneOf(x *proto.Oneof) { + s := ast.NewStruct() + ast.SetRelPos(s, token.Newline) + embed := &ast.EmbedDecl{Expr: s} + embed.AddComment(comment(x.Comment, true)) + + p.addDecl(embed) + + newStruct := func() { + s = &ast.StructLit{ + // TODO: make this the default in the formatter. + Rbrace: token.Newline.Pos(), + } + embed.Expr = ast.NewBinExpr(token.OR, embed.Expr, s) + } + for _, v := range x.Elements { + switch x := v.(type) { + case *proto.OneOfField: + newStruct() + oneOf := p.parseField(s, 0, x.Field) + oneOf.Optional = token.NoPos + + case *proto.Comment: + cg := comment(x, false) + ast.SetRelPos(cg, token.NewSection) + s.Elts = append(s.Elts, cg) + + default: + newStruct() + p.messageField(s, 1, v) + } + + } +} + +func (p *protoConverter) parseField(s *ast.StructLit, i int, x *proto.Field) *ast.Field { + defer func(saved []string) { p.path = saved }(p.path) + p.path = append(p.path, x.Name) + + f := &ast.Field{} + addComments(f, i, x.Comment, x.InlineComment) + + name := p.ident(x.Position, x.Name) + f.Label = name + typ := p.resolve(x.Position, x.Type, x.Options) + f.Value = typ + s.Elts = append(s.Elts, f) + + o := optionParser{message: s, field: f} + + // body of @protobuf tag: sequence[,type][,name=][,...] + o.tags += fmt.Sprint(x.Sequence) + b, _ := format.Node(typ) + str := string(b) + if x.Type != strings.TrimLeft(str, "#") { + o.tags += ",type=" + x.Type + } + if x.Name != name.Name { + o.tags += ",name=" + x.Name + } + o.parse(x.Options) + p.addTag(f, o.tags) + + if !o.required { + f.Optional = token.NoSpace.Pos() + } + return f +} + +type optionParser struct { + message *ast.StructLit + field *ast.Field + required bool + tags string +} + +func (p *optionParser) parse(options []*proto.Option) { + + // TODO: handle options + // - translate options to tags + // - interpret CUE options. + for _, o := range options { + switch o.Name { + case "(cue.opt).required": + p.required = true + // TODO: Dropping comments. 
Maybe add a dummy tag? + + case "(cue.val)": + // TODO: set filename and base offset. + expr, err := parser.ParseExpr("", o.Constant.Source) + if err != nil { + failf(o.Position, "invalid cue.val value: %v", err) + } + // Any further checks will be done at the end. + constraint := &ast.Field{Label: p.field.Label, Value: expr} + addComments(constraint, 1, o.Comment, o.InlineComment) + p.message.Elts = append(p.message.Elts, constraint) + if !p.required { + constraint.Optional = token.NoSpace.Pos() + } + + default: + // TODO: dropping comments. Maybe add dummy tag? + + // TODO: should CUE support nested attributes? + source := o.Constant.SourceRepresentation() + p.tags += "," + switch source { + case "true": + p.tags += quoteOption(o.Name) + default: + p.tags += quoteOption(o.Name + "=" + source) + } + } + } +} + +func quoteOption(s string) string { + needQuote := false + for _, r := range s { + if !unicode.In(r, unicode.L, unicode.N) { + needQuote = true + break + } + } + if !needQuote { + return s + } + if !strings.ContainsAny(s, `"\`) { + return literal.String.Quote(s) + } + esc := `\#` + for strings.Contains(s, esc) { + esc += "#" + } + return esc[1:] + `"` + s + `"` + esc[1:] +} diff --git a/vendor/cuelang.org/go/encoding/protobuf/protobuf.go b/vendor/cuelang.org/go/encoding/protobuf/protobuf.go new file mode 100644 index 000000000..456a19bf7 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/protobuf/protobuf.go @@ -0,0 +1,416 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Package protobuf defines functionality for parsing protocol buffer +// definitions and instances. +// +// Proto definition mapping follows the guidelines of mapping Proto to JSON as +// discussed in https://developers.google.com/protocol-buffers/docs/proto3, and +// carries some of the mapping further when possible with CUE. +// +// +// Package Paths +// +// If a .proto file contains a go_package directive, it will be used as the +// destination package fo the generated .cue files. A common use case is to +// generate the CUE in the same directory as the .proto definition. If a +// destination package is not within the current CUE module, it will be written +// relative to the pkg directory. +// +// If a .proto file does not specify go_package, it will convert a proto package +// "google.parent.sub" to the import path "googleapis.com/google/parent/sub". +// It is safe to mix package with and without a go_package within the same +// project. +// +// Type Mappings +// +// The following type mappings of definitions apply: +// +// Proto type CUE type/def Comments +// message struct Message fields become CUE fields, whereby +// names are mapped to lowerCamelCase. +// enum e1 | e2 | ... Where ex are strings. A separate mapping is +// generated to obtain the numeric values. +// map { <>: V } All keys are converted to strings. +// repeated V [...V] null is accepted as the empty list []. +// bool bool +// string string +// bytes bytes A base64-encoded string when converted to JSON. +// int32, fixed32 int32 An integer with bounds as defined by int32. +// uint32 uint32 An integer with bounds as defined by uint32. +// int64, fixed64 int64 An integer with bounds as defined by int64. +// uint64 uint64 An integer with bounds as defined by uint64. +// float float32 A number with bounds as defined by float32. +// double float64 A number with bounds as defined by float64. 
+// Struct struct See struct.proto. +// Value _ See struct.proto. +// ListValue [...] See struct.proto. +// NullValue null See struct.proto. +// BoolValue bool See struct.proto. +// StringValue string See struct.proto. +// NumberValue number See struct.proto. +// StringValue string See struct.proto. +// Empty close({}) +// Timestamp time.Time See struct.proto. +// Duration time.Duration See struct.proto. +// +// Protobuf definitions can be annotated with CUE constraints that are included +// in the generated CUE: +// (cue.val) string CUE expression defining a constraint for this +// field. The string may refer to other fields +// in a message definition using their JSON name. +// +// (cue.opt) FieldOptions +// required bool Defines the field is required. Use with +// caution. +// +package protobuf + +// TODO mappings: +// +// Wrapper types various types 2, "2", "foo", true, "true", null, 0, … Wrappers use the same representation in JSON as the wrapped primitive type, except that null is allowed and preserved during data conversion and transfer. +// FieldMask string "f.fooBar,h" See field_mask.proto. +// Any {"@type":"url", See struct.proto. +// f1: value, +// ...} + +import ( + "os" + "path/filepath" + "sort" + "strings" + + "github.com/mpvl/unique" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/build" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/format" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" + + // Generated protobuf CUE may use builtins. Ensure that these can always be + // found, even if the user does not use cue/load or another package that + // triggers its loading. + // + // TODO: consider whether just linking in the necessary packages suffices. + // It probably does, but this may reorder some of the imports, which may, + // in turn, change the numbering, which can be confusing while debugging. 
+ _ "cuelang.org/go/pkg" +) + +// Config specifies the environment into which to parse a proto definition file. +type Config struct { + // Root specifies the root of the CUE project, which typically coincides + // with, for example, a version control repository root or the Go module. + // Any imports of proto files within the directory tree of this of this root + // are considered to be "project files" and are generated at the + // corresponding location with this hierarchy. Any other imports are + // considered to be external. Files for such imports are rooted under the + // $Root/pkg/, using the Go package path specified in the .proto file. + Root string + + // Module is the Go package import path of the module root. It is the value + // as after "module" in a cue.mod/modules.cue file, if a module file is + // present. + Module string // TODO: determine automatically if unspecified. + + // Paths defines the include directory in which to search for imports. + Paths []string + + // PkgName specifies the package name for a generated CUE file. A value + // will be derived from the Go package name if undefined. + PkgName string +} + +// An Extractor converts a collection of proto files, typically belonging to one +// repo or module, to CUE. It thereby observes the CUE package layout. +// +// CUE observes the same package layout as Go and requires .proto files to have +// the go_package directive. Generated CUE files are put in the same directory +// as their corresponding .proto files if the .proto files are located in the +// specified Root (or current working directory if none is specified). +// All other imported files are assigned to the CUE pkg dir ($Root/pkg) +// according to their Go package import path. 
+// +type Extractor struct { + root string + cwd string + module string + paths []string + pkgName string + + fileCache map[string]result + imports map[string]*build.Instance + + errs errors.Error + done bool +} + +type result struct { + p *protoConverter + err error +} + +// NewExtractor creates an Extractor. If the configuration contained any errors +// it will be observable by the Err method fo the Extractor. It is safe, +// however, to only check errors after building the output. +func NewExtractor(c *Config) *Extractor { + cwd, _ := os.Getwd() + b := &Extractor{ + root: c.Root, + cwd: cwd, + paths: c.Paths, + pkgName: c.PkgName, + module: c.Module, + fileCache: map[string]result{}, + imports: map[string]*build.Instance{}, + } + + if b.root == "" { + b.root = b.cwd + } + + return b +} + +// Err returns the errors accumulated during testing. The returned error may be +// of type cuelang.org/go/cue/errors.List. +func (b *Extractor) Err() error { + return b.errs +} + +func (b *Extractor) addErr(err error) { + b.errs = errors.Append(b.errs, errors.Promote(err, "unknown error")) +} + +// AddFile adds a proto definition file to be converted into CUE by the builder. +// Relatives paths are always taken relative to the Root with which the b is +// configured. +// +// AddFile assumes that the proto file compiles with protoc and may not report +// an error if it does not. Imports are resolved using the paths defined in +// Config. +// +func (b *Extractor) AddFile(filename string, src interface{}) error { + if b.done { + err := errors.Newf(token.NoPos, + "protobuf: cannot call AddFile: Instances was already called") + b.errs = errors.Append(b.errs, err) + return err + } + if b.root != b.cwd && !filepath.IsAbs(filename) { + filename = filepath.Join(b.root, filename) + } + _, err := b.parse(filename, src) + return err +} + +// TODO: some way of (recursively) adding multiple proto files with filter. 
+ +// Files returns a File for each proto file that was added or imported, +// recursively. +func (b *Extractor) Files() (files []*ast.File, err error) { + defer func() { err = b.Err() }() + b.done = true + + instances, err := b.Instances() + if err != nil { + return nil, err + } + + for _, p := range instances { + for _, f := range p.Files { + files = append(files, f) + } + } + return files, nil +} + +// Instances creates a build.Instances for every package for which a proto file +// was added to the builder. This includes transitive dependencies. It does not +// write the generated files to disk. +// +// The returned instances can be passed to cue.Build to generated the +// corresponding CUE instances. +// +// All import paths are located within the specified Root, where external +// packages are located under $Root/pkg. Instances for builtin (like time) +// packages may be omitted, and if not will have no associated files. +func (b *Extractor) Instances() (instances []*build.Instance, err error) { + defer func() { err = b.Err() }() + b.done = true + + for _, r := range b.fileCache { + if r.err != nil { + b.addErr(r.err) + continue + } + inst := b.getInst(r.p) + if inst == nil { + continue + } + + // Set canonical CUE path for generated file. 
+ f := r.p.file + base := filepath.Base(f.Filename) + base = base[:len(base)-len(".proto")] + "_proto_gen.cue" + f.Filename = filepath.Join(inst.Dir, base) + buf, err := format.Node(f) + if err != nil { + b.addErr(err) + // return nil, err + continue + } + f, err = parser.ParseFile(f.Filename, buf, parser.ParseComments) + if err != nil { + b.addErr(err) + continue + } + + inst.Files = append(inst.Files, f) + + for pkg := range r.p.imported { + inst.ImportPaths = append(inst.ImportPaths, pkg) + } + } + + for _, p := range b.imports { + instances = append(instances, p) + sort.Strings(p.ImportPaths) + unique.Strings(&p.ImportPaths) + for _, i := range p.ImportPaths { + if imp := b.imports[i]; imp != nil { + p.Imports = append(p.Imports, imp) + } + } + + sort.Slice(p.Files, func(i, j int) bool { + return p.Files[i].Filename < p.Files[j].Filename + }) + } + sort.Slice(instances, func(i, j int) bool { + return instances[i].ImportPath < instances[j].ImportPath + }) + + if err != nil { + return instances, err + } + return instances, nil +} + +func (b *Extractor) getInst(p *protoConverter) *build.Instance { + if b.errs != nil { + return nil + } + importPath := p.qualifiedImportPath() + if importPath == "" { + err := errors.Newf(token.NoPos, + "no package clause for proto package %q in file %s", p.id, p.file.Filename) + b.errs = errors.Append(b.errs, err) + // TODO: find an alternative. Is proto package good enough? + return nil + } + + dir := b.root + path := p.importPath() + file := p.file.Filename + if !filepath.IsAbs(file) { + file = filepath.Join(b.root, p.file.Filename) + } + // Determine whether the generated file should be included in place, or + // within cue.mod. + inPlace := strings.HasPrefix(file, b.root) + if !strings.HasPrefix(path, b.module) { + // b.module is either "", in which case we assume the setting for + // inPlace, or not, in which case the module in the protobuf must + // correspond with that of the proto package. 
+ inPlace = false + } + if !inPlace { + dir = filepath.Join(internal.GenPath(dir), path) + } else { + dir = filepath.Dir(p.file.Filename) + } + + // TODO: verify module name from go_package option against that of actual + // CUE module. Maybe keep this old code for some strict mode? + // want := filepath.Dir(p.file.Filename) + // dir = filepath.Join(dir, path[len(b.module)+1:]) + // if !filepath.IsAbs(want) { + // want = filepath.Join(b.root, want) + // } + // if dir != want { + // err := errors.Newf(token.NoPos, + // "file %s mapped to inconsistent path %s; module name %q may be inconsistent with root dir %s", + // want, dir, b.module, b.root, + // ) + // b.errs = errors.Append(b.errs, err) + // } + + inst := b.imports[importPath] + if inst == nil { + inst = &build.Instance{ + Root: b.root, + Dir: dir, + ImportPath: importPath, + PkgName: p.shortPkgName, + DisplayPath: p.protoPkg, + } + b.imports[importPath] = inst + } + return inst +} + +// Extract parses a single proto file and returns its contents translated to a CUE +// file. If src is not nil, it will use this as the contents of the file. It may +// be a string, []byte or io.Reader. Otherwise Extract will open the given file +// name at the fully qualified path. +// +// Extract assumes the proto file compiles with protoc and may not report an error +// if it does not. Imports are resolved using the paths defined in Config. 
+// +func Extract(filename string, src interface{}, c *Config) (f *ast.File, err error) { + if c == nil { + c = &Config{} + } + b := NewExtractor(c) + + p, err := b.parse(filename, src) + if err != nil { + return nil, err + } + p.file.Filename = filename[:len(filename)-len(".proto")] + "_gen.cue" + return p.file, b.Err() +} + +// TODO +// func GenDefinition + +// func MarshalText(cue.Value) (string, error) { +// return "", nil +// } + +// func MarshalBytes(cue.Value) ([]byte, error) { +// return nil, nil +// } + +// func UnmarshalText(descriptor cue.Value, b string) (ast.Expr, error) { +// return nil, nil +// } + +// func UnmarshalBytes(descriptor cue.Value, b []byte) (ast.Expr, error) { +// return nil, nil +// } diff --git a/vendor/cuelang.org/go/encoding/protobuf/types.go b/vendor/cuelang.org/go/encoding/protobuf/types.go new file mode 100644 index 000000000..b451fbfbe --- /dev/null +++ b/vendor/cuelang.org/go/encoding/protobuf/types.go @@ -0,0 +1,188 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package protobuf + +import ( + "fmt" + "text/scanner" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" + "github.com/emicklei/proto" +) + +func protoToCUE(typ string, options []*proto.Option) ast.Expr { + t, ok := scalars[typ] + if !ok { + return nil + } + return predeclared(t) +} + +var scalars = map[string]string{ + // Differing + "sint32": "int32", + "sint64": "int64", + "fixed32": "uint32", + "fixed64": "uint64", + "sfixed32": "int32", + "sfixed64": "int64", + + // Identical to CUE + "int32": "int32", + "int64": "int64", + "uint32": "uint32", + "uint64": "uint64", + + "double": "float64", + "float": "float32", + + "bool": "bool", + "string": "string", + "bytes": "bytes", +} + +func predeclared(s string) ast.Expr { + return &ast.Ident{ + Name: s, + Node: ast.NewIdent("__" + s), + } +} + +func (p *protoConverter) setBuiltin(from string, to func() ast.Expr, pkg *protoConverter) { + p.scope[0][from] = mapping{to, pkg} +} + +func (p *protoConverter) setBuiltinParse(from, to string, pkg *protoConverter) { + f := func() ast.Expr { + expr, err := parser.ParseExpr("", to, parser.ParseComments) + if err != nil { + panic(fmt.Sprintf("error parsing name %q: %v", to, err)) + } + return expr + } + p.scope[0][from] = mapping{f, pkg} +} + +var ( + pkgTime = &protoConverter{cuePkgPath: "time"} + pkgStruct = &protoConverter{cuePkgPath: "struct"} + importTime = ast.NewImport(nil, "time") + importStruct = ast.NewImport(nil, "struct") +) + +func (p *protoConverter) mapBuiltinPackage(pos scanner.Position, file string, required bool) (generate bool) { + // Map some builtin types to their JSON/CUE mappings. 
+ switch file { + case "gogoproto/gogo.proto": + + case "google/protobuf/struct.proto": + p.setBuiltin("google.protobuf.Struct", func() ast.Expr { + return ast.NewStruct() + }, nil) + + p.setBuiltin("google.protobuf.Value", func() ast.Expr { + return ast.NewIdent("_") + }, nil) + + p.setBuiltin("google.protobuf.NullValue", func() ast.Expr { + return ast.NewNull() + }, nil) + + p.setBuiltin("google.protobuf.ListValue", func() ast.Expr { + return ast.NewList(&ast.Ellipsis{}) + }, nil) + + p.setBuiltin("google.protobuf.StringValue", func() ast.Expr { + return predeclared("string") + }, nil) + + p.setBuiltin("google.protobuf.BoolValue", func() ast.Expr { + return predeclared("bool") + }, nil) + + p.setBuiltin("google.protobuf.NumberValue", func() ast.Expr { + return predeclared("number") + }, nil) + + return false + + case "google/protobuf/empty.proto": + f := func() ast.Expr { + time := &ast.Ident{Name: "struct", Node: importStruct} + return ast.NewCall( + ast.NewSel(time, "MaxFields"), + ast.NewLit(token.INT, "0"), + ) + } + p.setBuiltin("google.protobuf.Empty", f, pkgStruct) + return false + + case "google/protobuf/duration.proto": + f := func() ast.Expr { + time := &ast.Ident{Name: "time", Node: importTime} + return ast.NewSel(time, "Duration") + } + p.setBuiltin("google.protobuf.Duration", f, pkgTime) + return false + + case "google/protobuf/timestamp.proto": + f := func() ast.Expr { + time := &ast.Ident{Name: "time", Node: importTime} + return ast.NewSel(time, "Time") + } + p.setBuiltin("google.protobuf.Timestamp", f, pkgTime) + return false + + case "google/protobuf/any.proto": + // TODO: technically, the value should be `_` (anything), but that + // will not convert to a valid OpenAPI value. In practice, all + // "well-known" types except wrapper types (which will likely not + // be used here) are represented as strings. + // + // In Structural OpenAPI this type cannot be represented. 
+ p.setBuiltinParse("google.protobuf.Any", `{ + // A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one "/" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `+ + "`type.googleapis.com/google.protobuf.Duration`"+`). The name should be in a canonical form (e.g., leading "." is not accepted). + // The remaining fields of this object correspond to fields of the proto messsage. If the embedded message is well-known and has a custom JSON representation, that representation is assigned to the 'value' field. + "@type": string, +}`, nil) + return false + + case "google/protobuf/wrappers.proto": + p.setBuiltinParse("google.protobuf.DoubleValue", `null | float`, nil) + p.setBuiltinParse("google.protobuf.FloatValue", `null | float`, nil) + p.setBuiltinParse("google.protobuf.Int64Value", `null | int64`, nil) + p.setBuiltinParse("google.protobuf.UInt64Value", `null | uint64`, nil) + p.setBuiltinParse("google.protobuf.Int32Value", `null | int32`, nil) + p.setBuiltinParse("google.protobuf.UInt32Value", `null | uint32`, nil) + p.setBuiltinParse("google.protobuf.BoolValue", `null | bool`, nil) + p.setBuiltinParse("google.protobuf.StringValue", `null | string`, nil) + p.setBuiltinParse("google.protobuf.BytesValue", `null | bytes`, nil) + return false + + // case "google/protobuf/field_mask.proto": + // p.setBuiltin("google.protobuf.FieldMask", "protobuf.FieldMask", nil) + + // protobuf.Any + + default: + if required { + failf(pos, "import %q not found", file) + } + } + return true +} diff --git a/vendor/cuelang.org/go/encoding/protobuf/util.go b/vendor/cuelang.org/go/encoding/protobuf/util.go new file mode 100644 index 000000000..ad6328556 --- /dev/null +++ b/vendor/cuelang.org/go/encoding/protobuf/util.go @@ -0,0 +1,82 @@ +// Copyright 2019 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this 
file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package protobuf + +import ( + "strings" + "text/scanner" + + "github.com/emicklei/proto" + "golang.org/x/xerrors" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/token" +) + +// failf panics with a marked error that can be intercepted upon returning +// from parsing. +func failf(pos scanner.Position, format string, args ...interface{}) { + panic(protoError{pos, xerrors.Errorf(format, args...)}) +} + +func fail(pos scanner.Position, err error) { + panic(protoError{pos, err}) +} + +type protoError struct { + pos scanner.Position + error +} + +var ( + newline = token.Newline.Pos() + newSection = token.NewSection.Pos() +) + +func addComments(f ast.Node, i int, doc, inline *proto.Comment) bool { + cg := comment(doc, true) + if cg != nil && len(cg.List) > 0 && i > 0 { + cg.List[0].Slash = newSection + } + f.AddComment(cg) + f.AddComment(comment(inline, false)) + return doc != nil +} + +func comment(c *proto.Comment, doc bool) *ast.CommentGroup { + if c == nil || len(c.Lines) == 0 { + return nil + } + cg := &ast.CommentGroup{} + if doc { + cg.Doc = true + } else { + cg.Line = true + cg.Position = 10 + } + for _, s := range c.Lines { + s = strings.TrimRight(s, " ") + cg.List = append(cg.List, &ast.Comment{Text: "//" + s}) + } + return cg +} + +func labelName(s string) string { + split := strings.Split(s, "_") + for i := 1; i < len(split); i++ { + split[i] = strings.Title(split[i]) + } + return strings.Join(split, "") +} diff --git a/vendor/cuelang.org/go/internal/attrs.go 
b/vendor/cuelang.org/go/internal/attrs.go new file mode 100644 index 000000000..c0d03c8fb --- /dev/null +++ b/vendor/cuelang.org/go/internal/attrs.go @@ -0,0 +1,205 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + "fmt" + "strconv" + "strings" + + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/token" +) + +// Attr holds positional information for a single Attr. +type Attr struct { + Fields []keyValue + Err error +} + +// NewNonExisting creates a non-existing attribute. +func NewNonExisting(key string) Attr { + const msgNotExist = "attribute %q does not exist" + return Attr{Err: errors.Newf(token.NoPos, msgNotExist, key)} +} + +type keyValue struct { + data string + equal int // index of equal sign or 0 if non-existing +} + +func (kv *keyValue) Text() string { return kv.data } +func (kv *keyValue) Key() string { return kv.data[:kv.equal] } +func (kv *keyValue) Value() string { + return strings.TrimSpace(kv.data[kv.equal+1:]) +} + +func (a *Attr) hasPos(p int) error { + if a.Err != nil { + return a.Err + } + if p >= len(a.Fields) { + return fmt.Errorf("field does not exist") + } + return nil +} + +// String reports the possibly empty string value at the given position or +// an error the attribute is invalid or if the position does not exist. 
+func (a *Attr) String(pos int) (string, error) { + if err := a.hasPos(pos); err != nil { + return "", err + } + return a.Fields[pos].Text(), nil +} + +// Int reports the integer at the given position or an error if the attribute is +// invalid, the position does not exist, or the value at the given position is +// not an integer. +func (a *Attr) Int(pos int) (int64, error) { + if err := a.hasPos(pos); err != nil { + return 0, err + } + // TODO: use CUE's literal parser once it exists, allowing any of CUE's + // number types. + return strconv.ParseInt(a.Fields[pos].Text(), 10, 64) +} + +// Flag reports whether an entry with the given name exists at position pos or +// onwards or an error if the attribute is invalid or if the first pos-1 entries +// are not defined. +func (a *Attr) Flag(pos int, key string) (bool, error) { + if err := a.hasPos(pos - 1); err != nil { + return false, err + } + for _, kv := range a.Fields[pos:] { + if kv.Text() == key { + return true, nil + } + } + return false, nil +} + +// Lookup searches for an entry of the form key=value from position pos onwards +// and reports the value if found. It reports an error if the attribute is +// invalid or if the first pos-1 entries are not defined. +func (a *Attr) Lookup(pos int, key string) (val string, found bool, err error) { + if err := a.hasPos(pos - 1); err != nil { + return "", false, err + } + for _, kv := range a.Fields[pos:] { + if kv.Key() == key { + return kv.Value(), true, nil + } + } + return "", false, nil +} + +func ParseAttrBody(pos token.Pos, s string) (a Attr) { + i := 0 + for { + // always scan at least one, possibly empty element. 
+ n, err := scanAttributeElem(pos, s[i:], &a) + if err != nil { + return Attr{Err: err} + } + if i += n; i >= len(s) { + break + } + if s[i] != ',' { + return Attr{Err: errors.Newf(pos, "invalid attribute: expected comma")} + } + i++ + } + return a +} + +func scanAttributeElem(pos token.Pos, s string, a *Attr) (n int, err errors.Error) { + // try CUE string + kv := keyValue{} + if n, kv.data, err = scanAttributeString(pos, s); n == 0 { + // try key-value pair + p := strings.IndexAny(s, ",=") // ) is assumed to be stripped. + switch { + case p < 0: + kv.data = s + n = len(s) + + default: // ',' + n = p + kv.data = s[:n] + + case s[p] == '=': + kv.equal = p + offset := p + 1 + var str string + if p, str, err = scanAttributeString(pos, s[offset:]); p > 0 { + n = offset + p + kv.data = s[:offset] + str + } else { + n = len(s) + if p = strings.IndexByte(s[offset:], ','); p >= 0 { + n = offset + p + } + kv.data = s[:n] + } + } + } + if a != nil { + a.Fields = append(a.Fields, kv) + } + return n, err +} + +func scanAttributeString(pos token.Pos, s string) (n int, str string, err errors.Error) { + if s == "" || (s[0] != '#' && s[0] != '"' && s[0] != '\'') { + return 0, "", nil + } + + nHash := 0 + for { + if nHash < len(s) { + if s[nHash] == '#' { + nHash++ + continue + } + if s[nHash] == '\'' || s[nHash] == '"' { + break + } + } + return nHash, s[:nHash], errors.Newf(pos, "invalid attribute string") + } + + // Determine closing quote. + nQuote := 1 + if c := s[nHash]; nHash+6 < len(s) && s[nHash+1] == c && s[nHash+2] == c { + nQuote = 3 + } + close := s[nHash:nHash+nQuote] + s[:nHash] + + // Search for closing quote. 
+ index := strings.Index(s[len(close):], close) + if index == -1 { + return len(s), "", errors.Newf(pos, "attribute string not terminated") + } + + index += 2 * len(close) + s, err2 := literal.Unquote(s[:index]) + if err2 != nil { + return index, "", errors.Newf(pos, "invalid attribute string: %v", err2) + } + return index, s, nil +} diff --git a/vendor/cuelang.org/go/internal/cli/cli.go b/vendor/cuelang.org/go/internal/cli/cli.go new file mode 100644 index 000000000..f6ffd251f --- /dev/null +++ b/vendor/cuelang.org/go/internal/cli/cli.go @@ -0,0 +1,91 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cli + +import ( + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/parser" + "cuelang.org/go/cue/token" +) + +func ParseValue(pos token.Pos, name, str string, k cue.Kind) (x ast.Expr, errs errors.Error) { + var expr ast.Expr + + if k&cue.NumberKind != 0 { + var err error + expr, err = parser.ParseExpr(name, str) + if err != nil { + errs = errors.Wrapf(err, pos, + "invalid number for environment variable %s", name) + } + } + + if k&cue.BoolKind != 0 { + str = strings.TrimSpace(str) + b, ok := boolValues[str] + if !ok { + errs = errors.Append(errs, errors.Newf(pos, + "invalid boolean value %q for environment variable %s", str, name)) + } else if expr != nil || k&cue.StringKind != 0 { + // Convert into an expression + bl := ast.NewBool(b) + if expr != nil { + expr = &ast.BinaryExpr{Op: token.OR, X: expr, Y: bl} + } else { + expr = bl + } + } else { + x = ast.NewBool(b) + } + } + + if k&cue.StringKind != 0 { + if expr != nil { + expr = &ast.BinaryExpr{Op: token.OR, X: expr, Y: ast.NewString(str)} + } else { + x = ast.NewString(str) + } + } + + switch { + case expr != nil: + return expr, nil + case x != nil: + return x, nil + case errs == nil: + return nil, errors.Newf(pos, + "invalid type for environment variable %s", name) + } + return nil, errs +} + +var boolValues = map[string]bool{ + "1": true, + "0": false, + "t": true, + "f": false, + "T": true, + "F": false, + "true": true, + "false": false, + "TRUE": true, + "FALSE": false, + "True": true, + "False": false, +} diff --git a/vendor/cuelang.org/go/internal/core/adt/adt.go b/vendor/cuelang.org/go/internal/core/adt/adt.go new file mode 100644 index 000000000..7c670a49f --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/adt.go @@ -0,0 +1,361 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +import ( + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/token" +) + +func Resolve(ctx *OpContext, c Conjunct) *Vertex { + env := c.Env + // TODO: also allow resolution in parent scopes. The following will set up + // the environments. But the compiler also needs to resolve accordingly. + // + // // Set up environments for parent scopes, if any. + // root := env + // for p := scope; p != nil; p = p.Parent { + // root.Up = &Environment{Vertex: p.Parent} + // root = root.Up + // } + + var v Value + + expr := c.Expr() + switch x := expr.(type) { + case Value: + v = x + + case Resolver: + r, err := ctx.Resolve(env, x) + if err != nil { + v = err + break + } + // r.Finalize(ctx) // TODO: Finalize here? + return r + + case Evaluator: + // TODO: have a way to evaluate, but not strip down to the value. + v, _ = ctx.Evaluate(env, expr) + + default: + // Unknown type. + v = ctx.NewErrf( + "could not evaluate expression %s of type %T", ctx.Str(c.Expr()), c) + } + + return ToVertex(v) +} + +// A Node is any abstract data type representing an value or expression. +type Node interface { + Source() ast.Node + node() // enforce internal. +} + +// A Decl represents all valid StructLit elements. +type Decl interface { + Node + declNode() +} + +// An Elem represents all value ListLit elements. +// +// All Elem values can be used as a Decl. +type Elem interface { + Decl + elemNode() +} + +// An Expr corresponds to an ast.Expr. +// +// All Expr values can be used as an Elem or Decl. 
+type Expr interface { + Elem + expr() +} + +// A BaseValue is any Value or a *Marker. It indicates the type of a Vertex. +type BaseValue interface { + Kind() Kind +} + +// A Value represents a node in the evaluated data graph. +// +// All Values values can also be used as a Expr. +type Value interface { + Expr + Concreteness() Concreteness + Kind() Kind +} + +// An Evaluator provides a method to convert to a value. +type Evaluator interface { + Node + + // evaluate evaluates the underlying expression. If the expression + // is incomplete, it may record the error in ctx and return nil. + evaluate(ctx *OpContext) Value +} + +// A Resolver represents a reference somewhere else within a tree that resolves +// a value. +type Resolver interface { + Node + resolve(ctx *OpContext, state VertexStatus) *Vertex +} + +type YieldFunc func(env *Environment, s *StructLit) + +// A Yielder represents 0 or more labeled values of structs or lists. +type Yielder interface { + Node + yield(ctx *OpContext, fn YieldFunc) +} + +// A Validator validates a Value. All Validators are Values. +type Validator interface { + Value + validate(c *OpContext, v Value) *Bottom +} + +// Pos returns the file position of n, or token.NoPos if it is unknown. +func Pos(n Node) token.Pos { + src := n.Source() + if src == nil { + return token.NoPos + } + return src.Pos() +} + +// Value + +func (x *Vertex) Concreteness() Concreteness { + // Depends on concreteness of value. + switch v := x.BaseValue.(type) { + case nil: + return Concrete // Should be indetermined. 
+ + case Value: + return v.Concreteness() + + default: // *StructMarker, *ListMarker: + return Concrete + } +} + +func (x *NodeLink) Concreteness() Concreteness { return Concrete } + +func (*Conjunction) Concreteness() Concreteness { return Constraint } +func (*Disjunction) Concreteness() Concreteness { return Constraint } +func (*BoundValue) Concreteness() Concreteness { return Constraint } + +func (*Builtin) Concreteness() Concreteness { return Concrete } +func (*BuiltinValidator) Concreteness() Concreteness { return Constraint } + +// Value and Expr + +func (*Bottom) Concreteness() Concreteness { return BottomLevel } +func (*Null) Concreteness() Concreteness { return Concrete } +func (*Bool) Concreteness() Concreteness { return Concrete } +func (*Num) Concreteness() Concreteness { return Concrete } +func (*String) Concreteness() Concreteness { return Concrete } +func (*Bytes) Concreteness() Concreteness { return Concrete } +func (*Top) Concreteness() Concreteness { return Any } +func (*BasicType) Concreteness() Concreteness { return Type } + +// Expr + +func (*StructLit) expr() {} +func (*ListLit) expr() {} +func (*DisjunctionExpr) expr() {} + +// Expr and Value + +func (*Bottom) expr() {} +func (*Null) expr() {} +func (*Bool) expr() {} +func (*Num) expr() {} +func (*String) expr() {} +func (*Bytes) expr() {} +func (*Top) expr() {} +func (*BasicType) expr() {} +func (*Vertex) expr() {} +func (*ListMarker) expr() {} +func (*StructMarker) expr() {} +func (*Conjunction) expr() {} +func (*Disjunction) expr() {} +func (*BoundValue) expr() {} +func (*BuiltinValidator) expr() {} +func (*Builtin) expr() {} + +// Expr and Resolver + +func (*NodeLink) expr() {} +func (*FieldReference) expr() {} +func (*LabelReference) expr() {} +func (*DynamicReference) expr() {} +func (*ImportReference) expr() {} +func (*LetReference) expr() {} + +// Expr and Evaluator + +func (*BoundExpr) expr() {} +func (*SelectorExpr) expr() {} +func (*IndexExpr) expr() {} +func (*SliceExpr) expr() {} 
+func (*Interpolation) expr() {} +func (*UnaryExpr) expr() {} +func (*BinaryExpr) expr() {} +func (*CallExpr) expr() {} + +// Decl and Expr (so allow attaching original source in Conjunct) + +func (*Field) declNode() {} +func (x *Field) expr() Expr { return x.Value } +func (*OptionalField) declNode() {} +func (x *OptionalField) expr() Expr { return x.Value } +func (*BulkOptionalField) declNode() {} +func (x *BulkOptionalField) expr() Expr { return x.Value } +func (*DynamicField) declNode() {} +func (x *DynamicField) expr() Expr { return x.Value } + +// Decl and Yielder + +func (*LetClause) declNode() {} + +// Decl and Elem + +func (*StructLit) declNode() {} +func (*StructLit) elemNode() {} +func (*ListLit) declNode() {} +func (*ListLit) elemNode() {} +func (*Ellipsis) elemNode() {} +func (*Ellipsis) declNode() {} +func (*Bottom) declNode() {} +func (*Bottom) elemNode() {} +func (*Null) declNode() {} +func (*Null) elemNode() {} +func (*Bool) declNode() {} +func (*Bool) elemNode() {} +func (*Num) declNode() {} +func (*Num) elemNode() {} +func (*String) declNode() {} +func (*String) elemNode() {} +func (*Bytes) declNode() {} +func (*Bytes) elemNode() {} +func (*Top) declNode() {} +func (*Top) elemNode() {} +func (*BasicType) declNode() {} +func (*BasicType) elemNode() {} +func (*BoundExpr) declNode() {} +func (*BoundExpr) elemNode() {} +func (*Vertex) declNode() {} +func (*Vertex) elemNode() {} +func (*ListMarker) declNode() {} +func (*ListMarker) elemNode() {} +func (*StructMarker) declNode() {} +func (*StructMarker) elemNode() {} +func (*Conjunction) declNode() {} +func (*Conjunction) elemNode() {} +func (*Disjunction) declNode() {} +func (*Disjunction) elemNode() {} +func (*BoundValue) declNode() {} +func (*BoundValue) elemNode() {} +func (*BuiltinValidator) declNode() {} +func (*BuiltinValidator) elemNode() {} +func (*NodeLink) declNode() {} +func (*NodeLink) elemNode() {} +func (*FieldReference) declNode() {} +func (*FieldReference) elemNode() {} +func 
(*LabelReference) declNode() {} +func (*LabelReference) elemNode() {} +func (*DynamicReference) declNode() {} +func (*DynamicReference) elemNode() {} +func (*ImportReference) declNode() {} +func (*ImportReference) elemNode() {} +func (*LetReference) declNode() {} +func (*LetReference) elemNode() {} +func (*SelectorExpr) declNode() {} +func (*SelectorExpr) elemNode() {} +func (*IndexExpr) declNode() {} +func (*IndexExpr) elemNode() {} +func (*SliceExpr) declNode() {} +func (*SliceExpr) elemNode() {} +func (*Interpolation) declNode() {} +func (*Interpolation) elemNode() {} +func (*UnaryExpr) declNode() {} +func (*UnaryExpr) elemNode() {} +func (*BinaryExpr) declNode() {} +func (*BinaryExpr) elemNode() {} +func (*CallExpr) declNode() {} +func (*CallExpr) elemNode() {} +func (*Builtin) declNode() {} +func (*Builtin) elemNode() {} +func (*DisjunctionExpr) declNode() {} +func (*DisjunctionExpr) elemNode() {} + +// Decl, Elem, and Yielder + +func (*ForClause) declNode() {} +func (*ForClause) elemNode() {} +func (*IfClause) declNode() {} +func (*IfClause) elemNode() {} + +// Yielder only: ValueClause + +// Node + +func (*Vertex) node() {} +func (*Conjunction) node() {} +func (*Disjunction) node() {} +func (*BoundValue) node() {} +func (*Builtin) node() {} +func (*BuiltinValidator) node() {} +func (*Bottom) node() {} +func (*Null) node() {} +func (*Bool) node() {} +func (*Num) node() {} +func (*String) node() {} +func (*Bytes) node() {} +func (*Top) node() {} +func (*BasicType) node() {} +func (*StructLit) node() {} +func (*ListLit) node() {} +func (*BoundExpr) node() {} +func (*NodeLink) node() {} +func (*FieldReference) node() {} +func (*LabelReference) node() {} +func (*DynamicReference) node() {} +func (*ImportReference) node() {} +func (*LetReference) node() {} +func (*SelectorExpr) node() {} +func (*IndexExpr) node() {} +func (*SliceExpr) node() {} +func (*Interpolation) node() {} +func (*UnaryExpr) node() {} +func (*BinaryExpr) node() {} +func (*CallExpr) node() {} 
+func (*DisjunctionExpr) node() {} +func (*Field) node() {} +func (*OptionalField) node() {} +func (*BulkOptionalField) node() {} +func (*DynamicField) node() {} +func (*Ellipsis) node() {} +func (*ForClause) node() {} +func (*IfClause) node() {} +func (*LetClause) node() {} +func (*ValueClause) node() {} diff --git a/vendor/cuelang.org/go/internal/core/adt/binop.go b/vendor/cuelang.org/go/internal/core/adt/binop.go new file mode 100644 index 000000000..4c87a4961 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/binop.go @@ -0,0 +1,313 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +import ( + "bytes" + "strings" +) + +// BinOp handles all operations except AndOp and OrOp. This includes processing +// unary comparators such as '<4' and '=~"foo"'. +// +// BinOp returns nil if not both left and right are concrete. 
+func BinOp(c *OpContext, op Op, left, right Value) Value { + leftKind := left.Kind() + rightKind := right.Kind() + + const msg = "non-concrete value '%v' to operation '%s'" + if left.Concreteness() > Concrete { + return &Bottom{ + Code: IncompleteError, + Err: c.Newf(msg, c.Str(left), op), + } + } + if right.Concreteness() > Concrete { + return &Bottom{ + Code: IncompleteError, + Err: c.Newf(msg, c.Str(right), op), + } + } + + if err := CombineErrors(c.src, left, right); err != nil { + return err + } + + switch op { + case EqualOp: + switch { + case leftKind == NullKind && rightKind == NullKind: + return c.newBool(true) + + case leftKind == NullKind || rightKind == NullKind: + return c.newBool(false) + + case leftKind == BoolKind: + return c.newBool(c.BoolValue(left) == c.BoolValue(right)) + + case leftKind == StringKind: + // normalize? + return cmpTonode(c, op, strings.Compare(c.StringValue(left), c.StringValue(right))) + + case leftKind == BytesKind: + return cmpTonode(c, op, bytes.Compare(c.bytesValue(left, op), c.bytesValue(right, op))) + + case leftKind&NumKind != 0 && rightKind&NumKind != 0: + // n := c.newNum() + return cmpTonode(c, op, c.Num(left, op).X.Cmp(&c.Num(right, op).X)) + + case leftKind == ListKind && rightKind == ListKind: + x := c.Elems(left) + y := c.Elems(right) + if len(x) != len(y) { + return c.newBool(false) + } + for i, e := range x { + a, _ := c.Concrete(nil, e, op) + b, _ := c.Concrete(nil, y[i], op) + if !test(c, EqualOp, a, b) { + return c.newBool(false) + } + } + return c.newBool(true) + } + + case NotEqualOp: + switch { + case leftKind == NullKind && rightKind == NullKind: + return c.newBool(false) + + case leftKind == NullKind || rightKind == NullKind: + return c.newBool(true) + + case leftKind == BoolKind: + return c.newBool(c.boolValue(left, op) != c.boolValue(right, op)) + + case leftKind == StringKind: + // normalize? 
+ return cmpTonode(c, op, strings.Compare(c.StringValue(left), c.StringValue(right))) + + case leftKind == BytesKind: + return cmpTonode(c, op, bytes.Compare(c.bytesValue(left, op), c.bytesValue(right, op))) + + case leftKind&NumKind != 0 && rightKind&NumKind != 0: + // n := c.newNum() + return cmpTonode(c, op, c.Num(left, op).X.Cmp(&c.Num(right, op).X)) + + case leftKind == ListKind && rightKind == ListKind: + x := c.Elems(left) + y := c.Elems(right) + if len(x) != len(y) { + return c.newBool(false) + } + for i, e := range x { + a, _ := c.Concrete(nil, e, op) + b, _ := c.Concrete(nil, y[i], op) + if !test(c, EqualOp, a, b) { + return c.newBool(true) + } + } + return c.newBool(false) + } + + case LessThanOp, LessEqualOp, GreaterEqualOp, GreaterThanOp: + switch { + case leftKind == StringKind && rightKind == StringKind: + // normalize? + return cmpTonode(c, op, strings.Compare(c.stringValue(left, op), c.stringValue(right, op))) + + case leftKind == BytesKind && rightKind == BytesKind: + return cmpTonode(c, op, bytes.Compare(c.bytesValue(left, op), c.bytesValue(right, op))) + + case leftKind&NumKind != 0 && rightKind&NumKind != 0: + // n := c.newNum(left, right) + return cmpTonode(c, op, c.Num(left, op).X.Cmp(&c.Num(right, op).X)) + } + + case BoolAndOp: + return c.newBool(c.boolValue(left, op) && c.boolValue(right, op)) + + case BoolOrOp: + return c.newBool(c.boolValue(left, op) || c.boolValue(right, op)) + + case MatchOp: + // if y.re == nil { + // // This really should not happen, but leave in for safety. 
+ // b, err := Regexp.MatchString(str, x.str) + // if err != nil { + // return c.Errf(Src, "error parsing Regexp: %v", err) + // } + // return boolTonode(Src, b) + // } + return c.newBool(c.regexp(right).MatchString(c.stringValue(left, op))) + + case NotMatchOp: + return c.newBool(!c.regexp(right).MatchString(c.stringValue(left, op))) + + case AddOp: + switch { + case leftKind&NumKind != 0 && rightKind&NumKind != 0: + return c.Add(c.Num(left, op), c.Num(right, op)) + + case leftKind == StringKind && rightKind == StringKind: + return c.NewString(c.StringValue(left) + c.StringValue(right)) + + case leftKind == BytesKind && rightKind == BytesKind: + ba := c.bytesValue(left, op) + bb := c.bytesValue(right, op) + b := make([]byte, len(ba)+len(bb)) + copy(b, ba) + copy(b[len(ba):], bb) + return c.newBytes(b) + + case leftKind == ListKind && rightKind == ListKind: + // TODO: get rid of list addition. Semantically it is somewhat + // unclear and, as it turns out, it is also hard to get right. + // Simulate addition with comprehensions now. 
+ if err := c.Err(); err != nil { + return err + } + + x := MakeIdentLabel(c, "x", "") + + forClause := func(src Expr) *ForClause { + return &ForClause{ + Value: x, + Src: src, + Dst: &ValueClause{&StructLit{Decls: []Decl{ + &FieldReference{UpCount: 1, Label: x}, + }}}, + } + } + + list := &ListLit{ + Elems: []Elem{ + forClause(left), + forClause(right), + }, + } + + n := &Vertex{} + n.AddConjunct(MakeRootConjunct(c.Env(0), list)) + n.Finalize(c) + + return n + } + + case SubtractOp: + return c.Sub(c.Num(left, op), c.Num(right, op)) + + case MultiplyOp: + switch { + // float + case leftKind&NumKind != 0 && rightKind&NumKind != 0: + return c.Mul(c.Num(left, op), c.Num(right, op)) + + case leftKind == StringKind && rightKind == IntKind: + const as = "string multiplication" + return c.NewString(strings.Repeat(c.stringValue(left, as), int(c.uint64(right, as)))) + + case leftKind == IntKind && rightKind == StringKind: + const as = "string multiplication" + return c.NewString(strings.Repeat(c.stringValue(right, as), int(c.uint64(left, as)))) + + case leftKind == BytesKind && rightKind == IntKind: + const as = "bytes multiplication" + return c.newBytes(bytes.Repeat(c.bytesValue(left, as), int(c.uint64(right, as)))) + + case leftKind == IntKind && rightKind == BytesKind: + const as = "bytes multiplication" + return c.newBytes(bytes.Repeat(c.bytesValue(right, as), int(c.uint64(left, as)))) + + case leftKind == ListKind && rightKind == IntKind: + left, right = right, left + fallthrough + + case leftKind == IntKind && rightKind == ListKind: + // TODO: get rid of list multiplication. 
+ + list := &ListLit{} + x := MakeIdentLabel(c, "x", "") + + for i := c.uint64(left, "list multiplier"); i > 0; i-- { + list.Elems = append(list.Elems, + &ForClause{ + Value: x, + Src: right, + Dst: &ValueClause{&StructLit{Decls: []Decl{ + &FieldReference{UpCount: 1, Label: x}, + }}}, + }, + ) + } + if err := c.Err(); err != nil { + return err + } + + n := &Vertex{} + n.AddConjunct(MakeRootConjunct(c.Env(0), list)) + n.Finalize(c) + + return n + } + + case FloatQuotientOp: + if leftKind&NumKind != 0 && rightKind&NumKind != 0 { + return c.Quo(c.Num(left, op), c.Num(right, op)) + } + + case IntDivideOp: + if leftKind&IntKind != 0 && rightKind&IntKind != 0 { + return c.IntDiv(c.Num(left, op), c.Num(right, op)) + } + + case IntModuloOp: + if leftKind&IntKind != 0 && rightKind&IntKind != 0 { + return c.IntMod(c.Num(left, op), c.Num(right, op)) + } + + case IntQuotientOp: + if leftKind&IntKind != 0 && rightKind&IntKind != 0 { + return c.IntQuo(c.Num(left, op), c.Num(right, op)) + } + + case IntRemainderOp: + if leftKind&IntKind != 0 && rightKind&IntKind != 0 { + return c.IntRem(c.Num(left, op), c.Num(right, op)) + } + } + + return c.NewErrf("invalid operands %s and %s to '%s' (type %s and %s)", + c.Str(left), c.Str(right), op, left.Kind(), right.Kind()) +} + +func cmpTonode(c *OpContext, op Op, r int) Value { + result := false + switch op { + case LessThanOp: + result = r == -1 + case LessEqualOp: + result = r != 1 + case EqualOp, AndOp: + result = r == 0 + case NotEqualOp: + result = r != 0 + case GreaterEqualOp: + result = r != -1 + case GreaterThanOp: + result = r == 1 + } + return c.newBool(result) +} diff --git a/vendor/cuelang.org/go/internal/core/adt/closed.go b/vendor/cuelang.org/go/internal/core/adt/closed.go new file mode 100644 index 000000000..663351794 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/closed.go @@ -0,0 +1,504 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not 
use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +// This file implements the closedness algorithm. + +// Outline of algorithm +// +// To compute closedness each Vertex is associated with a tree which has +// leaf nodes with sets of allowed labels, and interior nodes that describe +// how these sets may be combines: Or, for embedding, or And for definitions. +// +// Each conjunct of a Vertex is associated with such a leaf node. Each +// conjunct that evaluates to a struct is added to the list of Structs, which +// in the end forms this tree. If a conjunct is embedded, or references another +// struct or definition, it adds interior node to reflect this. +// +// To test whether a feature is allowed, it must satisfy the resulting +// expression tree. +// +// In order to avoid having to copy the tree for each node, the tree is linked +// from leaf node to root, rather than the other way around. This allows +// parent nodes to be shared as the tree grows and ensures that the growth +// of the tree is bounded by the number of conjuncts. As a consequence, this +// requires a two-pass algorithm: +// +// - walk up to mark which nodes are required and count the number of +// child nodes that need to be satisfied. +// - verify fields in leaf structs and mark parent leafs as satisfied +// when appropriate. +// +// A label is allowed if all required root nodes are marked as accepted after +// these two passes. 
+// + +// A note on embeddings: it is important to keep track which conjuncts originate +// from an embedding, as an embedded value may eventually turn into a closed +// struct. Consider +// +// a: { +// b +// d: e: int +// } +// b: d: { +// #A & #B +// } +// +// At the point of evaluating `a`, the struct is not yet closed. However, +// descending into `d` will trigger the inclusion of definitions which in turn +// causes the struct to be closed. At this point, it is important to know that +// `b` originated from an embedding, as otherwise `e` may not be allowed. + +// TODO(perf): +// - less nodes +// - disable StructInfo nodes that can no longer pass a feature +// - sort StructInfos active ones first. + +// TODO(errors): return a dedicated ConflictError that can track original +// positions on demand. + +type closeNodeType uint8 + +const ( + // a closeRef node is created when there is a non-definition reference. + // These nodes are not necessary for computing results, but may be + // relevant down the line to group closures through embedded values and + // to track position information for failures. + closeRef closeNodeType = iota + + // closeDef indicates this node was introduced as a result of referencing + // a definition. + closeDef + + // closeEmbed indicates this node was added as a result of an embedding. + closeEmbed + + _ = closeRef // silence the linter +) + +// TODO: merge with closeInfo: this is a leftover of the refactoring. 
+type CloseInfo struct { + *closeInfo + + IsClosed bool + FieldTypes OptionalType +} + +func (c CloseInfo) Location() Node { + if c.closeInfo == nil { + return nil + } + return c.closeInfo.location +} + +func (c CloseInfo) SpanMask() SpanType { + if c.closeInfo == nil { + return 0 + } + return c.span +} + +func (c CloseInfo) RootSpanType() SpanType { + if c.closeInfo == nil { + return 0 + } + return c.root +} + +func (c CloseInfo) IsInOneOf(t SpanType) bool { + if c.closeInfo == nil { + return false + } + return c.span&t != 0 +} + +// TODO(perf): remove: error positions should always be computed on demand +// in dedicated error types. +func (c *CloseInfo) AddPositions(ctx *OpContext) { + for s := c.closeInfo; s != nil; s = s.parent { + if loc := s.location; loc != nil { + ctx.AddPosition(loc) + } + } +} + +// TODO(perf): use on StructInfo. Then if parent and expression are the same +// it is possible to use cached value. +func (c CloseInfo) SpawnEmbed(x Expr) CloseInfo { + var span SpanType + if c.closeInfo != nil { + span = c.span + } + + c.closeInfo = &closeInfo{ + parent: c.closeInfo, + location: x, + mode: closeEmbed, + root: EmbeddingSpan, + span: span | EmbeddingSpan, + } + return c +} + +// SpawnGroup is used for structs that contain embeddings that may end up +// closing the struct. This is to force that `b` is not allowed in +// +// a: {#foo} & {b: int} +// +func (c CloseInfo) SpawnGroup(x Expr) CloseInfo { + var span SpanType + if c.closeInfo != nil { + span = c.span + } + c.closeInfo = &closeInfo{ + parent: c.closeInfo, + location: x, + span: span, + } + return c +} + +// SpawnSpan is used to track that a value is introduced by a comprehension +// or constraint. Definition and embedding spans are introduced with SpawnRef +// and SpawnEmbed, respectively. 
+func (c CloseInfo) SpawnSpan(x Node, t SpanType) CloseInfo { + var span SpanType + if c.closeInfo != nil { + span = c.span + } + c.closeInfo = &closeInfo{ + parent: c.closeInfo, + location: x, + root: t, + span: span | t, + } + return c +} + +func (c CloseInfo) SpawnRef(arc *Vertex, isDef bool, x Expr) CloseInfo { + var span SpanType + if c.closeInfo != nil { + span = c.span + } + c.closeInfo = &closeInfo{ + parent: c.closeInfo, + location: x, + span: span, + } + if isDef { + c.mode = closeDef + c.closeInfo.root = DefinitionSpan + c.closeInfo.span |= DefinitionSpan + } + return c +} + +// isDef reports whether an expressions is a reference that references a +// definition anywhere in its selection path. +// +// TODO(performance): this should be merged with resolve(). But for now keeping +// this code isolated makes it easier to see what it is for. +func IsDef(x Expr) bool { + switch r := x.(type) { + case *FieldReference: + return r.Label.IsDef() + + case *SelectorExpr: + if r.Sel.IsDef() { + return true + } + return IsDef(r.X) + + case *IndexExpr: + return IsDef(r.X) + } + return false +} + +// A SpanType is used to indicate whether a CUE value is within the scope of +// a certain CUE language construct, the span type. +type SpanType uint8 + +const ( + // EmbeddingSpan means that this value was embedded at some point and should + // not be included as a possible root node in the todo field of OpContext. + EmbeddingSpan SpanType = 1 << iota + ConstraintSpan + ComprehensionSpan + DefinitionSpan +) + +type closeInfo struct { + // location records the expression that led to this node's introduction. + location Node + + // The parent node in the tree. + parent *closeInfo + + // TODO(performance): if references are chained, we could have a separate + // parent pointer to skip the chain. + + // mode indicates whether this node was added as part of an embedding, + // definition or non-definition reference. 
+ mode closeNodeType + + // noCheck means this struct is irrelevant for closedness checking. This can + // happen when: + // - it is a sibling of a new definition. + noCheck bool // don't process for inclusion info + + root SpanType + span SpanType +} + +// closeStats holds the administrative fields for a closeInfo value. Each +// closeInfo is associated with a single closeStats value per unification +// operator. This association is done through an OpContext. This allows the +// same value to be used in multiple concurrent unification operations. +// NOTE: there are other parts of the algorithm that are not thread-safe yet. +type closeStats struct { + // the other fields of this closeStats value are only valid if generation + // is equal to the generation in OpContext. This allows for lazy + // initialization of closeStats. + generation int + + // These counts keep track of how many required child nodes need to be + // completed before this node is accepted. + requiredCount int + acceptedCount int + + // accepted is set if this node is accepted. + accepted bool + + required bool + next *closeStats +} + +func (c *closeInfo) isClosed() bool { + return c.mode == closeDef +} + +func isClosed(v *Vertex) bool { + for _, s := range v.Structs { + if s.IsClosed { + return true + } + for c := s.closeInfo; c != nil; c = c.parent { + if c.isClosed() { + return true + } + } + } + return false +} + +// Accept determines whether f is allowed in n. It uses the OpContext for +// caching administrative fields. +func Accept(ctx *OpContext, n *Vertex, f Feature) (found, required bool) { + ctx.generation++ + ctx.todo = nil + + var optionalTypes OptionalType + + // TODO(perf): more aggressively determine whether a struct is open or + // closed: open structs do not have to be checked, yet they can particularly + // be the ones with performance isssues, for instanced as a result of + // embedded for comprehensions. 
+ for _, s := range n.Structs { + if !s.useForAccept() { + continue + } + markCounts(ctx, s.CloseInfo) + optionalTypes |= s.types + } + + var str Value + if optionalTypes&(HasComplexPattern|HasDynamic) != 0 && f.IsString() { + str = f.ToValue(ctx) + } + + for _, s := range n.Structs { + if !s.useForAccept() { + continue + } + if verifyArc(ctx, s, f, str) { + // Beware: don't add to below expression: this relies on the + // side effects of markUp. + ok := markUp(ctx, s.closeInfo, 0) + found = found || ok + } + } + + // Reject if any of the roots is not accepted. + for x := ctx.todo; x != nil; x = x.next { + if !x.accepted { + return false, true + } + } + + return found, ctx.todo != nil +} + +func markCounts(ctx *OpContext, info CloseInfo) { + if info.IsClosed { + markRequired(ctx, info.closeInfo) + return + } + for s := info.closeInfo; s != nil; s = s.parent { + if s.isClosed() { + markRequired(ctx, s) + return + } + } +} + +func markRequired(ctx *OpContext, info *closeInfo) { + count := 0 + for ; ; info = info.parent { + var s closeInfo + if info != nil { + s = *info + } + + x := getScratch(ctx, info) + + x.requiredCount += count + + if x.required { + return + } + + if s.span&EmbeddingSpan == 0 { + x.next = ctx.todo + ctx.todo = x + } + + x.required = true + + if info == nil { + return + } + + count = 0 + if s.mode != closeEmbed { + count = 1 + } + } +} + +func markUp(ctx *OpContext, info *closeInfo, count int) bool { + for ; ; info = info.parent { + var s closeInfo + if info != nil { + s = *info + } + + x := getScratch(ctx, info) + + x.acceptedCount += count + + if x.acceptedCount < x.requiredCount { + return false + } + + x.accepted = true + + if info == nil { + return true + } + + count = 0 + if x.required && s.mode != closeEmbed { + count = 1 + } + } +} + +// getScratch: explain generation. 
+func getScratch(ctx *OpContext, s *closeInfo) *closeStats { + m := ctx.closed + if m == nil { + m = map[*closeInfo]*closeStats{} + ctx.closed = m + } + + x := m[s] + if x == nil { + x = &closeStats{} + m[s] = x + } + + if x.generation != ctx.generation { + *x = closeStats{generation: ctx.generation} + } + + return x +} + +func verifyArc(ctx *OpContext, s *StructInfo, f Feature, label Value) bool { + isRegular := f.IsRegular() + + o := s.StructLit + env := s.Env + + if isRegular && (len(o.Additional) > 0 || o.IsOpen) { + return true + } + + for _, g := range o.Fields { + if f == g.Label { + return true + } + } + + if !isRegular { + return false + } + + // Do not record errors during this validation. + errs := ctx.errs + defer func() { ctx.errs = errs }() + + if len(o.Dynamic) > 0 && f.IsString() { + if label == nil && f.IsString() { + label = f.ToValue(ctx) + } + for _, b := range o.Dynamic { + v := env.evalCached(ctx, b.Key) + s, ok := v.(*String) + if !ok { + continue + } + if label.(*String).Str == s.Str { + return true + } + } + } + + for _, b := range o.Bulk { + if matchBulk(ctx, env, b, f, label) { + return true + } + } + + // TODO(perf): delay adding this position: create a special error type that + // computes all necessary positions on demand. + if ctx != nil { + ctx.AddPosition(s.StructLit) + } + + return false +} diff --git a/vendor/cuelang.org/go/internal/core/adt/closed2.go b/vendor/cuelang.org/go/internal/core/adt/closed2.go new file mode 100644 index 000000000..60496ae9a --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/closed2.go @@ -0,0 +1,61 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +// CloseDef defines how individual fieldSets (corresponding to conjuncts) +// combine to determine whether a field is contained in a closed set. +// +// A CloseDef combines multiple conjuncts and embeddings. All CloseDefs are +// stored in slice. References to other CloseDefs are indices within this slice. +// Together they define the top of the tree of the expression tree of how +// conjuncts combine together (a canopy). + +// isComplexStruct reports whether the Closed information should be copied as a +// subtree into the parent node using InsertSubtree. If not, the conjuncts can +// just be inserted at the current ID. +func isComplexStruct(ctx *OpContext, v *Vertex) bool { + return v.IsClosed(ctx) +} + +// TODO: cleanup code and error messages. Reduce duplication in some related +// code. +func verifyArc2(ctx *OpContext, f Feature, v *Vertex, isClosed bool) (found bool, err *Bottom) { + // TODO(perf): collect positions in error. 
+ defer ctx.ReleasePositions(ctx.MarkPositions()) + + if ok, required := Accept(ctx, v.Parent, f); ok || (!required && !isClosed) { + return true, nil + } + + if !f.IsString() && f != InvalidLabel { + // if f.IsHidden() && f != InvalidLabel { Also change Accept in composite.go + return false, nil + } + + if v != nil { + for _, c := range v.Conjuncts { + if pos := c.Field(); pos != nil { + ctx.AddPosition(pos) + } + } + } + + for _, s := range v.Parent.Structs { + s.AddPositions(ctx) + } + + label := f.SelectorString(ctx) + return false, ctx.NewErrf("field `%s` not allowed", label) +} diff --git a/vendor/cuelang.org/go/internal/core/adt/composite.go b/vendor/cuelang.org/go/internal/core/adt/composite.go new file mode 100644 index 000000000..e1d96dfb2 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/composite.go @@ -0,0 +1,760 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +import ( + "fmt" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +// TODO: unanswered questions about structural cycles: +// +// 1. When detecting a structural cycle, should we consider this as: +// a) an unevaluated value, +// b) an incomplete error (which does not affect parent validity), or +// c) a special value. 
+// +// Making it an error is the simplest way to ensure reentrancy is disallowed: +// without an error it would require an additional mechanism to stop reentrancy +// from continuing to process. Even worse, in some cases it may only partially +// evaluate, resulting in unexpected results. For this reason, we are taking +// approach `b` for now. +// +// This has some consequences of how disjunctions are treated though. Consider +// +// list: { +// head: _ +// tail: list | null +// } +// +// When making it an error, evaluating the above will result in +// +// list: { +// head: _ +// tail: null +// } +// +// because list will result in a structural cycle, and thus an error, it will be +// stripped from the disjunction. This may or may not be a desirable property. A +// nice thing is that it is not required to write `list | *null`. A disadvantage +// is that this is perhaps somewhat inexplicit. +// +// When not making it an error (and simply cease evaluating child arcs upon +// cycle detection), the result would be: +// +// list: { +// head: _ +// tail: list | null +// } +// +// In other words, an evaluation would result in a cycle and thus an error. +// Implementations can recognize such cases by having unevaluated arcs. An +// explicit structure cycle marker would probably be less error prone. +// +// Note that in both cases, a reference to list will still use the original +// conjuncts, so the result will be the same for either method in this case. +// +// +// 2. Structural cycle allowance. +// +// Structural cycle detection disallows reentrancy as well. This means one +// cannot use structs for recursive computation. This will probably preclude +// evaluation of some configuration. Given that there is no real alternative +// yet, we could allow structural cycle detection to be optionally disabled. + +// An Environment links the parent scopes for identifier lookup to a composite +// node. 
Each conjunct that make up node in the tree can be associated with +// a different environment (although some conjuncts may share an Environment). +type Environment struct { + Up *Environment + Vertex *Vertex + + // DynamicLabel is only set when instantiating a field from a pattern + // constraint. It is used to resolve label references. + DynamicLabel Feature + + // TODO(perf): make the following public fields a shareable struct as it + // mostly is going to be the same for child nodes. + + // Cyclic indicates a structural cycle was detected for this conjunct or one + // of its ancestors. + Cyclic bool + + // Deref keeps track of nodes that should dereference to Vertex. It is used + // for detecting structural cycle. + // + // The detection algorithm is based on Tomabechi's quasi-destructive graph + // unification. This detection requires dependencies to be resolved into + // fully dereferenced vertices. This is not the case in our algorithm: + // the result of evaluating conjuncts is placed into dereferenced vertices + // _after_ they are evaluated, but the Environment still points to the + // non-dereferenced context. + // + // In order to be able to detect structural cycles, we need to ensure that + // at least one node that is part of a cycle in the context in which + // conjunctions are evaluated dereferences correctly. + // + // The only field necessary to detect a structural cycle, however, is + // the Status field of the Vertex. So rather than dereferencing a node + // proper, it is sufficient to copy the Status of the dereferenced nodes + // to these nodes (will always be EvaluatingArcs). + Deref []*Vertex + + // Cycles contains vertices for which cycles are detected. It is used + // for tracking self-references within structural cycles. + // + // Unlike Deref, Cycles is not incremented with child nodes. + // TODO: Cycles is always a tail end of Deref, so this can be optimized. 
+ Cycles []*Vertex + + cache map[Expr]Value +} + +type ID int32 + +// evalCached is used to look up let expressions. Caching let expressions +// prevents a possible combinatorial explosion. +func (e *Environment) evalCached(c *OpContext, x Expr) Value { + if v, ok := x.(Value); ok { + return v + } + v, ok := e.cache[x] + if !ok { + if e.cache == nil { + e.cache = map[Expr]Value{} + } + env, src := c.e, c.src + c.e, c.src = e, x.Source() + v = c.evalState(x, Partial) // TODO: should this be Finalized? + c.e, c.src = env, src + e.cache[x] = v + } + return v +} + +// A Vertex is a node in the value tree. It may be a leaf or internal node. +// It may have arcs to represent elements of a fully evaluated struct or list. +// +// For structs, it only contains definitions and concrete fields. +// optional fields are dropped. +// +// It maintains source information such as a list of conjuncts that contributed +// to the value. +type Vertex struct { + // Parent links to a parent Vertex. This parent should only be used to + // access the parent's Label field to find the relative location within a + // tree. + Parent *Vertex + + // Label is the feature leading to this vertex. + Label Feature + + // State: + // eval: nil, BaseValue: nil -- unevaluated + // eval: *, BaseValue: nil -- evaluating + // eval: *, BaseValue: * -- finalized + // + state *nodeContext + // TODO: move the following status fields to nodeContext. + + // status indicates the evaluation progress of this vertex. + status VertexStatus + + // isData indicates that this Vertex is to be interepreted as data: pattern + // and additional constraints, as well as optional fields, should be + // ignored. + isData bool + Closed bool + nonMonotonicReject bool + nonMonotonicInsertGen int32 + nonMonotonicLookupGen int32 + + // EvalCount keeps track of temporary dereferencing during evaluation. + // If EvalCount > 0, status should be considered to be EvaluatingArcs. 
+ EvalCount int32 + + // SelfCount is used for tracking self-references. + SelfCount int32 + + // BaseValue is the value associated with this vertex. For lists and structs + // this is a sentinel value indicating its kind. + BaseValue BaseValue + + // ChildErrors is the collection of all errors of children. + ChildErrors *Bottom + + // The parent of nodes can be followed to determine the path within the + // configuration of this node. + // Value Value + Arcs []*Vertex // arcs are sorted in display order. + + // Conjuncts lists the structs that ultimately formed this Composite value. + // This includes all selected disjuncts. + // + // This value may be nil, in which case the Arcs are considered to define + // the final value of this Vertex. + Conjuncts []Conjunct + + // Structs is a slice of struct literals that contributed to this value. + // This information is used to compute the topological sort of arcs. + Structs []*StructInfo +} + +type StructInfo struct { + *StructLit + + Env *Environment + + CloseInfo + + // Embed indicates the struct in which this struct is embedded (originally), + // or nil if this is a root structure. + // Embed *StructInfo + // Context *RefInfo // the location from which this struct originates. + Disable bool + + Embedding bool +} + +// TODO(perf): this could be much more aggressive for eliminating structs that +// are immaterial for closing. +func (s *StructInfo) useForAccept() bool { + if c := s.closeInfo; c != nil { + return !c.noCheck + } + return true +} + +// VertexStatus indicates the evaluation progress of a Vertex. +type VertexStatus int8 + +const ( + // Unprocessed indicates a Vertex has not been processed before. + // Value must be nil. + Unprocessed VertexStatus = iota + + // Evaluating means that the current Vertex is being evaluated. If this is + // encountered it indicates a reference cycle. Value must be nil. + Evaluating + + // Partial indicates that the result was only partially evaluated. 
It will
+	// need to be fully evaluated to get a complete result.
+	//
+	// TODO: this currently requires a renewed computation. Cache the
+	// nodeContext to allow reusing the computations done so far.
+	Partial
+
+	// AllArcs is request only. It must be past Partial, but
+	// before recursively resolving arcs.
+	AllArcs
+
+	// EvaluatingArcs indicates that the arcs of the Vertex are currently being
+	// evaluated. If this is encountered it indicates a structural cycle.
+	// Value does not have to be nil
+	EvaluatingArcs
+
+	// Finalized means that this node is fully evaluated and that the results
+	// are safe to use without further consideration.
+	Finalized
+)
+
+func (s VertexStatus) String() string {
+	switch s {
+	case Unprocessed:
+		return "unprocessed"
+	case Evaluating:
+		return "evaluating"
+	case Partial:
+		return "partial"
+	case AllArcs:
+		return "allarcs"
+	case EvaluatingArcs:
+		return "evaluatingArcs"
+	case Finalized:
+		return "finalized"
+	default:
+		return "unknown"
+	}
+}
+
+func (v *Vertex) Status() VertexStatus {
+	if v.EvalCount > 0 {
+		return EvaluatingArcs
+	}
+	return v.status
+}
+
+func (v *Vertex) UpdateStatus(s VertexStatus) {
+	Assertf(v.status <= s+1, "attempt to regress status from %d to %d", v.Status(), s)
+
+	if s == Finalized && v.BaseValue == nil {
+		// panic("not finalized")
+	}
+	v.status = s
+}
+
+// Value returns the Value of v without definitions if it is a scalar
+// or itself otherwise.
+func (v *Vertex) Value() Value {
+	switch x := v.BaseValue.(type) {
+	case nil:
+		return nil
+	case *StructMarker, *ListMarker:
+		return v
+	case Value:
+		return x
+	default:
+		panic(fmt.Sprintf("unexpected type %T", v.BaseValue))
+	}
+}
+
+// isUndefined reports whether a vertex does not have a usable BaseValue yet.
+func (v *Vertex) isUndefined() bool { + switch v.BaseValue { + case nil, cycle: + return true + } + return false +} + +func (x *Vertex) IsConcrete() bool { + return x.Concreteness() <= Concrete +} + +// IsData reports whether v should be interpreted in data mode. In other words, +// it tells whether optional field matching and non-regular fields, like +// definitions and hidden fields, should be ignored. +func (v *Vertex) IsData() bool { + return v.isData || len(v.Conjuncts) == 0 +} + +// ToDataSingle creates a new Vertex that represents just the regular fields +// of this vertex. Arcs are left untouched. +// It is used by cue.Eval to convert nodes to data on per-node basis. +func (v *Vertex) ToDataSingle() *Vertex { + w := *v + w.isData = true + w.state = nil + w.status = Finalized + return &w +} + +// ToDataAll returns a new v where v and all its descendents contain only +// the regular fields. +func (v *Vertex) ToDataAll() *Vertex { + arcs := make([]*Vertex, 0, len(v.Arcs)) + for _, a := range v.Arcs { + if a.Label.IsRegular() { + arcs = append(arcs, a.ToDataAll()) + } + } + w := *v + w.state = nil + w.status = Finalized + + w.BaseValue = toDataAll(w.BaseValue) + w.Arcs = arcs + w.isData = true + w.Conjuncts = make([]Conjunct, len(v.Conjuncts)) + // TODO(perf): this is not strictly necessary for evaluation, but it can + // hurt performance greatly. Drawback is that it may disable ordering. + for _, s := range w.Structs { + s.Disable = true + } + copy(w.Conjuncts, v.Conjuncts) + for i, c := range w.Conjuncts { + if v, _ := c.x.(Value); v != nil { + w.Conjuncts[i].x = toDataAll(v).(Value) + } + } + return &w +} + +func toDataAll(v BaseValue) BaseValue { + switch x := v.(type) { + default: + return x + + case *Vertex: + return x.ToDataAll() + + // The following cases are always erroneous, but we handle them anyway + // to avoid issues with the closedness algorithm down the line. 
+ case *Disjunction: + d := *x + d.Values = make([]*Vertex, len(x.Values)) + for i, v := range x.Values { + d.Values[i] = v.ToDataAll() + } + return &d + + case *Conjunction: + c := *x + c.Values = make([]Value, len(x.Values)) + for i, v := range x.Values { + // This case is okay because the source is of type Value. + c.Values[i] = toDataAll(v).(Value) + } + return &c + } +} + +// func (v *Vertex) IsEvaluating() bool { +// return v.Value == cycle +// } + +func (v *Vertex) IsErr() bool { + // if v.Status() > Evaluating { + if _, ok := v.BaseValue.(*Bottom); ok { + return true + } + // } + return false +} + +func (v *Vertex) Err(c *OpContext, state VertexStatus) *Bottom { + c.Unify(v, state) + if b, ok := v.BaseValue.(*Bottom); ok { + return b + } + return nil +} + +// func (v *Vertex) Evaluate() + +func (v *Vertex) Finalize(c *OpContext) { + c.Unify(v, Finalized) +} + +func (v *Vertex) AddErr(ctx *OpContext, b *Bottom) { + v.BaseValue = CombineErrors(nil, v.Value(), b) + v.UpdateStatus(Finalized) +} + +func (v *Vertex) SetValue(ctx *OpContext, state VertexStatus, value BaseValue) *Bottom { + v.BaseValue = value + v.UpdateStatus(state) + return nil +} + +// ToVertex wraps v in a new Vertex, if necessary. +func ToVertex(v Value) *Vertex { + switch x := v.(type) { + case *Vertex: + return x + default: + n := &Vertex{ + status: Finalized, + BaseValue: x, + } + n.AddConjunct(MakeRootConjunct(nil, v)) + return n + } +} + +// Unwrap returns the possibly non-concrete scalar value of v or nil if v is +// a list, struct or of undefined type. +func Unwrap(v Value) Value { + x, ok := v.(*Vertex) + if !ok { + return v + } + // b, _ := x.BaseValue.(*Bottom) + if n := x.state; n != nil && isCyclePlaceholder(x.BaseValue) { + if n.errs != nil && !n.errs.IsIncomplete() { + return n.errs + } + if n.scalar != nil { + return n.scalar + } + } + return x.Value() +} + +// OptionalType is a bit field of the type of optional constraints in use by an +// Acceptor. 
+type OptionalType int8 + +const ( + HasField OptionalType = 1 << iota // X: T + HasDynamic // (X): T or "\(X)": T + HasPattern // [X]: T + HasComplexPattern // anything but a basic type + HasAdditional // ...T + IsOpen // Defined for all fields +) + +func (v *Vertex) Kind() Kind { + // This is possible when evaluating comprehensions. It is potentially + // not known at this time what the type is. + if v.BaseValue == nil { + return TopKind + } + return v.BaseValue.Kind() +} + +func (v *Vertex) OptionalTypes() OptionalType { + var mask OptionalType + for _, s := range v.Structs { + mask |= s.OptionalTypes() + } + return mask +} + +// IsOptional reports whether a field is explicitly defined as optional, +// as opposed to whether it is allowed by a pattern constraint. +func (v *Vertex) IsOptional(label Feature) bool { + for _, s := range v.Structs { + if s.IsOptional(label) { + return true + } + } + return false +} + +func (v *Vertex) accepts(ok, required bool) bool { + return ok || (!required && !v.Closed) +} + +func (v *Vertex) IsClosed(ctx *OpContext) bool { + switch x := v.BaseValue.(type) { + case *ListMarker: + return !x.IsOpen + + case *StructMarker: + if x.NeedClose { + return true + } + return v.Closed || isClosed(v) + } + return false +} + +// TODO: return error instead of boolean? (or at least have version that does.) +func (v *Vertex) Accept(ctx *OpContext, f Feature) bool { + if v.IsList() { + if f.IsInt() { + // TODO(perf): use precomputed length. + if f.Index() < len(v.Elems()) { + return true + } + } + return !v.IsClosed(ctx) + } + + if !f.IsString() || !v.IsClosed(ctx) || v.Lookup(f) != nil { + return true + } + + // TODO(perf): collect positions in error. + defer ctx.ReleasePositions(ctx.MarkPositions()) + + return v.accepts(Accept(ctx, v, f)) +} + +// MatchAndInsert finds the conjuncts for optional fields, pattern +// constraints, and additional constraints that match f and inserts them in +// arc. 
Use f is 0 to match all additional constraints only. +func (v *Vertex) MatchAndInsert(ctx *OpContext, arc *Vertex) { + if !v.Accept(ctx, arc.Label) { + return + } + + // Go backwards to simulate old implementation. + for i := len(v.Structs) - 1; i >= 0; i-- { + s := v.Structs[i] + if s.Disable { + continue + } + s.MatchAndInsert(ctx, arc) + } +} + +func (v *Vertex) IsList() bool { + _, ok := v.BaseValue.(*ListMarker) + return ok +} + +// Lookup returns the Arc with label f if it exists or nil otherwise. +func (v *Vertex) Lookup(f Feature) *Vertex { + for _, a := range v.Arcs { + if a.Label == f { + return a + } + } + return nil +} + +// Elems returns the regular elements of a list. +func (v *Vertex) Elems() []*Vertex { + // TODO: add bookkeeping for where list arcs start and end. + a := make([]*Vertex, 0, len(v.Arcs)) + for _, x := range v.Arcs { + if x.Label.IsInt() { + a = append(a, x) + } + } + return a +} + +// GetArc returns a Vertex for the outgoing arc with label f. It creates and +// ads one if it doesn't yet exist. +func (v *Vertex) GetArc(c *OpContext, f Feature) (arc *Vertex, isNew bool) { + arc = v.Lookup(f) + if arc == nil { + for _, a := range v.state.usedArcs { + if a.Label == f { + arc = a + v.Arcs = append(v.Arcs, arc) + isNew = true + if c.nonMonotonicInsertNest > 0 { + a.nonMonotonicInsertGen = c.nonMonotonicGeneration + } + break + } + } + } + if arc == nil { + arc = &Vertex{Parent: v, Label: f} + v.Arcs = append(v.Arcs, arc) + isNew = true + if c.nonMonotonicInsertNest > 0 { + arc.nonMonotonicInsertGen = c.nonMonotonicGeneration + } + } + if c.nonMonotonicInsertNest == 0 { + arc.nonMonotonicInsertGen = 0 + } + return arc, isNew +} + +func (v *Vertex) Source() ast.Node { + if v != nil { + if b, ok := v.BaseValue.(Value); ok { + return b.Source() + } + } + return nil +} + +// AddConjunct adds the given Conjuncts to v if it doesn't already exist. 
+func (v *Vertex) AddConjunct(c Conjunct) *Bottom {
+	if v.BaseValue != nil {
+		// TODO: investigate why this happens at all. Removing it seems to
+		// change the order of fields in some cases.
+		//
+		// This is likely a bug in the evaluator and should not happen.
+		return &Bottom{Err: errors.Newf(token.NoPos, "cannot add conjunct")}
+	}
+	v.addConjunct(c)
+	return nil
+}
+
+func (v *Vertex) addConjunct(c Conjunct) {
+	for _, x := range v.Conjuncts {
+		if x == c {
+			return
+		}
+	}
+	v.Conjuncts = append(v.Conjuncts, c)
+}
+
+func (v *Vertex) AddStruct(s *StructLit, env *Environment, ci CloseInfo) *StructInfo {
+	info := StructInfo{
+		StructLit: s,
+		Env:       env,
+		CloseInfo: ci,
+	}
+	for _, t := range v.Structs {
+		if *t == info {
+			return t
+		}
+	}
+	t := &info
+	v.Structs = append(v.Structs, t)
+	return t
+}
+
+// Path computes the sequence of Features leading from the root of the
+// instance to this Vertex.
+func (v *Vertex) Path() []Feature {
+	return appendPath(nil, v)
+}
+
+func appendPath(a []Feature, v *Vertex) []Feature {
+	if v.Parent == nil {
+		return a
+	}
+	a = appendPath(a, v.Parent)
+	if v.Label != 0 {
+		// A Label may be 0 for programmatically inserted nodes.
+		a = append(a, v.Label)
+	}
+	return a
+}
+
+// A Conjunct is an Environment-Expr pair. The Environment is the starting
+// point for reference lookup for any reference contained in X.
+type Conjunct struct {
+	Env *Environment
+	x   Node
+
+	// CloseInfo is a unique number that tracks a group of conjuncts that need
+	// to belong to a single originating definition.
+	CloseInfo CloseInfo
+}
+
+// TODO(perf): replace with composite literal if this helps performance.
+
+// MakeRootConjunct creates a conjunct from the given environment and node.
+// It panics if x cannot be used as an expression.
+func MakeRootConjunct(env *Environment, x Node) Conjunct { + return MakeConjunct(env, x, CloseInfo{}) +} + +func MakeConjunct(env *Environment, x Node, id CloseInfo) Conjunct { + if env == nil { + // TODO: better is to pass one. + env = &Environment{} + } + switch x.(type) { + case Expr, interface{ expr() Expr }: + default: + panic(fmt.Sprintf("invalid Node type %T", x)) + } + return Conjunct{env, x, id} +} + +func (c *Conjunct) Source() ast.Node { + return c.x.Source() +} + +func (c *Conjunct) Field() Node { + return c.x +} + +func (c *Conjunct) Expr() Expr { + switch x := c.x.(type) { + case Expr: + return x + case interface{ expr() Expr }: + return x.expr() + default: + panic("unreachable") + } +} diff --git a/vendor/cuelang.org/go/internal/core/adt/context.go b/vendor/cuelang.org/go/internal/core/adt/context.go new file mode 100644 index 000000000..ce4fcc8bf --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/context.go @@ -0,0 +1,1239 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +import ( + "fmt" + "log" + "os" + "reflect" + "regexp" + + "github.com/cockroachdb/apd/v2" + "golang.org/x/text/encoding/unicode" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/format" + "cuelang.org/go/cue/token" +) + +// Debug sets whether extra aggressive checking should be done. +// This should typically default to true for pre-releases and default to +// false otherwise. 
+var Debug bool = os.Getenv("CUE_DEBUG") != "0"
+
+// Verbosity sets the log level. There are currently only two levels:
+// 0: no logging
+// 1: logging
+var Verbosity int
+
+// Assertf panics if the condition is false. Assertf can be used to check for
+// conditions that are considered to break an internal invariant or unexpected
+// condition, but that nonetheless probably will be handled correctly down the
+// line. For instance, a faulty condition could lead to an error being caught
+// down the road, but resulting in an inaccurate error message. In production
+// code it is better to deal with the bad error message than to panic.
+//
+// It is advisable for each use of Assertf to document how the error is expected
+// to be handled down the line.
+func Assertf(b bool, format string, args ...interface{}) {
+	if Debug && !b {
+		panic(fmt.Sprintf("assertion failed: "+format, args...))
+	}
+}
+
+// Assertf either panics or reports an error to c if the condition is not met.
+func (c *OpContext) Assertf(pos token.Pos, b bool, format string, args ...interface{}) {
+	if !b {
+		if Debug {
+			panic(fmt.Sprintf("assertion failed: "+format, args...))
+		}
+		c.addErrf(0, pos, format, args...)
+	}
+}
+
+func init() {
+	log.SetFlags(log.Lshortfile)
+}
+
+func Logf(format string, args ...interface{}) {
+	if Verbosity == 0 {
+		return
+	}
+	s := fmt.Sprintf(format, args...)
+	_ = log.Output(2, s)
+}
+
+var pMap = map[*Vertex]int{}
+
+func (c *OpContext) Logf(v *Vertex, format string, args ...interface{}) {
+	if Verbosity == 0 {
+		return
+	}
+	p := pMap[v]
+	if p == 0 {
+		p = len(pMap) + 1
+		pMap[v] = p
+	}
+	a := append([]interface{}{
+		p,
+		v.Label.SelectorString(c),
+		v.Path(),
+	}, args...)
+	for i := 2; i < len(a); i++ {
+		switch x := a[i].(type) {
+		case Node:
+			a[i] = c.Str(x)
+		case Feature:
+			a[i] = x.SelectorString(c)
+		}
+	}
+	s := fmt.Sprintf(" [%d] %s/%v"+format, a...)
+	_ = log.Output(2, s)
+}
+
+// Runtime defines an interface for low-level representation conversion and
+// lookup.
+type Runtime interface {
+	// StringIndexer allows for converting string labels to and from a
+	// canonical numeric representation.
+	StringIndexer
+
+	// LoadImport loads a unique Vertex associated with a given import path. It
+	// returns an error if no import for this package could be found.
+	LoadImport(importPath string) (*Vertex, errors.Error)
+
+	// StoreType associates a CUE expression with a Go type.
+	StoreType(t reflect.Type, src ast.Expr, expr Expr)
+
+	// LoadType retrieves a previously stored CUE expression for a given Go
+	// type if available.
+	LoadType(t reflect.Type) (src ast.Expr, expr Expr, ok bool)
+}
+
+type Config struct {
+	Runtime
+	Format func(Node) string
+}
+
+// New creates an operation context.
+func New(v *Vertex, cfg *Config) *OpContext {
+	if cfg.Runtime == nil {
+		panic("nil Runtime")
+	}
+	ctx := &OpContext{
+		Runtime: cfg.Runtime,
+		Format:  cfg.Format,
+		vertex:  v,
+	}
+	if v != nil {
+		ctx.e = &Environment{Up: nil, Vertex: v}
+	}
+	return ctx
+}
+
+// An OpContext implements CUE's unification operation. Its operations only
+// operate on values that are created with the Runtime with which an OpContext
+// is associated. An OpContext is not goroutine safe and only one goroutine may
+// use an OpContext at a time.
+//
+type OpContext struct {
+	Runtime
+	Format func(Node) string
+
+	stats        Stats
+	freeListNode *nodeContext
+
+	e         *Environment
+	src       ast.Node
+	errs      *Bottom
+	positions []Node // keep track of error positions
+
+	// vertex is used to determine the path location in case of error. Turning
+	// this into a stack could also allow determining the cyclic path for
+	// structural cycle errors.
+ vertex *Vertex + + nonMonotonicLookupNest int32 + nonMonotonicRejectNest int32 + nonMonotonicInsertNest int32 + nonMonotonicGeneration int32 + + // These fields are used associate scratch fields for computing closedness + // of a Vertex. These fields could have been included in StructInfo (like + // Tomabechi's unification algorithm), but we opted for an indirection to + // allow concurrent unification. + // + // TODO(perf): have two generations: one for each pass of the closedness + // algorithm, so that the results of the first pass can be reused for all + // features of a node. + generation int + closed map[*closeInfo]*closeStats + todo *closeStats + + // inDisjunct indicates that non-monotonic checks should be skipped. + // This is used if we want to do some extra work to eliminate disjunctions + // early. The result of unificantion should be thrown away if this check is + // used. + // + // TODO: replace this with a mechanism to determine the correct set (per + // conjunct) of StructInfos to include in closedness checking. + inDisjunct int + + // inConstaint overrides inDisjunct as field matching should always be + // enabled. + inConstraint int +} + +func (n *nodeContext) skipNonMonotonicChecks() bool { + if n.ctx.inConstraint > 0 { + return false + } + return n.ctx.inDisjunct > 0 +} + +// Impl is for internal use only. This will go. +func (c *OpContext) Impl() Runtime { + return c.Runtime +} + +func (c *OpContext) Pos() token.Pos { + if c.src == nil { + return token.NoPos + } + return c.src.Pos() +} + +func (c *OpContext) Source() ast.Node { + return c.src +} + +// NewContext creates an operation context. +func NewContext(r Runtime, v *Vertex) *OpContext { + return New(v, &Config{Runtime: r}) +} + +func (c *OpContext) pos() token.Pos { + if c.src == nil { + return token.NoPos + } + return c.src.Pos() +} + +func (c *OpContext) spawn(node *Vertex) *Environment { + node.Parent = c.e.Vertex // TODO: Is this necessary? 
+ return &Environment{ + Up: c.e, + Vertex: node, + + // Copy cycle data. + Cyclic: c.e.Cyclic, + Deref: c.e.Deref, + Cycles: c.e.Cycles, + } +} + +func (c *OpContext) Env(upCount int32) *Environment { + e := c.e + for ; upCount > 0; upCount-- { + e = e.Up + } + return e +} + +func (c *OpContext) relNode(upCount int32) *Vertex { + e := c.e + for ; upCount > 0; upCount-- { + e = e.Up + } + c.Unify(e.Vertex, Partial) + return e.Vertex +} + +func (c *OpContext) relLabel(upCount int32) Feature { + // locate current label. + e := c.e + for ; upCount > 0; upCount-- { + e = e.Up + } + return e.DynamicLabel +} + +func (c *OpContext) concreteIsPossible(op Op, x Expr) bool { + if !AssertConcreteIsPossible(op, x) { + c.AddErrf("invalid operand %s ('%s' requires concrete value)", + c.Str(x), op) + return false + } + return true +} + +// Assert that the given expression can evaluate to a concrete value. +func AssertConcreteIsPossible(op Op, x Expr) bool { + switch v := x.(type) { + case *Bottom: + case *BoundExpr: + return false + case Value: + return v.Concreteness() == Concrete + } + return true +} + +// HasErr reports whether any error was reported, including whether value +// was incomplete. +func (c *OpContext) HasErr() bool { + return c.errs != nil +} + +func (c *OpContext) Err() *Bottom { + b := c.errs + c.errs = nil + return b +} + +func (c *OpContext) addErrf(code ErrorCode, pos token.Pos, msg string, args ...interface{}) { + for i, a := range args { + switch x := a.(type) { + case Node: + args[i] = c.Str(x) + case ast.Node: + b, _ := format.Node(x) + args[i] = string(b) + case Feature: + args[i] = x.SelectorString(c.Runtime) + } + } + + err := c.NewPosf(pos, msg, args...) + c.addErr(code, err) +} + +func (c *OpContext) addErr(code ErrorCode, err errors.Error) { + c.AddBottom(&Bottom{Code: code, Err: err}) +} + +// AddBottom records an error in OpContext. 
+func (c *OpContext) AddBottom(b *Bottom) { + c.errs = CombineErrors(c.src, c.errs, b) +} + +// AddErr records an error in OpContext. It returns errors collected so far. +func (c *OpContext) AddErr(err errors.Error) *Bottom { + if err != nil { + c.AddBottom(&Bottom{Err: err}) + } + return c.errs +} + +// NewErrf creates a *Bottom value and returns it. The returned uses the +// current source as the point of origin of the error. +func (c *OpContext) NewErrf(format string, args ...interface{}) *Bottom { + // TODO: consider renaming ot NewBottomf: this is now confusing as we also + // have Newf. + err := c.Newf(format, args...) + return &Bottom{Src: c.src, Err: err, Code: EvalError} +} + +// AddErrf records an error in OpContext. It returns errors collected so far. +func (c *OpContext) AddErrf(format string, args ...interface{}) *Bottom { + return c.AddErr(c.Newf(format, args...)) +} + +type frame struct { + env *Environment + err *Bottom + src ast.Node +} + +func (c *OpContext) PushState(env *Environment, src ast.Node) (saved frame) { + saved.env = c.e + saved.err = c.errs + saved.src = c.src + + c.errs = nil + if src != nil { + c.src = src + } + c.e = env + + return saved +} + +func (c *OpContext) PopState(s frame) *Bottom { + err := c.errs + c.e = s.env + c.errs = s.err + c.src = s.src + return err +} + +// PushArc signals c that arc v is currently being processed for the purpose +// of error reporting. PopArc should be called with the returned value once +// processing of v is completed. +func (c *OpContext) PushArc(v *Vertex) (saved *Vertex) { + c.vertex, saved = v, c.vertex + return saved +} + +// PopArc signals completion of processing the current arc. +func (c *OpContext) PopArc(saved *Vertex) { + c.vertex = saved +} + +// Resolve finds a node in the tree. +// +// Should only be used to insert Conjuncts. TODO: perhaps only return Conjuncts +// and error. 
+func (c *OpContext) Resolve(env *Environment, r Resolver) (*Vertex, *Bottom) { + s := c.PushState(env, r.Source()) + + arc := r.resolve(c, Partial) + + err := c.PopState(s) + if err != nil { + return nil, err + } + + if arc.ChildErrors != nil && arc.ChildErrors.Code == StructuralCycleError { + return nil, arc.ChildErrors + } + + for { + x, ok := arc.BaseValue.(*Vertex) + if !ok { + break + } + arc = x + } + + return arc, err +} + +// Validate calls validates value for the given validator. +// +// TODO(errors): return boolean instead: only the caller has enough information +// to generate a proper error message. +func (c *OpContext) Validate(check Validator, value Value) *Bottom { + // TODO: use a position stack to push both values. + saved := c.src + c.src = check.Source() + + err := check.validate(c, value) + + c.src = saved + + return err +} + +// Yield evaluates a Yielder and calls f for each result. +func (c *OpContext) Yield(env *Environment, y Yielder, f YieldFunc) *Bottom { + s := c.PushState(env, y.Source()) + + y.yield(c, f) + + return c.PopState(s) + +} + +// Concrete returns the concrete value of x after evaluating it. +// msg is used to mention the context in which an error occurred, if any. +func (c *OpContext) Concrete(env *Environment, x Expr, msg interface{}) (result Value, complete bool) { + + v, complete := c.Evaluate(env, x) + + v, ok := c.getDefault(v) + if !ok { + return v, false + } + v = Unwrap(v) + + if !IsConcrete(v) { + complete = false + b := c.NewErrf("non-concrete value %v in operand to %s", c.Str(v), msg) + b.Code = IncompleteError + v = b + } + + if !complete { + return v, complete + } + + return v, true +} + +// getDefault resolves a disjunction to a single value. If there is no default +// value, or if there is more than one default value, it reports an "incomplete" +// error and return false. In all other cases it will return true, even if +// v is already an error. v may be nil, in which case it will also return nil. 
+func (c *OpContext) getDefault(v Value) (result Value, ok bool) { + var d *Disjunction + switch x := v.(type) { + default: + return v, true + + case *Vertex: + // TODO: return vertex if not disjunction. + switch t := x.BaseValue.(type) { + case *Disjunction: + d = t + + case *Vertex: + return c.getDefault(t) + + default: + return x, true + } + + case *Disjunction: + d = x + } + + if d.NumDefaults != 1 { + c.addErrf(IncompleteError, c.pos(), + "unresolved disjunction %s (type %s)", c.Str(d), d.Kind()) + return nil, false + } + return c.getDefault(d.Values[0]) +} + +// Evaluate evaluates an expression within the given environment and indicates +// whether the result is complete. It will always return a non-nil result. +func (c *OpContext) Evaluate(env *Environment, x Expr) (result Value, complete bool) { + s := c.PushState(env, x.Source()) + + val := c.evalState(x, Partial) + + complete = true + + if err, _ := val.(*Bottom); err != nil && err.IsIncomplete() { + complete = false + } + if val == nil { + complete = false + // TODO ENSURE THIS DOESN"T HAPPEN> + val = &Bottom{ + Code: IncompleteError, + Err: c.Newf("UNANTICIPATED ERROR"), + } + + } + + _ = c.PopState(s) + + if !complete || val == nil { + return val, false + } + + return val, true +} + +func (c *OpContext) evaluateRec(env *Environment, x Expr, state VertexStatus) Value { + s := c.PushState(env, x.Source()) + + val := c.evalState(x, state) + if val == nil { + // Be defensive: this never happens, but just in case. + Assertf(false, "nil return value: unspecified error") + val = &Bottom{ + Code: IncompleteError, + Err: c.Newf("UNANTICIPATED ERROR"), + } + } + _ = c.PopState(s) + + return val +} + +// value evaluates expression v within the current environment. The result may +// be nil if the result is incomplete. value leaves errors untouched to that +// they can be collected by the caller. 
+func (c *OpContext) value(x Expr) (result Value) { + v := c.evalState(x, Partial) + + v, _ = c.getDefault(v) + v = Unwrap(v) + return v +} + +func (c *OpContext) evalState(v Expr, state VertexStatus) (result Value) { + savedSrc := c.src + c.src = v.Source() + err := c.errs + c.errs = nil + + defer func() { + c.errs = CombineErrors(c.src, c.errs, err) + + if v, ok := result.(*Vertex); ok { + if b, _ := v.BaseValue.(*Bottom); b != nil { + switch b.Code { + case IncompleteError: + case CycleError: + if state == Partial { + break + } + fallthrough + default: + result = b + } + } + } + + // TODO: remove this when we handle errors more principally. + if b, ok := result.(*Bottom); ok { + if c.src != nil && + b.Code == CycleError && + b.Err.Position() == token.NoPos && + len(b.Err.InputPositions()) == 0 { + bb := *b + bb.Err = errors.Wrapf(b.Err, c.src.Pos(), "") + result = &bb + } + c.errs = CombineErrors(c.src, c.errs, result) + } + if c.errs != nil { + result = c.errs + } + c.src = savedSrc + }() + + switch x := v.(type) { + case Value: + return x + + case Evaluator: + v := x.evaluate(c) + return v + + case Resolver: + arc := x.resolve(c, state) + if c.HasErr() { + return nil + } + if arc == nil { + return nil + } + + v := c.evaluate(arc, state) + return v + + default: + // This can only happen, really, if v == nil, which is not allowed. + panic(fmt.Sprintf("unexpected Expr type %T", v)) + } +} + +// unifyNode returns a possibly partially evaluated node value. 
+// +// TODO: maybe return *Vertex, *Bottom +// +func (c *OpContext) unifyNode(v Expr, state VertexStatus) (result Value) { + savedSrc := c.src + c.src = v.Source() + err := c.errs + c.errs = nil + + defer func() { + c.errs = CombineErrors(c.src, c.errs, err) + + if v, ok := result.(*Vertex); ok { + if b, _ := v.BaseValue.(*Bottom); b != nil { + switch b.Code { + case IncompleteError: + case CycleError: + if state == Partial { + break + } + fallthrough + default: + result = b + } + } + } + + // TODO: remove this when we handle errors more principally. + if b, ok := result.(*Bottom); ok { + if c.src != nil && + b.Code == CycleError && + b.Err.Position() == token.NoPos && + len(b.Err.InputPositions()) == 0 { + bb := *b + bb.Err = errors.Wrapf(b.Err, c.src.Pos(), "") + result = &bb + } + c.errs = CombineErrors(c.src, c.errs, result) + } + if c.errs != nil { + result = c.errs + } + c.src = savedSrc + }() + + switch x := v.(type) { + case Value: + return x + + case Evaluator: + v := x.evaluate(c) + return v + + case Resolver: + v := x.resolve(c, state) + if c.HasErr() { + return nil + } + if v == nil { + return nil + } + + if v.isUndefined() { + // Use node itself to allow for cycle detection. + c.Unify(v, AllArcs) + } + + return v + + default: + // This can only happen, really, if v == nil, which is not allowed. + panic(fmt.Sprintf("unexpected Expr type %T", v)) + } +} + +func (c *OpContext) lookup(x *Vertex, pos token.Pos, l Feature, state VertexStatus) *Vertex { + if l == InvalidLabel || x == nil { + // TODO: is it possible to have an invalid label here? Maybe through the + // API? 
+ return &Vertex{} + } + + // var kind Kind + // if x.BaseValue != nil { + // kind = x.BaseValue.Kind() + // } + + switch x.BaseValue.(type) { + case *StructMarker: + if l.Typ() == IntLabel { + c.addErrf(0, pos, "invalid struct selector %s (type int)", l) + } + + case *ListMarker: + switch { + case l.Typ() == IntLabel: + switch { + case l.Index() < 0: + c.addErrf(0, pos, "invalid list index %s (index must be non-negative)", l) + return nil + case l.Index() > len(x.Arcs): + c.addErrf(0, pos, "invalid list index %s (out of bounds)", l) + return nil + } + + case l.IsDef(), l.IsHidden(): + + default: + c.addErrf(0, pos, "invalid list index %s (type string)", l) + return nil + } + + case nil: + // c.addErrf(IncompleteError, pos, "incomplete value %s", c.Str(x)) + // return nil + + case *Bottom: + + default: + kind := x.BaseValue.Kind() + if kind&(ListKind|StructKind) != 0 { + // c.addErrf(IncompleteError, pos, + // "cannot look up %s in incomplete type %s (type %s)", + // l, x.Source(), kind) + // return nil + } else if !l.IsDef() && !l.IsHidden() { + c.addErrf(0, pos, + "invalid selector %s for value of type %s", l, kind) + return nil + } + } + + a := x.Lookup(l) + + var hasCycle bool +outer: + switch { + case c.nonMonotonicLookupNest == 0 && c.nonMonotonicRejectNest == 0: + case a != nil: + if state == Partial { + a.nonMonotonicLookupGen = c.nonMonotonicGeneration + } + + case x.state != nil && state == Partial: + for _, e := range x.state.exprs { + if isCyclePlaceholder(e.err) { + hasCycle = true + } + } + for _, a := range x.state.usedArcs { + if a.Label == l { + a.nonMonotonicLookupGen = c.nonMonotonicGeneration + if c.nonMonotonicRejectNest > 0 { + a.nonMonotonicReject = true + } + break outer + } + } + a := &Vertex{Label: l, nonMonotonicLookupGen: c.nonMonotonicGeneration} + if c.nonMonotonicRejectNest > 0 { + a.nonMonotonicReject = true + } + x.state.usedArcs = append(x.state.usedArcs, a) + } + if a == nil { + if x.state != nil { + for _, e := range 
x.state.exprs { + if isCyclePlaceholder(e.err) { + hasCycle = true + } + } + } + code := IncompleteError + if !x.Accept(c, l) { + code = 0 + } else if hasCycle { + code = CycleError + } + // TODO: if the struct was a literal struct, we can also treat it as + // closed and make this a permanent error. + label := l.SelectorString(c.Runtime) + + // TODO(errors): add path reference and make message + // "undefined field %s in %s" + if l.IsInt() { + c.addErrf(code, pos, "index out of range [%d] with length %d", + l.Index(), len(x.Elems())) + } else { + if code != 0 && x.IsOptional(l) { + c.addErrf(code, pos, + "cannot reference optional field %s", label) + } else { + c.addErrf(code, pos, "undefined field %s", label) + } + } + } + return a +} + +func (c *OpContext) Label(src Expr, x Value) Feature { + return labelFromValue(c, src, x) +} + +func (c *OpContext) typeError(v Value, k Kind) { + if isError(v) { + return + } + if !IsConcrete(v) && v.Kind()&k != 0 { + c.addErrf(IncompleteError, pos(v), + "incomplete %s value '%s'", k, c.Str(v)) + } else { + c.AddErrf("cannot use %s (type %s) as type %s", c.Str(v), v.Kind(), k) + } +} + +func (c *OpContext) typeErrorAs(v Value, k Kind, as interface{}) { + if as == nil { + c.typeError(v, k) + return + } + if isError(v) { + return + } + if !IsConcrete(v) && v.Kind()&k != 0 { + c.addErrf(IncompleteError, pos(v), + "incomplete %s value '%s' in as", k, c.Str(v), as) + } else { + c.AddErrf("cannot use %s (type %s) as type %s in %v", + c.Str(v), v.Kind(), k, as) + } +} + +var emptyNode = &Vertex{} + +func pos(x Node) token.Pos { + if x.Source() == nil { + return token.NoPos + } + return x.Source().Pos() +} + +func (c *OpContext) node(orig Node, x Expr, scalar bool, state VertexStatus) *Vertex { + // TODO: always get the vertex. This allows a whole bunch of trickery + // down the line. + v := c.unifyNode(x, state) + + v, ok := c.getDefault(v) + if !ok { + // Error already generated by getDefault. 
+ return emptyNode + } + + // The two if blocks below are rather subtle. If we have an error of + // the sentinel value cycle, we have earlier determined that the cycle is + // allowed and that it can be ignored here. Any other CycleError is an + // annotated cycle error that could be taken as is. + // TODO: do something simpler. + if scalar { + if w := Unwrap(v); !isCyclePlaceholder(w) { + v = w + } + } + + node, ok := v.(*Vertex) + if ok && !isCyclePlaceholder(node.BaseValue) { + v = node.Value() + } + + switch nv := v.(type) { + case nil: + switch orig.(type) { + case *ForClause: + c.addErrf(IncompleteError, pos(x), + "cannot range over %s (incomplete)", + c.Str(x)) + default: + c.addErrf(IncompleteError, pos(x), + "%s undefined (%s is incomplete)", c.Str(orig), c.Str(x)) + } + return emptyNode + + case *Bottom: + // TODO: this is a bit messy. In some cases errors are already added + // and in some cases not. Not a huge deal, as errors will be uniqued + // down the line, but could be better. + c.AddBottom(nv) + return emptyNode + + case *Vertex: + if node == nil { + panic("unexpected markers with nil node") + } + + default: + if kind := v.Kind(); kind&StructKind != 0 { + switch orig.(type) { + case *ForClause: + c.addErrf(IncompleteError, pos(x), + "cannot range over %s (incomplete type %s)", + c.Str(x), kind) + default: + c.addErrf(IncompleteError, pos(x), + "%s undefined as %s is incomplete (type %s)", + c.Str(orig), c.Str(x), kind) + } + return emptyNode + + } else if !ok { + c.addErrf(0, pos(x), // TODO(error): better message. + "invalid operand %s (found %s, want list or struct)", + x.Source(), v.Kind()) + return emptyNode + } + } + + return node +} + +// Elems returns the elements of a list. 
+func (c *OpContext) Elems(v Value) []*Vertex { + list := c.list(v) + return list.Elems() +} + +func (c *OpContext) list(v Value) *Vertex { + x, ok := v.(*Vertex) + if !ok || !x.IsList() { + c.typeError(v, ListKind) + return emptyNode + } + return x +} + +func (c *OpContext) scalar(v Value) Value { + v = Unwrap(v) + switch v.(type) { + case *Null, *Bool, *Num, *String, *Bytes: + default: + c.typeError(v, ScalarKinds) + } + return v +} + +var zero = &Num{K: NumKind} + +func (c *OpContext) Num(v Value, as interface{}) *Num { + v = Unwrap(v) + if isError(v) { + return zero + } + x, ok := v.(*Num) + if !ok { + c.typeErrorAs(v, NumKind, as) + return zero + } + return x +} + +func (c *OpContext) Int64(v Value) int64 { + v = Unwrap(v) + if isError(v) { + return 0 + } + x, ok := v.(*Num) + if !ok { + c.typeError(v, IntKind) + return 0 + } + i, err := x.X.Int64() + if err != nil { + c.AddErrf("number is not an int64: %v", err) + return 0 + } + return i +} + +func (c *OpContext) uint64(v Value, as string) uint64 { + v = Unwrap(v) + if isError(v) { + return 0 + } + x, ok := v.(*Num) + if !ok { + c.typeErrorAs(v, IntKind, as) + return 0 + } + if x.X.Negative { + // TODO: improve message + c.AddErrf("cannot convert negative number to uint64") + return 0 + } + if !x.X.Coeff.IsUint64() { + // TODO: improve message + c.AddErrf("cannot convert number %s to uint64", x.X) + return 0 + } + return x.X.Coeff.Uint64() +} + +func (c *OpContext) BoolValue(v Value) bool { + return c.boolValue(v, nil) +} + +func (c *OpContext) boolValue(v Value, as interface{}) bool { + v = Unwrap(v) + if isError(v) { + return false + } + x, ok := v.(*Bool) + if !ok { + c.typeErrorAs(v, BoolKind, as) + return false + } + return x.B +} + +func (c *OpContext) StringValue(v Value) string { + return c.stringValue(v, nil) +} + +// ToBytes returns the bytes value of a scalar value. 
+func (c *OpContext) ToBytes(v Value) []byte { + if x, ok := v.(*Bytes); ok { + return x.B + } + return []byte(c.ToString(v)) +} + +// ToString returns the string value of a scalar value. +func (c *OpContext) ToString(v Value) string { + return c.toStringValue(v, StringKind|NumKind|BytesKind|BoolKind, nil) + +} + +func (c *OpContext) stringValue(v Value, as interface{}) string { + return c.toStringValue(v, StringKind, as) +} + +func (c *OpContext) toStringValue(v Value, k Kind, as interface{}) string { + v = Unwrap(v) + if isError(v) { + return "" + } + if v.Kind()&k == 0 { + if as == nil { + c.typeError(v, k) + } else { + c.typeErrorAs(v, k, as) + } + return "" + } + switch x := v.(type) { + case *String: + return x.Str + + case *Bytes: + return bytesToString(x.B) + + case *Num: + return x.X.String() + + case *Bool: + if x.B { + return "true" + } + return "false" + + default: + c.addErrf(IncompleteError, c.pos(), + "non-concrete value %s (type %s)", c.Str(v), v.Kind()) + } + return "" +} + +func bytesToString(b []byte) string { + b, _ = unicode.UTF8.NewDecoder().Bytes(b) + return string(b) +} + +func (c *OpContext) bytesValue(v Value, as interface{}) []byte { + v = Unwrap(v) + if isError(v) { + return nil + } + x, ok := v.(*Bytes) + if !ok { + c.typeErrorAs(v, BytesKind, as) + return nil + } + return x.B +} + +var matchNone = regexp.MustCompile("^$") + +func (c *OpContext) regexp(v Value) *regexp.Regexp { + v = Unwrap(v) + if isError(v) { + return matchNone + } + switch x := v.(type) { + case *String: + if x.RE != nil { + return x.RE + } + // TODO: synchronization + p, err := regexp.Compile(x.Str) + if err != nil { + // FatalError? 
How to cache error + c.AddErrf("invalid regexp: %s", err) + x.RE = matchNone + } else { + x.RE = p + } + return x.RE + + case *Bytes: + if x.RE != nil { + return x.RE + } + // TODO: synchronization + p, err := regexp.Compile(string(x.B)) + if err != nil { + c.AddErrf("invalid regexp: %s", err) + x.RE = matchNone + } else { + x.RE = p + } + return x.RE + + default: + c.typeError(v, StringKind|BytesKind) + return matchNone + } +} + +// newNum creates a new number of the given kind. It reports an error value +// instead if any error occurred. +func (c *OpContext) newNum(d *apd.Decimal, k Kind, sources ...Node) Value { + if c.HasErr() { + return c.Err() + } + return &Num{Src: c.src, X: *d, K: k} +} + +func (c *OpContext) NewInt64(n int64, sources ...Node) Value { + if c.HasErr() { + return c.Err() + } + d := apd.New(n, 0) + return &Num{Src: c.src, X: *d, K: IntKind} +} + +func (c *OpContext) NewString(s string) Value { + if c.HasErr() { + return c.Err() + } + return &String{Src: c.src, Str: s} +} + +func (c *OpContext) newBytes(b []byte) Value { + if c.HasErr() { + return c.Err() + } + return &Bytes{Src: c.src, B: b} +} + +func (c *OpContext) newBool(b bool) Value { + if c.HasErr() { + return c.Err() + } + return &Bool{Src: c.src, B: b} +} + +func (c *OpContext) newList(src ast.Node, parent *Vertex) *Vertex { + return &Vertex{Parent: parent, BaseValue: &ListMarker{}} +} + +// Str reports a debug string of x. +func (c *OpContext) Str(x Node) string { + if c.Format == nil { + return fmt.Sprintf("%T", x) + } + return c.Format(x) +} + +// NewList returns a new list for the given values. +func (c *OpContext) NewList(values ...Value) *Vertex { + // TODO: consider making this a literal list instead. 
+ list := &ListLit{} + v := &Vertex{ + Conjuncts: []Conjunct{{Env: nil, x: list}}, + } + + for _, x := range values { + list.Elems = append(list.Elems, x) + } + c.Unify(v, Finalized) + return v +} diff --git a/vendor/cuelang.org/go/internal/core/adt/decimal.go b/vendor/cuelang.org/go/internal/core/adt/decimal.go new file mode 100644 index 000000000..e7eba3856 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/decimal.go @@ -0,0 +1,131 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package adt + +import ( + "math/big" + + "github.com/cockroachdb/apd/v2" +) + +var apdCtx apd.Context + +func init() { + apdCtx = apd.BaseContext + apdCtx.Precision = 24 +} + +func (n *Num) Impl() *apd.Decimal { + return &n.X +} + +func (n *Num) Negative() bool { + return n.X.Negative +} + +func (a *Num) Cmp(b *Num) int { + return a.X.Cmp(&b.X) +} + +func (c *OpContext) Add(a, b *Num) Value { + return numOp(c, apdCtx.Add, a, b) +} + +func (c *OpContext) Sub(a, b *Num) Value { + return numOp(c, apdCtx.Sub, a, b) +} + +func (c *OpContext) Mul(a, b *Num) Value { + return numOp(c, apdCtx.Mul, a, b) +} + +func (c *OpContext) Quo(a, b *Num) Value { + v := numOp(c, apdCtx.Quo, a, b) + if n, ok := v.(*Num); ok { + n.K = FloatKind + } + return v +} + +func (c *OpContext) Pow(a, b *Num) Value { + return numOp(c, apdCtx.Pow, a, b) +} + +type numFunc func(z, x, y *apd.Decimal) (apd.Condition, error) + +func numOp(c *OpContext, fn numFunc, x, y *Num) Value { + var d apd.Decimal + + cond, err := fn(&d, &x.X, &y.X) + + if err != nil { + return c.NewErrf("failed arithmetic: %v", err) + } + + if cond.DivisionByZero() { + return c.NewErrf("division by zero") + } + + k := x.Kind() & y.Kind() + if k == 0 { + k = FloatKind + } + return c.newNum(&d, k) +} + +func (c *OpContext) IntDiv(a, b *Num) Value { + return intDivOp(c, (*big.Int).Div, a, b) +} + +func (c *OpContext) IntMod(a, b *Num) Value { + return intDivOp(c, (*big.Int).Mod, a, b) +} + +func (c *OpContext) IntQuo(a, b *Num) Value { + return intDivOp(c, (*big.Int).Quo, a, b) +} + +func (c *OpContext) IntRem(a, b *Num) Value { + return intDivOp(c, (*big.Int).Rem, a, b) +} + +type intFunc func(z, x, y *big.Int) *big.Int + +func intDivOp(c *OpContext, fn intFunc, a, b *Num) Value { + if b.X.IsZero() { + return c.NewErrf("division by zero") + } + + var x, y apd.Decimal + _, _ = apdCtx.RoundToIntegralValue(&x, &a.X) + if x.Negative { + x.Coeff.Neg(&x.Coeff) + } + _, _ = apdCtx.RoundToIntegralValue(&y, &b.X) + if y.Negative { + 
y.Coeff.Neg(&y.Coeff) + } + + var d apd.Decimal + + fn(&d.Coeff, &x.Coeff, &y.Coeff) + + if d.Coeff.Sign() < 0 { + d.Coeff.Neg(&d.Coeff) + d.Negative = true + } + + return c.newNum(&d, IntKind) +} diff --git a/vendor/cuelang.org/go/internal/core/adt/default.go b/vendor/cuelang.org/go/internal/core/adt/default.go new file mode 100644 index 000000000..31e137fcb --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/default.go @@ -0,0 +1,137 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +// Default returns the default value or itself if there is no default. +func Default(v Value) Value { + switch x := v.(type) { + case *Vertex: + return x.Default() + case *Disjunction: + return x.Default() + default: + return v + } +} + +func (d *Disjunction) Default() Value { + switch d.NumDefaults { + case 0: + if d.HasDefaults { + // empty disjunction + return &Bottom{} + } + return d + case 1: + return d.Values[0] + default: + return &Disjunction{ + Src: d.Src, + Values: d.Values[:d.NumDefaults], + NumDefaults: 0, + } + } +} + +// Default returns the default value or itself if there is no default. +// +// It also closes a list, representing its default value. 
+func (v *Vertex) Default() *Vertex { + switch d := v.BaseValue.(type) { + default: + return v + + case *Disjunction: + var w *Vertex + + switch d.NumDefaults { + case 0: + if d.HasDefaults { + v = &Vertex{ + Parent: v.Parent, + status: Finalized, + BaseValue: &Bottom{}, + } + } + return v + case 1: + w = d.Values[0] + default: + x := *v + x.state = nil + x.BaseValue = &Disjunction{ + Src: d.Src, + Values: d.Values[:d.NumDefaults], + NumDefaults: 0, + } + w = &x + } + + w.Conjuncts = nil + for _, c := range v.Conjuncts { + // TODO: preserve field information. + expr, _ := stripNonDefaults(c.Expr()) + w.Conjuncts = append(w.Conjuncts, MakeRootConjunct(c.Env, expr)) + } + return w + + case *ListMarker: + m := *d + m.IsOpen = false + + w := *v + w.BaseValue = &m + w.state = nil + return &w + } +} + +// TODO: this should go: record preexpanded disjunctions in Vertex. +func stripNonDefaults(expr Expr) (r Expr, stripped bool) { + switch x := expr.(type) { + case *DisjunctionExpr: + if !x.HasDefaults { + return x, false + } + d := *x + d.Values = []Disjunct{} + for _, v := range x.Values { + if v.Default { + d.Values = append(d.Values, v) + } + } + if len(d.Values) == 1 { + return d.Values[0].Val, true + } + return &d, true + + case *BinaryExpr: + if x.Op != AndOp { + return x, false + } + a, sa := stripNonDefaults(x.X) + b, sb := stripNonDefaults(x.Y) + if sa || sb { + bin := *x + bin.X = a + bin.Y = b + return &bin, true + } + return x, false + + default: + return x, false + } +} diff --git a/vendor/cuelang.org/go/internal/core/adt/disjunct.go b/vendor/cuelang.org/go/internal/core/adt/disjunct.go new file mode 100644 index 000000000..8f2074a40 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/disjunct.go @@ -0,0 +1,588 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +import ( + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +// Nodes man not reenter a disjunction. +// +// Copy one layer deep; throw away items on failure. + +// DISJUNCTION ALGORITHM +// +// The basic concept of the algorithm is to use backtracking to find valid +// disjunctions. The algorithm can stop if two matching disjuncts are found +// where one does not subsume the other. +// +// At a later point, we can introduce a filter step to filter out possible +// disjuncts based on, say, discriminator fields or field exclusivity (oneOf +// fields in Protobuf). +// +// To understand the details of the algorithm, it is important to understand +// some properties of disjunction. +// +// +// EVALUATION OF A DISJUNCTION IS SELF CONTAINED +// +// In other words, fields outside of a disjunction cannot bind to values within +// a disjunction whilst evaluating that disjunction. This allows the computation +// of disjunctions to be isolated from side effects. +// +// The intuition behind this is as follows: as a disjunction is not a concrete +// value, it is not possible to lookup a field within a disjunction if it has +// not yet been evaluated. So if a reference within a disjunction that is needed +// to disambiguate that disjunction refers to a field outside the scope of the +// disjunction which, in turn, refers to a field within the disjunction, this +// results in a cycle error. We achieve this by not removing the cycle marker of +// the Vertex of the disjunction until the disjunction is resolved. 
+// +// Note that the following disjunct is still allowed: +// +// a: 1 +// b: a +// +// Even though `a` refers to the root of the disjunction, it does not _select +// into_ the disjunction. Implementation-wise, it also doesn't have to, as the +// respective vertex is available within the Environment. Referencing a node +// outside the disjunction that in turn selects the disjunction root, however, +// will result in a detected cycle. +// +// As usual, cycle detection should be interpreted marked as incomplete, so that +// the referring node will not be fixed to an error prematurely. +// +// +// SUBSUMPTION OF AMBIGUOUS DISJUNCTS +// +// A disjunction can be evaluated to a concrete value if only one disjunct +// remains. Aside from disambiguating through unification failure, disjuncts +// may also be disambiguated by taking the least specific of two disjuncts. +// For instance, if a subsumes b, then the result of disjunction may be a. +// +// NEW ALGORITHM NO LONGER VERIFIES SUBSUMPTION. SUBSUMPTION IS INHERENTLY +// IMPRECISE (DUE TO BULK OPTIONAL FIELDS). OTHER THAN THAT, FOR SCALAR VALUES +// IT JUST MEANS THERE IS AMBIGUITY, AND FOR STRUCTS IT CAN LEAD TO STRANGE +// CONSEQUENCES. +// +// USE EQUALITY INSTEAD: +// - Undefined == error for optional fields. +// - So only need to check exact labels for vertices. + +type envDisjunct struct { + env *Environment + cloneID CloseInfo + expr *DisjunctionExpr + value *Disjunction + hasDefaults bool + + // These are used for book keeping, tracking whether any of the + // disjuncts marked with a default marker remains after unification. + // If no default is used, all other elements are treated as "maybeDefault". + // Otherwise, elements are treated as is. 
+ parentDefaultUsed bool + childDefaultUsed bool +} + +func (n *nodeContext) addDisjunction(env *Environment, x *DisjunctionExpr, cloneID CloseInfo) { + + // TODO: precompute + numDefaults := 0 + for _, v := range x.Values { + isDef := v.Default // || n.hasDefaults(env, v.Val) + if isDef { + numDefaults++ + } + } + + n.disjunctions = append(n.disjunctions, + envDisjunct{env, cloneID, x, nil, numDefaults > 0, false, false}) +} + +func (n *nodeContext) addDisjunctionValue(env *Environment, x *Disjunction, cloneID CloseInfo) { + n.disjunctions = append(n.disjunctions, + envDisjunct{env, cloneID, nil, x, x.HasDefaults, false, false}) + +} + +func (n *nodeContext) expandDisjuncts( + state VertexStatus, + parent *nodeContext, + parentMode defaultMode, // default mode of this disjunct + recursive, last bool) { + + n.ctx.stats.DisjunctCount++ + + node := n.node + defer func() { + n.node = node + }() + + for n.expandOne() { + } + + // save node to snapShot in nodeContex + // save nodeContext. + + if recursive || len(n.disjunctions) > 0 { + n.snapshot = clone(*n.node) + } else { + n.snapshot = *n.node + } + + defaultOffset := len(n.usedDefault) + + switch { + default: // len(n.disjunctions) == 0 + m := *n + n.postDisjunct(state) + + switch { + case n.hasErr(): + // TODO: consider finalizing the node thusly: + // if recursive { + // n.node.Finalize(n.ctx) + // } + x := n.node + err, ok := x.BaseValue.(*Bottom) + if !ok { + err = n.getErr() + } + if err == nil { + // TODO(disjuncts): Is this always correct? Especially for partial + // evaluation it is okay for child errors to have incomplete errors. + // Perhaps introduce an Err() method. + err = x.ChildErrors + } + if err.IsIncomplete() { + break + } + if err != nil { + parent.disjunctErrs = append(parent.disjunctErrs, err) + } + if recursive { + n.free() + } + return + } + + if recursive { + *n = m + n.result = *n.node // XXX: n.result = snapshotVertex(n.node)? 
+ n.node = &n.result + n.disjuncts = append(n.disjuncts, n) + } + if n.node.BaseValue == nil { + n.node.BaseValue = n.getValidators() + } + + n.usedDefault = append(n.usedDefault, defaultInfo{ + parentMode: parentMode, + nestedMode: parentMode, + origMode: parentMode, + }) + + case len(n.disjunctions) > 0: + // Process full disjuncts to ensure that erroneous disjuncts are + // eliminated as early as possible. + state = Finalized + + n.disjuncts = append(n.disjuncts, n) + + n.refCount++ + defer n.free() + + for i, d := range n.disjunctions { + a := n.disjuncts + n.disjuncts = n.buffer[:0] + n.buffer = a[:0] + + last := i+1 == len(n.disjunctions) + skipNonMonotonicChecks := i+1 < len(n.disjunctions) + if skipNonMonotonicChecks { + n.ctx.inDisjunct++ + } + + for _, dn := range a { + switch { + case d.expr != nil: + for _, v := range d.expr.Values { + cn := dn.clone() + *cn.node = clone(dn.snapshot) + cn.node.state = cn + + c := MakeConjunct(d.env, v.Val, d.cloneID) + cn.addExprConjunct(c) + + newMode := mode(d.hasDefaults, v.Default) + + cn.expandDisjuncts(state, n, newMode, true, last) + } + + case d.value != nil: + for i, v := range d.value.Values { + cn := dn.clone() + *cn.node = clone(dn.snapshot) + cn.node.state = cn + + cn.addValueConjunct(d.env, v, d.cloneID) + + newMode := mode(d.hasDefaults, i < d.value.NumDefaults) + + cn.expandDisjuncts(state, n, newMode, true, last) + } + } + } + + if skipNonMonotonicChecks { + n.ctx.inDisjunct-- + } + + if len(n.disjuncts) == 0 { + n.makeError() + } + + if recursive || i > 0 { + for _, x := range a { + x.free() + } + } + + if len(n.disjuncts) == 0 { + break + } + } + + // Annotate disjunctions with whether any of the default disjunctions + // was used. 
+ for _, d := range n.disjuncts { + for i, info := range d.usedDefault[defaultOffset:] { + if info.parentMode == isDefault { + n.disjunctions[i].parentDefaultUsed = true + } + if info.origMode == isDefault { + n.disjunctions[i].childDefaultUsed = true + } + } + } + + // Combine parent and child default markers, considering that a parent + // "notDefault" is treated as "maybeDefault" if none of the disjuncts + // marked as default remain. + // + // NOTE for a parent marked as "notDefault", a child is *never* + // considered as default. It may either be "not" or "maybe" default. + // + // The result for each disjunction is conjoined into a single value. + for _, d := range n.disjuncts { + m := maybeDefault + orig := maybeDefault + for i, info := range d.usedDefault[defaultOffset:] { + parent := info.parentMode + + used := n.disjunctions[i].parentDefaultUsed + childUsed := n.disjunctions[i].childDefaultUsed + hasDefaults := n.disjunctions[i].hasDefaults + + orig = combineDefault(orig, info.parentMode) + orig = combineDefault(orig, info.nestedMode) + + switch { + case childUsed: + // One of the children used a default. This is "normal" + // mode. This may also happen when we are in + // hasDefaults/notUsed mode. Consider + // + // ("a" | "b") & (*(*"a" | string) | string) + // + // Here the doubly nested default is called twice, once + // for "a" and then for "b", where the second resolves to + // not using a default. The first does, however, and on that + // basis the "ot default marker cannot be overridden. 
+ m = combineDefault(m, info.parentMode) + m = combineDefault(m, info.origMode) + + case !hasDefaults, used: + m = combineDefault(m, info.parentMode) + m = combineDefault(m, info.nestedMode) + + case hasDefaults && !used: + Assertf(parent == notDefault, "unexpected default mode") + } + } + d.defaultMode = m + + d.usedDefault = d.usedDefault[:defaultOffset] + d.usedDefault = append(d.usedDefault, defaultInfo{ + parentMode: parentMode, + nestedMode: m, + origMode: orig, + }) + + } + + // TODO: this is an old trick that seems no longer necessary for the new + // implementation. Keep around until we finalize the semantics for + // defaults, though. The recursion of nested defaults is not entirely + // proper yet. + // + // A better approach, that avoids the need for recursion (semantically), + // would be to only consider default usage for one level, but then to + // also allow a default to be passed if only one value is remaining. + // This means that a nested subsumption would first have to be evaluated + // in isolation, however, to determine that it is not previous + // disjunctions that cause the disambiguation. + // + // HACK alert: this replaces the hack of the previous algorithm with a + // slightly less worse hack: instead of dropping the default info when + // the value was scalar before, we drop this information when there is + // only one disjunct, while not discarding hard defaults. TODO: a more + // principled approach would be to recognize that there is only one + // default at a point where this does not break commutativity. if + // if len(n.disjuncts) == 1 && n.disjuncts[0].defaultMode != isDefault { + // n.disjuncts[0].defaultMode = maybeDefault + // } + } + + // Compare to root, but add to this one. + switch p := parent; { + case p != n: + p.disjunctErrs = append(p.disjunctErrs, n.disjunctErrs...) 
+ n.disjunctErrs = n.disjunctErrs[:0] + + outer: + for _, d := range n.disjuncts { + for k, v := range p.disjuncts { + if !d.done() || !v.done() { + break + } + flags := CheckStructural + if last { + flags |= IgnoreOptional + } + if Equal(n.ctx, &v.result, &d.result, flags) { + m := maybeDefault + for _, u := range d.usedDefault { + m = combineDefault(m, u.nestedMode) + } + if m == isDefault { + p.disjuncts[k] = d + v.free() + } else { + d.free() + } + continue outer + } + } + + p.disjuncts = append(p.disjuncts, d) + } + + n.disjuncts = n.disjuncts[:0] + } +} + +func (n *nodeContext) makeError() { + code := IncompleteError + + if len(n.disjunctErrs) > 0 { + code = EvalError + for _, c := range n.disjunctErrs { + if c.Code > code { + code = c.Code + } + } + } + + b := &Bottom{ + Code: code, + Err: n.disjunctError(), + } + n.node.SetValue(n.ctx, Finalized, b) +} + +func mode(hasDefault, marked bool) defaultMode { + var mode defaultMode + switch { + case !hasDefault: + mode = maybeDefault + case marked: + mode = isDefault + default: + mode = notDefault + } + return mode +} + +// clone makes a shallow copy of a Vertex. The purpose is to create different +// disjuncts from the same Vertex under computation. This allows the conjuncts +// of an arc to be reset to a previous position and the reuse of earlier +// computations. +// +// Notes: only Arcs need to be copied recursively. Either the arc is finalized +// and can be used as is, or Structs is assumed to not yet be computed at the +// time that a clone is needed and must be nil. Conjuncts no longer needed and +// can become nil. All other fields can be copied shallowly. 
+func clone(v Vertex) Vertex { + v.state = nil + if a := v.Arcs; len(a) > 0 { + v.Arcs = make([]*Vertex, len(a)) + for i, arc := range a { + switch arc.status { + case Finalized: + v.Arcs[i] = arc + + case 0: + a := *arc + v.Arcs[i] = &a + + a.Conjuncts = make([]Conjunct, len(arc.Conjuncts)) + copy(a.Conjuncts, arc.Conjuncts) + + default: + a := *arc + a.state = arc.state.clone() + a.state.node = &a + a.state.snapshot = clone(a) + v.Arcs[i] = &a + } + } + } + + if a := v.Structs; len(a) > 0 { + v.Structs = make([]*StructInfo, len(a)) + copy(v.Structs, a) + } + + return v +} + +// Default rules from spec: +// +// U1: (v1, d1) & v2 => (v1&v2, d1&v2) +// U2: (v1, d1) & (v2, d2) => (v1&v2, d1&d2) +// +// D1: (v1, d1) | v2 => (v1|v2, d1) +// D2: (v1, d1) | (v2, d2) => (v1|v2, d1|d2) +// +// M1: *v => (v, v) +// M2: *(v1, d1) => (v1, d1) +// +// NOTE: M2 cannot be *(v1, d1) => (v1, v1), as this has the weird property +// of making a value less specific. This causes issues, for instance, when +// trimming. +// +// The old implementation does something similar though. It will discard +// default information after first determining if more than one conjunct +// has survived. +// +// def + maybe -> def +// not + maybe -> def +// not + def -> def + +type defaultMode int + +const ( + maybeDefault defaultMode = iota + isDefault + notDefault +) + +// combineDefaults combines default modes for unifying conjuncts. +// +// Default rules from spec: +// +// U1: (v1, d1) & v2 => (v1&v2, d1&v2) +// U2: (v1, d1) & (v2, d2) => (v1&v2, d1&d2) +func combineDefault(a, b defaultMode) defaultMode { + if a > b { + return a + } + return b +} + +// disjunctError returns a compound error for a failed disjunction. +// +// TODO(perf): the set of errors is now computed during evaluation. Eventually, +// this could be done lazily. 
+func (n *nodeContext) disjunctError() (errs errors.Error) { + ctx := n.ctx + + disjuncts := selectErrors(n.disjunctErrs) + + if disjuncts == nil { + errs = ctx.Newf("empty disjunction") // XXX: add space to sort first + } else { + disjuncts = errors.Sanitize(disjuncts) + k := len(errors.Errors(disjuncts)) + // prefix '-' to sort to top + errs = ctx.Newf("%d errors in empty disjunction:", k) + } + + errs = errors.Append(errs, disjuncts) + + return errs +} + +func selectErrors(a []*Bottom) (errs errors.Error) { + // return all errors if less than a certain number. + if len(a) <= 2 { + for _, b := range a { + errs = errors.Append(errs, b.Err) + + } + return errs + } + + // First select only relevant errors. + isIncomplete := false + k := 0 + for _, b := range a { + if !isIncomplete && b.Code >= IncompleteError { + k = 0 + isIncomplete = true + } + a[k] = b + k++ + } + a = a[:k] + + // filter errors + positions := map[token.Pos]bool{} + + add := func(b *Bottom, p token.Pos) bool { + if positions[p] { + return false + } + positions[p] = true + errs = errors.Append(errs, b.Err) + return true + } + + for _, b := range a { + // TODO: Should we also distinguish by message type? + if add(b, b.Err.Position()) { + continue + } + for _, p := range b.Err.InputPositions() { + if add(b, p) { + break + } + } + } + + return errs +} diff --git a/vendor/cuelang.org/go/internal/core/adt/doc.go b/vendor/cuelang.org/go/internal/core/adt/doc.go new file mode 100644 index 000000000..26c978e2f --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/doc.go @@ -0,0 +1,78 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package adt represents partially and fully evaluated CUE types. +// +// This package introduces several categories of types that indicate some set of +// values that may be used in a certain situation. Concrete types may belong to +// multiple categories. +// +// +// Abstract Types +// +// The following types describe the a place where a value may be used: +// +// Decl a value than can be used as a StructLit element. +// Elem a value than can be used as a ListLit element. +// Expr represents an Expr in the CUE grammar. +// Value a fully evaluated value that has no references (except for +// children in composite values). +// Node any of the above values. +// +// The following types categorize nodes by function: +// +// Resolver a reference to position in the result tree. +// Evaluator evaluates to 1 value. +// Yielder evaluates to 0 or more values. +// Validator validates another value. +// +// +// Reference resolution algorithm +// +// A Resolver is resolved within the context of an Environment. In CUE, a +// reference is evaluated by substituting it with a copy of the value to which +// it refers. If the copied value itself contains references we can distinguish +// two different cases. References that refer to values within the copied +// reference (not regarding selectors) will henceforth point to the copied node. +// References that point to outside the referened value will keep referring to +// their original value. 
+// +// a: b: { +// c: int +// d: c +// e: f +// } +// f: 4 +// g: a.b { // d.c points to inside the referred value, e.f, not. +// c: 3 +// } +// +// The implementation doesn't actually copy referred values, but rather resolves +// references with the aid of an Environment. During compile time, each +// references is associated with the label and a number indicating in which +// parent scope (offset from the current) this label needs to be looked up. An +// Environment keeps track of the point at which a value was referenced, +// providing enough information to look up the labeled value. This Environment +// is the identical for all references within a fields conjunct. Often, an +// Environment can even be shared among conjuncts. +// +// +// Values +// +// Values are fully evaluated expressions. As this means that all references +// will have been eliminated, Values are fully defined without the need for an +// Environment. Additionally, Values represent a fully evaluated form, stripped +// of any comprehensions, optional fields or embeddings. +// +package adt diff --git a/vendor/cuelang.org/go/internal/core/adt/equality.go b/vendor/cuelang.org/go/internal/core/adt/equality.go new file mode 100644 index 000000000..9f2d7ad03 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/equality.go @@ -0,0 +1,192 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package adt + +type Flag uint16 + +const ( + // IgnoreOptional allows optional information to be ignored. This only + // applies when CheckStructural is given. + IgnoreOptional Flag = 1 << iota + + // CheckStructural indicates that closedness information should be + // considered for equality. Equal may return false even when values are + // equal. + CheckStructural Flag = 1 << iota +) + +func Equal(ctx *OpContext, v, w Value, flags Flag) bool { + if x, ok := v.(*Vertex); ok { + return equalVertex(ctx, x, w, flags) + } + if y, ok := w.(*Vertex); ok { + return equalVertex(ctx, y, v, flags) + } + return equalTerminal(ctx, v, w, flags) +} + +func equalVertex(ctx *OpContext, x *Vertex, v Value, flags Flag) bool { + y, ok := v.(*Vertex) + if !ok { + return false + } + if x == y { + return true + } + xk := x.Kind() + yk := y.Kind() + + if xk != yk { + return false + } + + if len(x.Arcs) != len(y.Arcs) { + return false + } + + // TODO: this really should be subsumption. + if flags != 0 { + if x.IsClosed(ctx) != y.IsClosed(ctx) { + return false + } + if !equalClosed(ctx, x, y, flags) { + return false + } + } + +loop1: + for _, a := range x.Arcs { + for _, b := range y.Arcs { + if a.Label == b.Label { + if !Equal(ctx, a, b, flags) { + return false + } + continue loop1 + } + } + return false + } + + // We do not need to do the following check, because of the pigeon-hole principle. + // loop2: + // for _, b := range y.Arcs { + // for _, a := range x.Arcs { + // if a.Label == b.Label { + // continue loop2 + // } + // } + // return false + // } + + v, ok1 := x.BaseValue.(Value) + w, ok2 := y.BaseValue.(Value) + if !ok1 && !ok2 { + return true // both are struct or list. + } + + return equalTerminal(ctx, v, w, flags) +} + +// equalClosed tests if x and y have the same set of close information. 
+// TODO: the following refinements are possible: +// - unify optional fields and equate the optional fields +// - do the same for pattern constraints, where the pattern constraints +// are collated by pattern equality. +// - a further refinement would collate patterns by ranges. +// +// For all these refinements it would be necessary to have well-working +// structure sharing so as to not repeatedly recompute optional arcs. +func equalClosed(ctx *OpContext, x, y *Vertex, flags Flag) bool { + return verifyStructs(x, y, flags) && verifyStructs(y, x, flags) +} + +func verifyStructs(x, y *Vertex, flags Flag) bool { +outer: + for _, s := range x.Structs { + if (flags&IgnoreOptional != 0) && !s.StructLit.HasOptional() { + continue + } + if s.closeInfo == nil || s.closeInfo.span&DefinitionSpan == 0 { + if !s.StructLit.HasOptional() { + continue + } + } + for _, t := range y.Structs { + if s.StructLit == t.StructLit { + continue outer + } + } + return false + } + return true +} + +func equalTerminal(ctx *OpContext, v, w Value, flags Flag) bool { + if v == w { + return true + } + + switch x := v.(type) { + case *Num, *String, *Bool, *Bytes: + if b, ok := BinOp(ctx, EqualOp, v, w).(*Bool); ok { + return b.B + } + return false + + // TODO: for the remainder we are dealing with non-concrete values, so we + // could also just not bother. + + case *BoundValue: + if y, ok := w.(*BoundValue); ok { + return x.Op == y.Op && Equal(ctx, x.Value, y.Value, flags) + } + + case *BasicType: + if y, ok := w.(*BasicType); ok { + return x.K == y.K + } + + case *Conjunction: + y, ok := w.(*Conjunction) + if !ok || len(x.Values) != len(y.Values) { + return false + } + // always ordered the same + for i, xe := range x.Values { + if !Equal(ctx, xe, y.Values[i], flags) { + return false + } + } + return true + + case *Disjunction: + // The best way to compute this is with subsumption, but even that won't + // be too accurate. Assume structural equivalence for now. 
+ y, ok := w.(*Disjunction) + if !ok || len(x.Values) != len(y.Values) { + return false + } + for i, xe := range x.Values { + if !Equal(ctx, xe, y.Values[i], flags) { + return false + } + } + return true + + case *BuiltinValidator: + } + + return false +} diff --git a/vendor/cuelang.org/go/internal/core/adt/errors.go b/vendor/cuelang.org/go/internal/core/adt/errors.go new file mode 100644 index 000000000..1b20606b3 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/errors.go @@ -0,0 +1,312 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +// This file contains error encodings. +// +// +// *Bottom: +// - an adt.Value +// - always belongs to a single vertex. +// - does NOT implement error +// - marks error code used for control flow +// +// errors.Error +// - CUE default error +// - implements error +// - tracks error locations +// - has error message details +// - supports multiple errors +// + +import ( + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +// ErrorCode indicates the type of error. The type of error may influence +// control flow. No other aspects of an error may influence control flow. +type ErrorCode int + +const ( + // An EvalError is a fatal evaluation error. + EvalError ErrorCode = iota + + // A UserError is a fatal error originating from the user. + UserError + + // NotExistError is used to indicate a value does not exist. 
+ // Mostly used for legacy reasons. + NotExistError + + // StructuralCycleError means a structural cycle was found. Structural + // cycles are permanent errors, but they are not passed up recursively, + // as a unification of a value with a structural cycle with one that + // doesn't may still give a useful result. + StructuralCycleError + + // IncompleteError means an evaluation could not complete because of + // insufficient information that may still be added later. + IncompleteError + + // A CycleError indicates a reference error. It is considered to be + // an incomplete error, as reference errors may be broken by providing + // a concrete value. + CycleError +) + +func (c ErrorCode) String() string { + switch c { + case EvalError: + return "eval" + case UserError: + return "user" + case StructuralCycleError: + return "structural cycle" + case IncompleteError: + return "incomplete" + case CycleError: + return "cycle" + } + return "unknown" +} + +// Bottom represents an error or bottom symbol. +// +// Although a Bottom node holds control data, it should not be created until the +// control information already resulted in an error. +type Bottom struct { + Src ast.Node + Err errors.Error + + Code ErrorCode + HasRecursive bool + ChildError bool // Err is the error of the child + // Value holds the computed value so far in case + Value Value +} + +func (x *Bottom) Source() ast.Node { return x.Src } +func (x *Bottom) Kind() Kind { return BottomKind } +func (x *Bottom) Specialize(k Kind) Value { return x } // XXX remove + +func (b *Bottom) IsIncomplete() bool { + if b == nil { + return false + } + return b.Code == IncompleteError || b.Code == CycleError +} + +// isLiteralBottom reports whether x is an error originating from a user. +func isLiteralBottom(x Expr) bool { + b, ok := x.(*Bottom) + return ok && b.Code == UserError +} + +// isError reports whether v is an error or nil. 
+func isError(v Value) bool { + if v == nil { + return true + } + _, ok := v.(*Bottom) + return ok +} + +// isIncomplete reports whether v is associated with an incomplete error. +func isIncomplete(v *Vertex) bool { + if v == nil { + return true + } + if b, ok := v.BaseValue.(*Bottom); ok { + return b.IsIncomplete() + } + return false +} + +// AddChildError updates x to record an error that occurred in one of +// its descendent arcs. The resulting error will record the worst error code of +// the current error or recursive error. +// +// If x is not already an error, the value is recorded in the error for +// reference. +// +func (v *Vertex) AddChildError(recursive *Bottom) { + v.ChildErrors = CombineErrors(nil, v.ChildErrors, recursive) + if recursive.IsIncomplete() { + return + } + x := v.BaseValue + err, _ := x.(*Bottom) + if err == nil { + v.BaseValue = &Bottom{ + Code: recursive.Code, + Value: v, + HasRecursive: true, + ChildError: true, + Err: recursive.Err, + } + return + } + + err.HasRecursive = true + if err.Code > recursive.Code { + err.Code = recursive.Code + } + + v.BaseValue = err +} + +// CombineErrors combines two errors that originate at the same Vertex. +func CombineErrors(src ast.Node, x, y Value) *Bottom { + a, _ := Unwrap(x).(*Bottom) + b, _ := Unwrap(y).(*Bottom) + + if a == b && isCyclePlaceholder(a) { + return a + } + switch { + case a != nil && b != nil: + case a != nil: + return a + case b != nil: + return b + default: + return nil + } + + if a.Code != b.Code { + if a.Code > b.Code { + a, b = b, a + } + + if b.Code >= IncompleteError { + return a + } + } + + return &Bottom{ + Src: src, + Err: errors.Append(a.Err, b.Err), + Code: a.Code, + } +} + +// A ValueError is returned as a result of evaluating a value. 
+type ValueError struct { + r Runtime + v *Vertex + pos token.Pos + auxpos []token.Pos + err errors.Error + errors.Message +} + +func (v *ValueError) AddPosition(n Node) { + if n == nil { + return + } + if p := pos(n); p != token.NoPos { + for _, q := range v.auxpos { + if p == q { + return + } + } + v.auxpos = append(v.auxpos, p) + } +} + +func (v *ValueError) AddClosedPositions(c CloseInfo) { + for s := c.closeInfo; s != nil; s = s.parent { + if loc := s.location; loc != nil { + v.AddPosition(loc) + } + } +} + +func (c *OpContext) errNode() *Vertex { + return c.vertex +} + +// MarkPositions marks the current position stack. +func (c *OpContext) MarkPositions() int { + return len(c.positions) +} + +// ReleasePositions sets the position state to one from a call to MarkPositions. +func (c *OpContext) ReleasePositions(p int) { + c.positions = c.positions[:p] +} + +func (c *OpContext) AddPosition(n Node) { + if n != nil { + c.positions = append(c.positions, n) + } +} + +func (c *OpContext) Newf(format string, args ...interface{}) *ValueError { + return c.NewPosf(c.pos(), format, args...) 
+} + +func (c *OpContext) NewPosf(p token.Pos, format string, args ...interface{}) *ValueError { + var a []token.Pos + if len(c.positions) > 0 { + a = make([]token.Pos, 0, len(c.positions)) + for _, n := range c.positions { + if p := pos(n); p != token.NoPos { + a = append(a, p) + } else if v, ok := n.(*Vertex); ok { + for _, c := range v.Conjuncts { + if p := pos(c.x); p != token.NoPos { + a = append(a, p) + } + } + } + } + } + return &ValueError{ + r: c.Runtime, + v: c.errNode(), + pos: p, + auxpos: a, + Message: errors.NewMessage(format, args), + } +} + +func (e *ValueError) Error() string { + return errors.String(e) +} + +func (e *ValueError) Position() token.Pos { + return e.pos +} + +func (e *ValueError) InputPositions() (a []token.Pos) { + return e.auxpos +} + +func (e *ValueError) Path() (a []string) { + if e.v == nil { + return nil + } + for _, f := range appendPath(nil, e.v) { + a = append(a, f.SelectorString(e.r)) + } + return a +} + +func (e ValueError) Unwrap() error { + return e.err +} diff --git a/vendor/cuelang.org/go/internal/core/adt/eval.go b/vendor/cuelang.org/go/internal/core/adt/eval.go new file mode 100644 index 000000000..393b69db1 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/eval.go @@ -0,0 +1,2146 @@ +// Copyright 2021 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package eval contains the high level CUE evaluation strategy. 
+// +// CUE allows for a significant amount of freedom in order of evaluation due to +// the commutativity of the unification operation. This package implements one +// of the possible strategies. +package adt + +// TODO: +// - result should be nodeContext: this allows optionals info to be extracted +// and computed. +// + +import ( + "fmt" + "html/template" + "strings" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +// TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO TODO +// +// - Reuse work from previous cycles. For instance, if we can guarantee that a +// value is always correct for partial results, we can just process the arcs +// going from Partial to Finalized, without having to reevaluate the value. +// +// - Test closedness far more thoroughly. +// + +type Stats struct { + DisjunctCount int + UnifyCount int + + Freed int + Retained int + Reused int + Allocs int +} + +// Leaks reports the number of nodeContext structs leaked. These are typically +// benign, as they will just be garbage collected, as long as the pointer from +// the original nodes has been eliminated or the original nodes are also not +// referred to. But Leaks may have notable impact on performance, and thus +// should be avoided. +func (s *Stats) Leaks() int { + return s.Allocs + s.Reused - s.Freed +} + +var stats = template.Must(template.New("stats").Parse(`{{"" -}} + +Leaks: {{.Leaks}} +Freed: {{.Freed}} +Reused: {{.Reused}} +Allocs: {{.Allocs}} +Retain: {{.Retained}} + +Unifications: {{.UnifyCount}} +Disjuncts: {{.DisjunctCount}}`)) + +func (s *Stats) String() string { + buf := &strings.Builder{} + _ = stats.Execute(buf, s) + return buf.String() +} + +func (c *OpContext) Stats() *Stats { + return &c.stats +} + +// TODO: Note: NewContext takes essentially a cue.Value. By making this +// type more central, we can perhaps avoid context creation. 
+ +// func NewContext(r Runtime, v *Vertex) *OpContext { +// e := NewUnifier(r) +// return e.NewContext(v) +// } + +var structSentinel = &StructMarker{} + +var incompleteSentinel = &Bottom{ + Code: IncompleteError, + Err: errors.Newf(token.NoPos, "incomplete"), +} + +// evaluate returns the evaluated value associated with v. It may return a +// partial result. That is, if v was not yet unified, it may return a +// concrete value that must be the result assuming the configuration has no +// errors. +// +// This semantics allows CUE to break reference cycles in a straightforward +// manner. +// +// Vertex v must still be evaluated at some point to catch the underlying +// error. +// +// TODO: return *Vertex +func (c *OpContext) evaluate(v *Vertex, state VertexStatus) Value { + if v.isUndefined() { + // Use node itself to allow for cycle detection. + c.Unify(v, state) + } + + if n := v.state; n != nil { + if n.errs != nil && !n.errs.IsIncomplete() { + return n.errs + } + if n.scalar != nil && isCyclePlaceholder(v.BaseValue) { + return n.scalar + } + } + + switch x := v.BaseValue.(type) { + case *Bottom: + if x.IsIncomplete() { + c.AddBottom(x) + return nil + } + return x + + case nil: + if v.state != nil { + switch x := v.state.getValidators().(type) { + case Value: + return x + default: + w := *v + w.BaseValue = x + return &w + } + } + Assertf(false, "no BaseValue: state: %v; requested: %v", v.status, state) + } + + if v.status < Finalized && v.state != nil { + // TODO: errors are slightly better if we always add addNotify, but + // in this case it is less likely to cause a performance penalty. + // See https://github.com/cuelang/cue/issues/661. It may be possible to + // relax this again once we have proper tests to prevent regressions of + // that issue. + if !v.state.done() || v.state.errs != nil { + v.state.addNotify(c.vertex) + } + } + + return v +} + +// Unify fully unifies all values of a Vertex to completion and stores +// the result in the Vertex. 
If unify was called on v before it returns +// the cached results. +func (c *OpContext) Unify(v *Vertex, state VertexStatus) { + // defer c.PopVertex(c.PushVertex(v)) + + // Ensure a node will always have a nodeContext after calling Unify if it is + // not yet Finalized. + n := v.getNodeContext(c) + defer v.freeNode(n) + + if state <= v.Status() { + if v.Status() != Partial && state != Partial { + return + } + } + + switch v.Status() { + case Evaluating: + n.insertConjuncts() + return + + case EvaluatingArcs: + Assertf(v.status > 0, "unexpected status %d", v.status) + return + + case 0: + if v.Label.IsDef() { + v.Closed = true + } + + if v.Parent != nil { + if v.Parent.Closed { + v.Closed = true + } + } + + // TODO(perf): ideally we should always perform a closedness check if + // state is Finalized. This is currently not possible when computing a + // partial disjunction as the closedness information is not yet + // complete, possibly leading to a disjunct to be rejected prematurely. + // It is probably possible to fix this if we could add StructInfo + // structures demarked per conjunct. + // + // In practice this should not be a problem: when disjuncts originate + // from the same disjunct, they will have the same StructInfos, and thus + // Equal is able to equate them even in the precense of optional field. + // In general, combining any limited set of disjuncts will soon reach + // a fixed point where duplicate elements can be eliminated this way. + // + // Note that not checking closedness is irrelevant for disjunctions of + // scalars. This means it also doesn't hurt performance where structs + // have a discriminator field (e.g. Kubernetes). We should take care, + // though, that any potential performance issues are eliminated for + // Protobuf-like oneOf fields. + ignore := state != Finalized || n.skipNonMonotonicChecks() + + if !v.Label.IsInt() && v.Parent != nil && !ignore { + // Visit arcs recursively to validate and compute error. 
+ if _, err := verifyArc2(c, v.Label, v, v.Closed); err != nil { + // Record error in child node to allow recording multiple + // conflicts at the appropriate place, to allow valid fields to + // be represented normally and, most importantly, to avoid + // recursive processing of a disallowed field. + v.SetValue(c, Finalized, err) + return + } + } + + defer c.PopArc(c.PushArc(v)) + + c.stats.UnifyCount++ + + // Clear any remaining error. + if err := c.Err(); err != nil { + panic("uncaught error") + } + + // Set the cache to a cycle error to ensure a cyclic reference will result + // in an error if applicable. A cyclic error may be ignored for + // non-expression references. The cycle error may also be removed as soon + // as there is evidence what a correct value must be, but before all + // validation has taken place. + // + // TODO(cycle): having a more recursive algorithm would make this + // special cycle handling unnecessary. + v.BaseValue = cycle + + v.UpdateStatus(Evaluating) + + n.conjuncts = v.Conjuncts + n.insertConjuncts() + + fallthrough + + case Partial: + defer c.PopArc(c.PushArc(v)) + + v.status = Evaluating + + // Use maybeSetCache for cycle breaking + for n.maybeSetCache(); n.expandOne(); n.maybeSetCache() { + } + + n.doNotify() + + if !n.done() { + switch { + case len(n.disjunctions) > 0 && isCyclePlaceholder(v.BaseValue): + // We disallow entering computations of disjunctions with + // incomplete data. + if state == Finalized { + b := c.NewErrf("incomplete cause disjunction") + b.Code = IncompleteError + n.errs = CombineErrors(nil, n.errs, b) + v.SetValue(n.ctx, Finalized, b) + } else { + n.node.UpdateStatus(Partial) + } + return + + case state <= AllArcs: + n.node.UpdateStatus(Partial) + return + } + } + + if s := v.Status(); state <= s { + // We have found a partial result. There may still be errors + // down the line which may result from further evaluating this + // field, but that will be caught when evaluating this field + // for real. 
+ + // This also covers the case where a recursive evaluation triggered + // this field to become finalized in the mean time. In that case + // we can avoid running another expandDisjuncts. + return + } + + // Disjunctions should always be finalized. If there are nested + // disjunctions the last one should be finalized. + disState := state + if len(n.disjunctions) > 0 && disState != Finalized { + disState = Finalized + } + n.expandDisjuncts(disState, n, maybeDefault, false, true) + + n.finalizeDisjuncts() + + switch len(n.disjuncts) { + case 0: + case 1: + x := n.disjuncts[0].result + x.state = nil + *v = x + + default: + d := n.createDisjunct() + v.BaseValue = d + // The conjuncts will have too much information. Better have no + // information than incorrect information. + for _, d := range d.Values { + // We clear the conjuncts for now. As these disjuncts are for API + // use only, we will fill them out when necessary (using Defaults). + d.Conjuncts = nil + + // TODO: use a more principled form of dereferencing. For instance, + // disjuncts could already be assumed to be the given Vertex, and + // the the main vertex could be dereferenced during evaluation. + for _, a := range d.Arcs { + for _, x := range a.Conjuncts { + // All the environments for embedded structs need to be + // dereferenced. + for env := x.Env; env != nil && env.Vertex == v; env = env.Up { + env.Vertex = d + } + } + } + } + v.Arcs = nil + // v.Structs = nil // TODO: should we keep or discard the Structs? + // TODO: how to represent closedness information? Do we need it? + } + + // If the state has changed, it is because a disjunct has been run, or + // because a single disjunct has replaced it. Restore the old state as + // to not confuse memory management. + v.state = n + + // We don't do this in postDisjuncts, as it should only be done after + // completing all disjunctions. 
+ if !n.done() { + if err := n.incompleteErrors(); err != nil { + b, _ := n.node.BaseValue.(*Bottom) + if b != err { + err = CombineErrors(n.ctx.src, b, err) + } + n.node.BaseValue = err + } + } + + if state != Finalized { + return + } + + if v.BaseValue == nil { + v.BaseValue = n.getValidators() + } + + // Free memory here? + v.UpdateStatus(Finalized) + + case AllArcs: + defer c.PopArc(c.PushArc(v)) + + n.completeArcs(state) + + case Finalized: + } +} + +// insertConjuncts inserts conjuncts previously uninserted. +func (n *nodeContext) insertConjuncts() { + for len(n.conjuncts) > 0 { + nInfos := len(n.node.Structs) + p := &n.conjuncts[0] + n.conjuncts = n.conjuncts[1:] + n.addExprConjunct(*p) + + // Record the OptionalTypes for all structs that were inferred by this + // Conjunct. This information can be used by algorithms such as trim. + for i := nInfos; i < len(n.node.Structs); i++ { + p.CloseInfo.FieldTypes |= n.node.Structs[i].types + } + } +} + +// finalizeDisjuncts: incomplete errors are kept around and not removed early. +// This call filters the incomplete errors and removes them +// +// This also collects all errors of empty disjunctions. These cannot be +// collected during the finalization state of individual disjuncts. Care should +// be taken to only call this after all disjuncts have been finalized. 
+func (n *nodeContext) finalizeDisjuncts() { + a := n.disjuncts + if len(a) == 0 { + return + } + k := 0 + for i, d := range a { + switch d.finalDone() { + case true: + a[k], a[i] = d, a[k] + k++ + default: + if err := d.incompleteErrors(); err != nil { + n.disjunctErrs = append(n.disjunctErrs, err) + } + } + d.free() + } + if k == 0 { + n.makeError() + } + n.disjuncts = a[:k] +} + +func (n *nodeContext) doNotify() { + if n.errs == nil || len(n.notify) == 0 { + return + } + for _, v := range n.notify { + if v.state == nil { + if b, ok := v.BaseValue.(*Bottom); ok { + v.BaseValue = CombineErrors(nil, b, n.errs) + } else { + v.BaseValue = n.errs + } + } else { + v.state.addBottom(n.errs) + } + } + n.notify = n.notify[:0] +} + +func (n *nodeContext) postDisjunct(state VertexStatus) { + ctx := n.ctx + + for { + // Use maybeSetCache for cycle breaking + for n.maybeSetCache(); n.expandOne(); n.maybeSetCache() { + } + + if aList, id := n.addLists(); aList != nil { + n.updateNodeType(ListKind, aList, id) + } else { + break + } + } + + if n.aStruct != nil { + n.updateNodeType(StructKind, n.aStruct, n.aStructID) + } + + switch err := n.getErr(); { + case err != nil: + n.node.BaseValue = err + n.errs = nil + + default: + if isCyclePlaceholder(n.node.BaseValue) { + if !n.done() { + n.node.BaseValue = n.incompleteErrors() + } else { + n.node.BaseValue = nil + } + } + // TODO: this ideally should be done here. However, doing so causes + // a somewhat more aggressive cutoff in disjunction cycles, which cause + // some incompatibilities. Fix in another CL. + // + // else if !n.done() { + // n.expandOne() + // if err := n.incompleteErrors(); err != nil { + // n.node.BaseValue = err + // } + // } + + // We are no longer evaluating. + // n.node.UpdateStatus(Partial) + n.node.UpdateStatus(Evaluating) + + // Either set to Conjunction or error. + // TODO: verify and simplify the below code to determine whether + // something is a struct. 
+ markStruct := false + if n.aStruct != nil { + markStruct = true + } else if len(n.node.Structs) > 0 { + markStruct = n.kind&StructKind != 0 && !n.hasTop + } + v := n.node.Value() + if n.node.BaseValue == nil && markStruct { + n.node.BaseValue = &StructMarker{} + v = n.node + } + if v != nil && IsConcrete(v) { + // Also check when we already have errors as we may find more + // serious errors and would like to know about all errors anyway. + + if n.lowerBound != nil { + if b := ctx.Validate(n.lowerBound, v); b != nil { + // TODO(errors): make Validate return boolean and generate + // optimized conflict message. Also track and inject IDs + // to determine origin location.s + if e, _ := b.Err.(*ValueError); e != nil { + e.AddPosition(n.lowerBound) + e.AddPosition(v) + } + n.addBottom(b) + } + } + if n.upperBound != nil { + if b := ctx.Validate(n.upperBound, v); b != nil { + // TODO(errors): make Validate return boolean and generate + // optimized conflict message. Also track and inject IDs + // to determine origin location.s + if e, _ := b.Err.(*ValueError); e != nil { + e.AddPosition(n.upperBound) + e.AddPosition(v) + } + n.addBottom(b) + } + } + // MOVE BELOW + // TODO(perf): only delay processing of actual non-monotonic checks. + skip := n.skipNonMonotonicChecks() + if v := n.node.Value(); v != nil && IsConcrete(v) && !skip { + for _, v := range n.checks { + // TODO(errors): make Validate return bottom and generate + // optimized conflict message. Also track and inject IDs + // to determine origin location.s + if b := ctx.Validate(v, n.node); b != nil { + n.addBottom(b) + } + } + } + } else if state == Finalized { + n.node.BaseValue = n.getValidators() + } + + if v == nil { + break + } + + switch { + case v.Kind() == ListKind: + for _, a := range n.node.Arcs { + if a.Label.Typ() == StringLabel { + n.addErr(ctx.Newf("list may not have regular fields")) + // TODO(errors): add positions for list and arc definitions. 
+ + } + } + + // case !isStruct(n.node) && v.Kind() != BottomKind: + // for _, a := range n.node.Arcs { + // if a.Label.IsRegular() { + // n.addErr(errors.Newf(token.NoPos, + // // TODO(errors): add positions of non-struct values and arcs. + // "cannot combine scalar values with arcs")) + // } + // } + } + } + + if err := n.getErr(); err != nil { + if b, _ := n.node.BaseValue.(*Bottom); b != nil { + err = CombineErrors(nil, b, err) + } + n.node.BaseValue = err + // TODO: add return: if evaluation of arcs is important it can be done + // later. Logically we're done. + } + + n.completeArcs(state) +} + +func (n *nodeContext) incompleteErrors() *Bottom { + // collect incomplete errors. + var err *Bottom // n.incomplete + for _, d := range n.dynamicFields { + err = CombineErrors(nil, err, d.err) + } + for _, c := range n.forClauses { + err = CombineErrors(nil, err, c.err) + } + for _, c := range n.ifClauses { + err = CombineErrors(nil, err, c.err) + } + for _, x := range n.exprs { + err = CombineErrors(nil, err, x.err) + } + if err == nil { + // safeguard. + err = incompleteSentinel + } + return err +} + +func (n *nodeContext) completeArcs(state VertexStatus) { + + if state <= AllArcs { + n.node.UpdateStatus(AllArcs) + return + } + + n.node.UpdateStatus(EvaluatingArcs) + + ctx := n.ctx + + if cyclic := n.hasCycle && !n.hasNonCycle; cyclic { + n.node.BaseValue = CombineErrors(nil, + n.node.Value(), + &Bottom{ + Code: StructuralCycleError, + Err: ctx.Newf("structural cycle"), + Value: n.node.Value(), + // TODO: probably, this should have the referenced arc. + }) + // Don't process Arcs. This is mostly to ensure that no Arcs with + // an Unprocessed status remain in the output. + n.node.Arcs = nil + } else { + // Visit arcs recursively to validate and compute error. 
+ for _, a := range n.node.Arcs { + if a.nonMonotonicInsertGen >= a.nonMonotonicLookupGen && a.nonMonotonicLookupGen > 0 { + err := ctx.Newf( + "cycle: new field %s inserted by if clause that was previously evaluated by another if clause", a.Label.SelectorString(ctx)) + err.AddPosition(n.node) + n.node.BaseValue = &Bottom{Err: err} + } else if a.nonMonotonicReject { + err := ctx.Newf( + "cycle: field %s was added after an if clause evaluated it", + a.Label.SelectorString(ctx)) + err.AddPosition(n.node) + n.node.BaseValue = &Bottom{Err: err} + } + + // Call UpdateStatus here to be absolutely sure the status is set + // correctly and that we are not regressing. + n.node.UpdateStatus(EvaluatingArcs) + ctx.Unify(a, state) + // Don't set the state to Finalized if the child arcs are not done. + if state == Finalized && a.status < Finalized { + state = AllArcs + } + if err, _ := a.BaseValue.(*Bottom); err != nil { + n.node.AddChildError(err) + } + } + } + + n.node.UpdateStatus(state) +} + +// TODO: this is now a sentinel. Use a user-facing error that traces where +// the cycle originates. +var cycle = &Bottom{ + Err: errors.Newf(token.NoPos, "cycle error"), + Code: CycleError, +} + +func isCyclePlaceholder(v BaseValue) bool { + return v == cycle +} + +func (n *nodeContext) createDisjunct() *Disjunction { + a := make([]*Vertex, len(n.disjuncts)) + p := 0 + hasDefaults := false + for i, x := range n.disjuncts { + v := new(Vertex) + *v = x.result + v.state = nil + switch x.defaultMode { + case isDefault: + a[i] = a[p] + a[p] = v + p++ + hasDefaults = true + + case notDefault: + hasDefaults = true + fallthrough + case maybeDefault: + a[i] = v + } + } + // TODO: disambiguate based on concrete values. + // TODO: consider not storing defaults. 
+ // if p > 0 { + // a = a[:p] + // } + return &Disjunction{ + Values: a, + NumDefaults: p, + HasDefaults: hasDefaults, + } +} + +type arcKey struct { + arc *Vertex + id CloseInfo +} + +// A nodeContext is used to collate all conjuncts of a value to facilitate +// unification. Conceptually order of unification does not matter. However, +// order has relevance when performing checks of non-monotic properities. Such +// checks should only be performed once the full value is known. +type nodeContext struct { + nextFree *nodeContext + refCount int + + ctx *OpContext + node *Vertex + + // usedArcs is a list of arcs that were looked up during non-monotonic operations, but do not exist yet. + usedArcs []*Vertex + + // TODO: (this is CL is first step) + // filter *Vertex a subset of composite with concrete fields for + // bloom-like filtering of disjuncts. We should first verify, however, + // whether some breath-first search gives sufficient performance, as this + // should already ensure a quick-fail for struct disjunctions with + // discriminators. + + arcMap []arcKey + + // snapshot holds the last value of the vertex before calling postDisjunct. + snapshot Vertex + + // Result holds the last evaluated value of the vertex after calling + // postDisjunct. + result Vertex + + // Current value (may be under construction) + scalar Value // TODO: use Value in node. + scalarID CloseInfo + + // Concrete conjuncts + kind Kind + kindExpr Expr // expr that adjust last value (for error reporting) + kindID CloseInfo // for error tracing + lowerBound *BoundValue // > or >= + upperBound *BoundValue // < or <= + checks []Validator // BuiltinValidator, other bound values. + errs *Bottom + + // Conjuncts holds a reference to the Vertex Arcs that still need + // processing. It does NOT need to be copied. + conjuncts []Conjunct + + // notify is used to communicate errors in cyclic dependencies. + // TODO: also use this to communicate increasingly more concrete values. 
+ notify []*Vertex + + // Struct information + dynamicFields []envDynamic + ifClauses []envYield + forClauses []envYield + aStruct Expr + aStructID CloseInfo + + // Expression conjuncts + lists []envList + vLists []*Vertex + exprs []envExpr + + hasTop bool + hasCycle bool // has conjunct with structural cycle + hasNonCycle bool // has conjunct without structural cycle + + // Disjunction handling + disjunctions []envDisjunct + + // usedDefault indicates the for each of possibly multiple parent + // disjunctions whether it is unified with a default disjunct or not. + // This is then later used to determine whether a disjunction should + // be treated as a marked disjunction. + usedDefault []defaultInfo + + defaultMode defaultMode + disjuncts []*nodeContext + buffer []*nodeContext + disjunctErrs []*Bottom +} + +type defaultInfo struct { + // parentMode indicates whether this values was used as a default value, + // based on the parent mode. + parentMode defaultMode + + // The result of default evaluation for a nested disjunction. + nestedMode defaultMode + + origMode defaultMode +} + +func (n *nodeContext) addNotify(v *Vertex) { + if v != nil { + n.notify = append(n.notify, v) + } +} + +func (n *nodeContext) clone() *nodeContext { + d := n.ctx.newNodeContext(n.node) + + d.refCount++ + + d.ctx = n.ctx + d.node = n.node + + d.scalar = n.scalar + d.scalarID = n.scalarID + d.kind = n.kind + d.kindExpr = n.kindExpr + d.kindID = n.kindID + d.aStruct = n.aStruct + d.aStructID = n.aStructID + d.hasTop = n.hasTop + + d.lowerBound = n.lowerBound + d.upperBound = n.upperBound + d.errs = n.errs + d.hasTop = n.hasTop + d.hasCycle = n.hasCycle + d.hasNonCycle = n.hasNonCycle + + // d.arcMap = append(d.arcMap, n.arcMap...) // XXX add? + // d.usedArcs = append(d.usedArcs, n.usedArcs...) // XXX: add? + d.notify = append(d.notify, n.notify...) + d.checks = append(d.checks, n.checks...) + d.dynamicFields = append(d.dynamicFields, n.dynamicFields...) 
+ d.ifClauses = append(d.ifClauses, n.ifClauses...) + d.forClauses = append(d.forClauses, n.forClauses...) + d.lists = append(d.lists, n.lists...) + d.vLists = append(d.vLists, n.vLists...) + d.exprs = append(d.exprs, n.exprs...) + d.usedDefault = append(d.usedDefault, n.usedDefault...) + + // No need to clone d.disjunctions + + return d +} + +func (c *OpContext) newNodeContext(node *Vertex) *nodeContext { + if n := c.freeListNode; n != nil { + c.stats.Reused++ + c.freeListNode = n.nextFree + + *n = nodeContext{ + ctx: c, + node: node, + kind: TopKind, + usedArcs: n.usedArcs[:0], + arcMap: n.arcMap[:0], + notify: n.notify[:0], + checks: n.checks[:0], + dynamicFields: n.dynamicFields[:0], + ifClauses: n.ifClauses[:0], + forClauses: n.forClauses[:0], + lists: n.lists[:0], + vLists: n.vLists[:0], + exprs: n.exprs[:0], + disjunctions: n.disjunctions[:0], + usedDefault: n.usedDefault[:0], + disjunctErrs: n.disjunctErrs[:0], + disjuncts: n.disjuncts[:0], + buffer: n.buffer[:0], + } + + return n + } + c.stats.Allocs++ + + return &nodeContext{ + ctx: c, + node: node, + kind: TopKind, + } +} + +func (v *Vertex) getNodeContext(c *OpContext) *nodeContext { + if v.state == nil { + if v.status == Finalized { + return nil + } + v.state = c.newNodeContext(v) + } else if v.state.node != v { + panic("getNodeContext: nodeContext out of sync") + } + v.state.refCount++ + return v.state +} + +func (v *Vertex) freeNode(n *nodeContext) { + if n == nil { + return + } + if n.node != v { + panic("freeNode: unpaired free") + } + if v.state != nil && v.state != n { + panic("freeNode: nodeContext out of sync") + } + if n.refCount--; n.refCount == 0 { + if v.status == Finalized { + v.freeNodeState() + } else { + n.ctx.stats.Retained++ + } + } +} + +func (v *Vertex) freeNodeState() { + if v.state == nil { + return + } + state := v.state + v.state = nil + + state.ctx.freeNodeContext(state) +} + +func (n *nodeContext) free() { + if n.refCount--; n.refCount == 0 { + n.ctx.freeNodeContext(n) + } +} 
+ +func (c *OpContext) freeNodeContext(n *nodeContext) { + c.stats.Freed++ + n.nextFree = c.freeListNode + c.freeListNode = n + n.node = nil + n.refCount = 0 +} + +// TODO(perf): return a dedicated ConflictError that can track original +// positions on demand. +func (n *nodeContext) addConflict( + v1, v2 Node, + k1, k2 Kind, + id1, id2 CloseInfo) { + + ctx := n.ctx + + var err *ValueError + if k1 == k2 { + err = ctx.NewPosf(token.NoPos, + "conflicting values %s and %s", ctx.Str(v1), ctx.Str(v2)) + } else { + err = ctx.NewPosf(token.NoPos, + "conflicting values %s and %s (mismatched types %s and %s)", + ctx.Str(v1), ctx.Str(v2), k1, k2) + } + + err.AddPosition(v1) + err.AddPosition(v2) + err.AddClosedPositions(id1) + err.AddClosedPositions(id2) + + n.addErr(err) +} + +func (n *nodeContext) updateNodeType(k Kind, v Expr, id CloseInfo) bool { + ctx := n.ctx + kind := n.kind & k + + switch { + case n.kind == BottomKind, + k == BottomKind: + return false + + case kind == BottomKind: + if n.kindExpr != nil { + n.addConflict(n.kindExpr, v, n.kind, k, n.kindID, id) + } else { + n.addErr(ctx.Newf( + "conflicting value %s (mismatched types %s and %s)", + ctx.Str(v), n.kind, k)) + } + } + + if n.kind != kind || n.kindExpr == nil { + n.kindExpr = v + } + n.kind = kind + return kind != BottomKind +} + +func (n *nodeContext) done() bool { + return len(n.dynamicFields) == 0 && + len(n.ifClauses) == 0 && + len(n.forClauses) == 0 && + len(n.exprs) == 0 +} + +// finalDone is like done, but allows for cycle errors, which can be ignored +// as they essentially indicate a = a & _. +func (n *nodeContext) finalDone() bool { + for _, x := range n.exprs { + if x.err.Code != CycleError { + return false + } + } + return len(n.dynamicFields) == 0 && + len(n.ifClauses) == 0 && + len(n.forClauses) == 0 +} + +// hasErr is used to determine if an evaluation path, for instance a single +// path after expanding all disjunctions, has an error. 
+func (n *nodeContext) hasErr() bool { + if n.node.ChildErrors != nil { + return true + } + if n.node.Status() > Evaluating && n.node.IsErr() { + return true + } + return n.ctx.HasErr() || n.errs != nil +} + +func (n *nodeContext) getErr() *Bottom { + n.errs = CombineErrors(nil, n.errs, n.ctx.Err()) + return n.errs +} + +// getValidators sets the vertex' Value in case there was no concrete value. +func (n *nodeContext) getValidators() BaseValue { + ctx := n.ctx + + a := []Value{} + // if n.node.Value != nil { + // a = append(a, n.node.Value) + // } + kind := TopKind + if n.lowerBound != nil { + a = append(a, n.lowerBound) + kind &= n.lowerBound.Kind() + } + if n.upperBound != nil { + a = append(a, n.upperBound) + kind &= n.upperBound.Kind() + } + for _, c := range n.checks { + // Drop !=x if x is out of bounds with another bound. + if b, _ := c.(*BoundValue); b != nil && b.Op == NotEqualOp { + if n.upperBound != nil && + SimplifyBounds(ctx, n.kind, n.upperBound, b) != nil { + continue + } + if n.lowerBound != nil && + SimplifyBounds(ctx, n.kind, n.lowerBound, b) != nil { + continue + } + } + a = append(a, c) + kind &= c.Kind() + } + if kind&^n.kind != 0 { + a = append(a, &BasicType{K: n.kind}) + } + + var v BaseValue + switch len(a) { + case 0: + // Src is the combined input. + v = &BasicType{K: n.kind} + + case 1: + v = a[0].(Value) // remove cast + + default: + v = &Conjunction{Values: a} + } + + return v +} + +// TODO: this function can probably go as this is now handled in the nodeContext. 
+func (n *nodeContext) maybeSetCache() { + if n.node.Status() > Partial { // n.node.BaseValue != nil + return + } + if n.scalar != nil { + n.node.BaseValue = n.scalar + } + // NOTE: this is now handled by associating the nodeContext + // if n.errs != nil { + // n.node.SetValue(n.ctx, Partial, n.errs) + // } +} + +type envExpr struct { + c Conjunct + err *Bottom +} + +type envDynamic struct { + env *Environment + field *DynamicField + id CloseInfo + err *Bottom +} + +type envYield struct { + env *Environment + yield Yielder + id CloseInfo + err *Bottom +} + +type envList struct { + env *Environment + list *ListLit + n int64 // recorded length after evaluator + elipsis *Ellipsis + id CloseInfo +} + +func (n *nodeContext) addBottom(b *Bottom) { + n.errs = CombineErrors(nil, n.errs, b) + // TODO(errors): consider doing this + // n.kindExpr = n.errs + // n.kind = 0 +} + +func (n *nodeContext) addErr(err errors.Error) { + if err != nil { + n.addBottom(&Bottom{Err: err}) + } +} + +// addExprConjuncts will attempt to evaluate an Expr and insert the value +// into the nodeContext if successful or queue it for later evaluation if it is +// incomplete or is not value. +func (n *nodeContext) addExprConjunct(v Conjunct) { + env := v.Env + id := v.CloseInfo + + switch x := v.Expr().(type) { + case *Vertex: + if x.IsData() { + n.addValueConjunct(env, x, id) + } else { + n.addVertexConjuncts(env, id, x, x, true) + } + + case Value: + n.addValueConjunct(env, x, id) + + case *BinaryExpr: + if x.Op == AndOp { + n.addExprConjunct(MakeConjunct(env, x.X, id)) + n.addExprConjunct(MakeConjunct(env, x.Y, id)) + } else { + n.evalExpr(v) + } + + case *StructLit: + n.addStruct(env, x, id) + + case *ListLit: + n.lists = append(n.lists, envList{env: env, list: x, id: id}) + + case *DisjunctionExpr: + n.addDisjunction(env, x, id) + + default: + // Must be Resolver or Evaluator. + n.evalExpr(v) + } +} + +// evalExpr is only called by addExprConjunct. 
If an error occurs, it records +// the error in n and returns nil. +func (n *nodeContext) evalExpr(v Conjunct) { + // Require an Environment. + ctx := n.ctx + + closeID := v.CloseInfo + + // TODO: see if we can do without these counters. + for _, d := range v.Env.Deref { + d.EvalCount++ + } + for _, d := range v.Env.Cycles { + d.SelfCount++ + } + defer func() { + for _, d := range v.Env.Deref { + d.EvalCount-- + } + for _, d := range v.Env.Cycles { + d.SelfCount++ + } + }() + + switch x := v.Expr().(type) { + case Resolver: + arc, err := ctx.Resolve(v.Env, x) + if err != nil && !err.IsIncomplete() { + n.addBottom(err) + break + } + if arc == nil { + n.exprs = append(n.exprs, envExpr{v, err}) + break + } + + n.addVertexConjuncts(v.Env, v.CloseInfo, v.Expr(), arc, false) + + case Evaluator: + // Interpolation, UnaryExpr, BinaryExpr, CallExpr + // Could be unify? + val := ctx.evaluateRec(v.Env, v.Expr(), Partial) + if b, ok := val.(*Bottom); ok && b.IsIncomplete() { + n.exprs = append(n.exprs, envExpr{v, b}) + break + } + + if v, ok := val.(*Vertex); ok { + // Handle generated disjunctions (as in the 'or' builtin). + // These come as a Vertex, but should not be added as a value. + b, ok := v.BaseValue.(*Bottom) + if ok && b.IsIncomplete() && len(v.Conjuncts) > 0 { + for _, c := range v.Conjuncts { + c.CloseInfo = closeID + n.addExprConjunct(c) + } + break + } + } + + // TODO: also to through normal Vertex handling here. At the moment + // addValueConjunct handles StructMarker.NeedsClose, as this is always + // only needed when evaluation an Evaluator, and not a Resolver. + // The two code paths should ideally be merged once this separate + // mechanism is eliminated. 
+ // + // if arc, ok := val.(*Vertex); ok && !arc.IsData() { + // n.addVertexConjuncts(v.Env, closeID, v.Expr(), arc) + // break + // } + + // TODO: insert in vertex as well + n.addValueConjunct(v.Env, val, closeID) + + default: + panic(fmt.Sprintf("unknown expression of type %T", x)) + } +} + +func (n *nodeContext) addVertexConjuncts(env *Environment, closeInfo CloseInfo, x Expr, arc *Vertex, inline bool) { + + // We need to ensure that each arc is only unified once (or at least) a + // bounded time, witch each conjunct. Comprehensions, for instance, may + // distribute a value across many values that get unified back into the + // same value. If such a value is a disjunction, than a disjunction of N + // disjuncts will result in a factor N more unifications for each + // occurrence of such value, resulting in exponential running time. This + // is especially common values that are used as a type. + // + // However, unification is idempotent, so each such conjunct only needs + // to be unified once. This cache checks for this and prevents an + // exponential blowup in such case. + // + // TODO(perf): this cache ensures the conjuncts of an arc at most once + // per ID. However, we really need to add the conjuncts of an arc only + // once total, and then add the close information once per close ID + // (pointer can probably be shared). Aside from being more performant, + // this is probably the best way to guarantee that conjunctions are + // linear in this case. + key := arcKey{arc, closeInfo} + for _, k := range n.arcMap { + if key == k { + return + } + } + n.arcMap = append(n.arcMap, key) + + // Pass detection of structural cycles from parent to children. + cyclic := false + if env != nil { + // If a reference is in a tainted set, so is the value it refers to. + cyclic = env.Cyclic + } + + status := arc.Status() + + switch status { + case Evaluating: + // Reference cycle detected. 
We have reached a fixed point and + // adding conjuncts at this point will not change the value. Also, + // continuing to pursue this value will result in an infinite loop. + + // TODO: add a mechanism so that the computation will only have to + // be done once? + + if arc == n.node { + // TODO: we could use node sharing here. This may avoid an + // exponential blowup during evaluation, like is possible with + // YAML. + return + } + + case EvaluatingArcs: + // Structural cycle detected. Continue evaluation as usual, but + // keep track of whether any other conjuncts without structural + // cycles are added. If not, evaluation of child arcs will end + // with this node. + + // For the purpose of determining whether at least one non-cyclic + // conjuncts exists, we consider all conjuncts of a cyclic conjuncts + // also cyclic. + + cyclic = true + n.hasCycle = true + + // As the EvaluatingArcs mechanism bypasses the self-reference + // mechanism, we need to separately keep track of it here. + // If this (originally) is a self-reference node, adding them + // will result in recursively adding the same reference. For this + // we also mark the node as evaluating. + if arc.SelfCount > 0 { + return + } + + // This count is added for values that are directly added below. + // The count is handled separately for delayed values. + arc.SelfCount++ + defer func() { arc.SelfCount-- }() + } + + closeInfo = closeInfo.SpawnRef(arc, IsDef(x), x) + + if arc.status == 0 && !inline { + // This is a rare condition, but can happen in certain + // evaluation orders. Unfortunately, adding this breaks + // resolution of cyclic mutually referring disjunctions. But it + // is necessary to prevent lookups in unevaluated structs. + // TODO(cycles): this can probably most easily be fixed with a + // having a more recursive implementation. 
+ n.ctx.Unify(arc, AllArcs) + } + + for _, c := range arc.Conjuncts { + var a []*Vertex + if env != nil { + a = env.Deref + } + if inline { + c = updateCyclic(c, cyclic, nil, nil) + } else { + c = updateCyclic(c, cyclic, arc, a) + } + + // Note that we are resetting the tree here. We hereby assume that + // closedness conflicts resulting from unifying the referenced arc were + // already caught there and that we can ignore further errors here. + c.CloseInfo = closeInfo + n.addExprConjunct(c) + } +} + +// isDef reports whether an expressions is a reference that references a +// definition anywhere in its selection path. +// +// TODO(performance): this should be merged with resolve(). But for now keeping +// this code isolated makes it easier to see what it is for. +func isDef(x Expr) bool { + switch r := x.(type) { + case *FieldReference: + return r.Label.IsDef() + + case *SelectorExpr: + if r.Sel.IsDef() { + return true + } + return isDef(r.X) + + case *IndexExpr: + return isDef(r.X) + } + return false +} + +// updateCyclicStatus looks for proof of non-cyclic conjuncts to override +// a structural cycle. +func (n *nodeContext) updateCyclicStatus(env *Environment) { + if env == nil || !env.Cyclic { + n.hasNonCycle = true + } +} + +func updateCyclic(c Conjunct, cyclic bool, deref *Vertex, a []*Vertex) Conjunct { + env := c.Env + switch { + case env == nil: + if !cyclic && deref == nil { + return c + } + env = &Environment{Cyclic: cyclic} + case deref == nil && env.Cyclic == cyclic && len(a) == 0: + return c + default: + // The conjunct may still be in use in other fields, so we should + // make a new copy to mark Cyclic only for this case. + e := *env + e.Cyclic = e.Cyclic || cyclic + env = &e + } + if deref != nil || len(a) > 0 { + cp := make([]*Vertex, 0, len(a)+1) + cp = append(cp, a...) 
+ if deref != nil { + cp = append(cp, deref) + } + env.Deref = cp + } + if deref != nil { + env.Cycles = append(env.Cycles, deref) + } + return MakeConjunct(env, c.Expr(), c.CloseInfo) +} + +func (n *nodeContext) addValueConjunct(env *Environment, v Value, id CloseInfo) { + n.updateCyclicStatus(env) + + ctx := n.ctx + + if x, ok := v.(*Vertex); ok { + if m, ok := x.BaseValue.(*StructMarker); ok { + n.aStruct = x + n.aStructID = id + if m.NeedClose { + id = id.SpawnRef(x, IsDef(x), x) + id.IsClosed = true + } + } + + cyclic := env != nil && env.Cyclic + + if !x.IsData() { + // TODO: this really shouldn't happen anymore. + if isComplexStruct(ctx, x) { + // This really shouldn't happen, but just in case. + n.addVertexConjuncts(env, id, x, x, true) + return + } + + for _, c := range x.Conjuncts { + c = updateCyclic(c, cyclic, nil, nil) + c.CloseInfo = id + n.addExprConjunct(c) // TODO: Pass from eval + } + return + } + + // TODO: evaluate value? + switch v := x.BaseValue.(type) { + default: + panic(fmt.Sprintf("invalid type %T", x.BaseValue)) + + case *ListMarker: + n.vLists = append(n.vLists, x) + return + + case *StructMarker: + + case Value: + n.addValueConjunct(env, v, id) + } + + if len(x.Arcs) == 0 { + return + } + + s := &StructLit{} + + // Keep ordering of Go struct for topological sort. + n.node.AddStruct(s, env, id) + n.node.Structs = append(n.node.Structs, x.Structs...) + + for _, a := range x.Arcs { + // TODO(errors): report error when this is a regular field. 
+ c := MakeConjunct(nil, a, id) + c = updateCyclic(c, cyclic, nil, nil) + n.insertField(a.Label, c) + s.MarkField(a.Label) + } + return + } + + switch b := v.(type) { + case *Bottom: + n.addBottom(b) + return + case *Builtin: + if v := b.BareValidator(); v != nil { + n.addValueConjunct(env, v, id) + return + } + } + + if !n.updateNodeType(v.Kind(), v, id) { + return + } + + switch x := v.(type) { + case *Disjunction: + n.addDisjunctionValue(env, x, id) + + case *Conjunction: + for _, x := range x.Values { + n.addValueConjunct(env, x, id) + } + + case *Top: + n.hasTop = true + + case *BasicType: + // handled above + + case *BoundValue: + switch x.Op { + case LessThanOp, LessEqualOp: + if y := n.upperBound; y != nil { + n.upperBound = nil + v := SimplifyBounds(ctx, n.kind, x, y) + if err := valueError(v); err != nil { + err.AddPosition(v) + err.AddPosition(n.upperBound) + err.AddClosedPositions(id) + } + n.addValueConjunct(env, v, id) + return + } + n.upperBound = x + + case GreaterThanOp, GreaterEqualOp: + if y := n.lowerBound; y != nil { + n.lowerBound = nil + v := SimplifyBounds(ctx, n.kind, x, y) + if err := valueError(v); err != nil { + err.AddPosition(v) + err.AddPosition(n.lowerBound) + err.AddClosedPositions(id) + } + n.addValueConjunct(env, v, id) + return + } + n.lowerBound = x + + case EqualOp, NotEqualOp, MatchOp, NotMatchOp: + // This check serves as simplifier, but also to remove duplicates. + k := 0 + match := false + for _, c := range n.checks { + if y, ok := c.(*BoundValue); ok { + switch z := SimplifyBounds(ctx, n.kind, x, y); { + case z == y: + match = true + case z == x: + continue + } + } + n.checks[k] = c + k++ + } + n.checks = n.checks[:k] + if !match { + n.checks = append(n.checks, x) + } + return + } + + case Validator: + // This check serves as simplifier, but also to remove duplicates. 
+ for i, y := range n.checks { + if b := SimplifyValidator(ctx, x, y); b != nil { + n.checks[i] = b + return + } + } + n.updateNodeType(x.Kind(), x, id) + n.checks = append(n.checks, x) + + case *Vertex: + // handled above. + + case Value: // *NullLit, *BoolLit, *NumLit, *StringLit, *BytesLit, *Builtin + if y := n.scalar; y != nil { + if b, ok := BinOp(ctx, EqualOp, x, y).(*Bool); !ok || !b.B { + n.addConflict(x, y, x.Kind(), y.Kind(), n.scalarID, id) + } + // TODO: do we need to explicitly add again? + // n.scalar = nil + // n.addValueConjunct(c, BinOp(c, EqualOp, x, y)) + break + } + n.scalar = x + n.scalarID = id + + default: + panic(fmt.Sprintf("unknown value type %T", x)) + } + + if n.lowerBound != nil && n.upperBound != nil { + if u := SimplifyBounds(ctx, n.kind, n.lowerBound, n.upperBound); u != nil { + if err := valueError(u); err != nil { + err.AddPosition(n.lowerBound) + err.AddPosition(n.upperBound) + err.AddClosedPositions(id) + } + n.lowerBound = nil + n.upperBound = nil + n.addValueConjunct(env, u, id) + } + } +} + +func valueError(v Value) *ValueError { + if v == nil { + return nil + } + b, _ := v.(*Bottom) + if b == nil { + return nil + } + err, _ := b.Err.(*ValueError) + if err == nil { + return nil + } + return err +} + +// addStruct collates the declarations of a struct. +// +// addStruct fulfills two additional pivotal functions: +// 1) Implement vertex unification (this happens through De Bruijn indices +// combined with proper set up of Environments). +// 2) Implied closedness for definitions. +// +func (n *nodeContext) addStruct( + env *Environment, + s *StructLit, + closeInfo CloseInfo) { + + n.updateCyclicStatus(env) // to handle empty structs. + + ctx := n.ctx + + // NOTE: This is a crucial point in the code: + // Unification derferencing happens here. The child nodes are set to + // an Environment linked to the current node. Together with the De Bruijn + // indices, this determines to which Vertex a reference resolves. 
+ + // TODO(perf): consider using environment cache: + // var childEnv *Environment + // for _, s := range n.nodeCache.sub { + // if s.Up == env { + // childEnv = s + // } + // } + childEnv := &Environment{ + Up: env, + Vertex: n.node, + } + if env != nil { + childEnv.Cyclic = env.Cyclic + childEnv.Deref = env.Deref + } + + s.Init() + + if s.HasEmbed && !s.IsFile() { + closeInfo = closeInfo.SpawnGroup(nil) + } + + parent := n.node.AddStruct(s, childEnv, closeInfo) + closeInfo.IsClosed = false + parent.Disable = true // disable until processing is done. + + for _, d := range s.Decls { + switch x := d.(type) { + case *Field: + // handle in next iteration. + + case *DynamicField: + n.aStruct = s + n.aStructID = closeInfo + n.dynamicFields = append(n.dynamicFields, envDynamic{childEnv, x, closeInfo, nil}) + + case *ForClause: + // Why is this not an embedding? + n.forClauses = append(n.forClauses, envYield{childEnv, x, closeInfo, nil}) + + case Yielder: + // Why is this not an embedding? + n.ifClauses = append(n.ifClauses, envYield{childEnv, x, closeInfo, nil}) + + case Expr: + // add embedding to optional + + // TODO(perf): only do this if addExprConjunct below will result in + // a fieldSet. Otherwise the entry will just be removed next. + id := closeInfo.SpawnEmbed(x) + + // push and opo embedding type. + n.addExprConjunct(MakeConjunct(childEnv, x, id)) + + case *OptionalField, *BulkOptionalField, *Ellipsis: + // Nothing to do here. Note that the precense of these fields do not + // excluded embedded scalars: only when they match actual fields + // does it exclude those. + + default: + panic("unreachable") + } + } + + if !s.HasEmbed { + n.aStruct = s + n.aStructID = closeInfo + } + + // Apply existing fields + for _, arc := range n.node.Arcs { + // Reuse Acceptor interface. 
+ parent.MatchAndInsert(ctx, arc) + } + + parent.Disable = false + + for _, d := range s.Decls { + switch x := d.(type) { + case *Field: + if x.Label.IsString() { + n.aStruct = s + n.aStructID = closeInfo + } + n.insertField(x.Label, MakeConjunct(childEnv, x, closeInfo)) + } + } +} + +// TODO(perf): if an arc is the only arc with that label added to a Vertex, and +// if there are no conjuncts of optional fields to be added, then the arc could +// be added as is until any of these conditions change. This would allow +// structure sharing in many cases. One should be careful, however, to +// recursively track arcs of previously unified evaluated vertices ot make this +// optimization meaningful. +// +// An alternative approach to avoid evaluating optional arcs (if we take that +// route) is to not recursively evaluate those arcs, even for Finalize. This is +// possible as it is not necessary to evaluate optional arcs to evaluate +// disjunctions. +func (n *nodeContext) insertField(f Feature, x Conjunct) *Vertex { + ctx := n.ctx + arc, isNew := n.node.GetArc(ctx, f) + + arc.addConjunct(x) + + switch { + case isNew: + for _, s := range n.node.Structs { + if s.Disable { + continue + } + s.MatchAndInsert(ctx, arc) + } + + case arc.state != nil: + s := arc.state + switch { + case arc.Status() <= AllArcs: + // This may happen when a struct has multiple comprehensions, where + // the insertion of one of which depends on the outcome of another. + + // TODO: to something more principled by allowing values to + // monotonically increase. + arc.status = Partial + arc.BaseValue = nil + s.disjuncts = s.disjuncts[:0] + s.disjunctErrs = s.disjunctErrs[:0] + + fallthrough + + default: + arc.state.addExprConjunct(x) + } + + case arc.Status() == 0: + default: + n.addErr(ctx.NewPosf(pos(x.Field()), + "cannot add field %s: was already used", + f.SelectorString(ctx))) + } + return arc +} + +// expandOne adds dynamic fields to a node until a fixed point is reached. 
+// On each iteration, dynamic fields that cannot resolve due to incomplete +// values are skipped. They will be retried on the next iteration until no +// progress can be made. Note that a dynamic field may add more dynamic fields. +// +// forClauses are processed after all other clauses. A struct may be referenced +// before it is complete, meaning that fields added by other forms of injection +// may influence the result of a for clause _after_ it has already been +// processed. We could instead detect such insertion and feed it to the +// ForClause to generate another entry or have the for clause be recomputed. +// This seems to be too complicated and lead to iffy edge cases. +// TODO(errors): detect when a field is added to a struct that is already used +// in a for clause. +func (n *nodeContext) expandOne() (done bool) { + // Don't expand incomplete expressions if we detected a cycle. + if n.done() || (n.hasCycle && !n.hasNonCycle) { + return false + } + + var progress bool + + if progress = n.injectDynamic(); progress { + return true + } + + if progress = n.injectEmbedded(&(n.ifClauses)); progress { + return true + } + + if progress = n.injectEmbedded(&(n.forClauses)); progress { + return true + } + + // Do expressions after comprehensions, as comprehensions can never + // refer to embedded scalars, whereas expressions may refer to generated + // fields if we were to allow attributes to be defined alongside + // scalars. + exprs := n.exprs + n.exprs = n.exprs[:0] + for _, x := range exprs { + n.addExprConjunct(x.c) + + // collect and and or + } + if len(n.exprs) < len(exprs) { + return true + } + + // No progress, report error later if needed: unification with + // disjuncts may resolve this later later on. + return false +} + +// injectDynamic evaluates and inserts dynamic declarations. 
+func (n *nodeContext) injectDynamic() (progress bool) { + ctx := n.ctx + k := 0 + + a := n.dynamicFields + for _, d := range n.dynamicFields { + var f Feature + v, complete := ctx.Evaluate(d.env, d.field.Key) + if !complete { + d.err, _ = v.(*Bottom) + a[k] = d + k++ + continue + } + if b, _ := v.(*Bottom); b != nil { + n.addValueConjunct(nil, b, d.id) + continue + } + f = ctx.Label(d.field.Key, v) + n.insertField(f, MakeConjunct(d.env, d.field, d.id)) + } + + progress = k < len(n.dynamicFields) + + n.dynamicFields = a[:k] + + return progress +} + +// injectEmbedded evaluates and inserts embeddings. It first evaluates all +// embeddings before inserting the results to ensure that the order of +// evaluation does not matter. +func (n *nodeContext) injectEmbedded(all *[]envYield) (progress bool) { + ctx := n.ctx + type envStruct struct { + env *Environment + s *StructLit + } + var sa []envStruct + f := func(env *Environment, st *StructLit) { + sa = append(sa, envStruct{env, st}) + } + + k := 0 + for i := 0; i < len(*all); i++ { + d := (*all)[i] + sa = sa[:0] + + if err := ctx.Yield(d.env, d.yield, f); err != nil { + if err.IsIncomplete() { + d.err = err + (*all)[k] = d + k++ + } else { + // continue to collect other errors. + n.addBottom(err) + } + continue + } + + if len(sa) == 0 { + continue + } + id := d.id.SpawnSpan(d.yield, ComprehensionSpan) + + n.ctx.nonMonotonicInsertNest++ + for _, st := range sa { + n.addStruct(st.env, st.s, id) + } + n.ctx.nonMonotonicInsertNest-- + } + + progress = k < len(*all) + + *all = (*all)[:k] + + return progress +} + +// addLists +// +// TODO: association arrays: +// If an association array marker was present in a struct, create a struct node +// instead of a list node. In either case, a node may only have list fields +// or struct fields and not both. 
+// +// addLists should be run after the fixpoint expansion: +// - it enforces that comprehensions may not refer to the list itself +// - there may be no other fields within the list. +// +// TODO(embeddedScalars): for embedded scalars, there should be another pass +// of evaluation expressions after expanding lists. +func (n *nodeContext) addLists() (oneOfTheLists Expr, anID CloseInfo) { + if len(n.lists) == 0 && len(n.vLists) == 0 { + return nil, CloseInfo{} + } + + isOpen := true + max := 0 + var maxNode Expr + + if m, ok := n.node.BaseValue.(*ListMarker); ok { + isOpen = m.IsOpen + max = len(n.node.Arcs) + } + + c := n.ctx + + for _, l := range n.vLists { + oneOfTheLists = l + + elems := l.Elems() + isClosed := l.IsClosed(c) + + switch { + case len(elems) < max: + if isClosed { + n.invalidListLength(len(elems), max, l, maxNode) + continue + } + + case len(elems) > max: + if !isOpen { + n.invalidListLength(max, len(elems), maxNode, l) + continue + } + isOpen = !isClosed + max = len(elems) + maxNode = l + + case isClosed: + isOpen = false + maxNode = l + } + + for _, a := range elems { + if a.Conjuncts == nil { + x := a.BaseValue.(Value) + n.insertField(a.Label, MakeConjunct(nil, x, CloseInfo{})) + continue + } + for _, c := range a.Conjuncts { + n.insertField(a.Label, c) + } + } + } + +outer: + for i, l := range n.lists { + n.updateCyclicStatus(l.env) + + index := int64(0) + hasComprehension := false + for j, elem := range l.list.Elems { + switch x := elem.(type) { + case Yielder: + err := c.Yield(l.env, x, func(e *Environment, st *StructLit) { + label, err := MakeLabel(x.Source(), index, IntLabel) + n.addErr(err) + index++ + c := MakeConjunct(e, st, l.id) + n.insertField(label, c) + }) + hasComprehension = true + if err != nil { + n.addBottom(err) + continue outer + } + + case *Ellipsis: + if j != len(l.list.Elems)-1 { + n.addErr(c.Newf("ellipsis must be last element in list")) + } + + n.lists[i].elipsis = x + + default: + label, err := MakeLabel(x.Source(), 
index, IntLabel) + n.addErr(err) + index++ // TODO: don't use insertField. + n.insertField(label, MakeConjunct(l.env, x, l.id)) + } + + // Terminate early n case of runaway comprehension. + if !isOpen && int(index) > max { + n.invalidListLength(max, int(index), maxNode, l.list) + continue outer + } + } + + oneOfTheLists = l.list + anID = l.id + + switch closed := n.lists[i].elipsis == nil; { + case int(index) < max: + if closed { + n.invalidListLength(int(index), max, l.list, maxNode) + continue + } + + case int(index) > max, + closed && isOpen, + (!closed == isOpen) && !hasComprehension: + max = int(index) + maxNode = l.list + isOpen = !closed + } + + n.lists[i].n = index + } + + // add additionalItem values to list and construct optionals. + elems := n.node.Elems() + for _, l := range n.vLists { + if !l.IsClosed(c) { + continue + } + + newElems := l.Elems() + if len(newElems) >= len(elems) { + continue // error generated earlier, if applicable. + } + + for _, arc := range elems[len(newElems):] { + l.MatchAndInsert(c, arc) + } + } + + for _, l := range n.lists { + if l.elipsis == nil { + continue + } + + s := &StructLit{Decls: []Decl{l.elipsis}} + s.Init() + info := n.node.AddStruct(s, l.env, l.id) + + for _, arc := range elems[l.n:] { + info.MatchAndInsert(c, arc) + } + } + + sources := []ast.Expr{} + // Add conjuncts for additional items. + for _, l := range n.lists { + if l.elipsis == nil { + continue + } + if src, _ := l.elipsis.Source().(ast.Expr); src != nil { + sources = append(sources, src) + } + } + + if m, ok := n.node.BaseValue.(*ListMarker); !ok { + n.node.SetValue(c, Partial, &ListMarker{ + Src: ast.NewBinExpr(token.AND, sources...), + IsOpen: isOpen, + }) + } else { + if expr, _ := m.Src.(ast.Expr); expr != nil { + sources = append(sources, expr) + } + m.Src = ast.NewBinExpr(token.AND, sources...) 
+ m.IsOpen = m.IsOpen && isOpen + } + + n.lists = n.lists[:0] + n.vLists = n.vLists[:0] + + return oneOfTheLists, anID +} + +func (n *nodeContext) invalidListLength(na, nb int, a, b Expr) { + n.addErr(n.ctx.Newf("incompatible list lengths (%d and %d)", na, nb)) +} diff --git a/vendor/cuelang.org/go/internal/core/adt/expr.go b/vendor/cuelang.org/go/internal/core/adt/expr.go new file mode 100644 index 000000000..29245c0da --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/expr.go @@ -0,0 +1,1693 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package adt + +import ( + "bytes" + "fmt" + "io" + "regexp" + + "github.com/cockroachdb/apd/v2" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/token" +) + +// A StructLit represents an unevaluated struct literal or file body. +type StructLit struct { + Src ast.Node // ast.File or ast.StructLit + Decls []Decl + + // TODO: record the merge order somewhere. + + // The below fields are redundant to Decls and are computed with Init. + + // field marks the optional conjuncts of all explicit Fields. + // Required Fields are marked as empty + Fields []FieldInfo + + Dynamic []*DynamicField + + // excluded are all literal fields that already exist. + Bulk []*BulkOptionalField + + Additional []Expr + HasEmbed bool + IsOpen bool // has a ... + initialized bool + + types OptionalType + + // administrative fields like hasreferences. 
+ // hasReferences bool +} + +func (o *StructLit) IsFile() bool { + _, ok := o.Src.(*ast.File) + return ok +} + +type FieldInfo struct { + Label Feature + Optional []Node +} + +func (x *StructLit) HasOptional() bool { + return x.types&(HasField|HasPattern|HasAdditional) != 0 +} + +func (x *StructLit) Source() ast.Node { return x.Src } + +func (x *StructLit) evaluate(c *OpContext) Value { + e := c.Env(0) + v := &Vertex{Conjuncts: []Conjunct{{e, x, CloseInfo{}}}} + // evaluate may not finalize a field, as the resulting value may be + // used in a context where more conjuncts are added. It may also lead + // to disjuncts being in a partially expanded state, leading to + // misaligned nodeContexts. + c.Unify(v, AllArcs) + return v +} + +// TODO: remove this method +func (o *StructLit) MarkField(f Feature) { + o.Fields = append(o.Fields, FieldInfo{Label: f}) +} + +func (o *StructLit) Init() { + if o.initialized { + return + } + o.initialized = true + for _, d := range o.Decls { + switch x := d.(type) { + case *Field: + if o.fieldIndex(x.Label) < 0 { + o.Fields = append(o.Fields, FieldInfo{Label: x.Label}) + } + + case *OptionalField: + p := o.fieldIndex(x.Label) + if p < 0 { + p = len(o.Fields) + o.Fields = append(o.Fields, FieldInfo{Label: x.Label}) + } + o.Fields[p].Optional = append(o.Fields[p].Optional, x) + o.types |= HasField + + case *DynamicField: + o.Dynamic = append(o.Dynamic, x) + o.types |= HasDynamic + + case Expr: + o.HasEmbed = true + + case *ForClause, Yielder: + o.HasEmbed = true + + case *BulkOptionalField: + o.Bulk = append(o.Bulk, x) + o.types |= HasPattern + switch x.Filter.(type) { + case *BasicType, *Top: + default: + o.types |= HasComplexPattern + } + + case *Ellipsis: + expr := x.Value + if x.Value == nil { + o.IsOpen = true + o.types |= IsOpen + // TODO(perf): encode more efficiently. 
+ expr = &Top{} + } else { + o.types |= HasAdditional + } + o.Additional = append(o.Additional, expr) + + default: + panic("unreachable") + } + } +} + +func (o *StructLit) fieldIndex(f Feature) int { + for i := range o.Fields { + if o.Fields[i].Label == f { + return i + } + } + return -1 +} + +func (o *StructLit) OptionalTypes() OptionalType { + return o.types +} + +func (o *StructLit) IsOptional(label Feature) bool { + for _, f := range o.Fields { + if f.Label == label && len(f.Optional) > 0 { + return true + } + } + return false +} + +// FIELDS +// +// Fields can also be used as expressions whereby the value field is the +// expression this allows retaining more context. + +// Field represents a field with a fixed label. It can be a regular field, +// definition or hidden field. +// +// foo: bar +// #foo: bar +// _foo: bar +// +// Legacy: +// +// Foo :: bar +// +type Field struct { + Src *ast.Field + + Label Feature + Value Expr +} + +func (x *Field) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +// An OptionalField represents an optional regular field. +// +// foo?: expr +// +type OptionalField struct { + Src *ast.Field + Label Feature + Value Expr +} + +func (x *OptionalField) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +// A BulkOptionalField represents a set of optional field. +// +// [expr]: expr +// +type BulkOptionalField struct { + Src *ast.Field // Elipsis or Field + Filter Expr + Value Expr + Label Feature // for reference and formatting +} + +func (x *BulkOptionalField) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +// A Ellipsis represents a set of optional fields of a given type. +// +// ...T +// +type Ellipsis struct { + Src *ast.Ellipsis + Value Expr +} + +func (x *Ellipsis) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +// A DynamicField represents a regular field for which the key is computed. 
+// +// "\(expr)": expr +// (expr): expr +// +type DynamicField struct { + Src *ast.Field + Key Expr + Value Expr +} + +func (x *DynamicField) IsOptional() bool { + return x.Src.Optional != token.NoPos +} + +func (x *DynamicField) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +// A ListLit represents an unevaluated list literal. +// +// [a, for x in src { ... }, b, ...T] +// +type ListLit struct { + Src *ast.ListLit + + // scalars, comprehensions, ...T + Elems []Elem +} + +func (x *ListLit) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *ListLit) evaluate(c *OpContext) Value { + e := c.Env(0) + v := &Vertex{Conjuncts: []Conjunct{{e, x, CloseInfo{}}}} + // TODO: should be AllArcs and then use Finalize for builtins? + c.Unify(v, Finalized) // TODO: also partial okay? + return v +} + +// Null represents null. It can be used as a Value and Expr. +type Null struct { + Src ast.Node +} + +func (x *Null) Source() ast.Node { return x.Src } +func (x *Null) Kind() Kind { return NullKind } + +// Bool is a boolean value. It can be used as a Value and Expr. +type Bool struct { + Src ast.Node + B bool +} + +func (x *Bool) Source() ast.Node { return x.Src } +func (x *Bool) Kind() Kind { return BoolKind } + +// Num is a numeric value. It can be used as a Value and Expr. +type Num struct { + Src ast.Node + K Kind // needed? + X apd.Decimal // Is integer if the apd.Decimal is an integer. +} + +// TODO: do we need this? +// func NewNumFromString(src ast.Node, s string) Value { +// n := &Num{Src: src, K: IntKind} +// if strings.ContainsAny(s, "eE.") { +// n.K = FloatKind +// } +// _, _, err := n.X.SetString(s) +// if err != nil { +// pos := token.NoPos +// if src != nil { +// pos = src.Pos() +// } +// return &Bottom{Err: errors.Newf(pos, "invalid number: %v", err)} +// } +// return n +// } + +func (x *Num) Source() ast.Node { return x.Src } +func (x *Num) Kind() Kind { return x.K } + +// TODO: do we still need this? 
+// func (x *Num) Specialize(k Kind) Value { +// k = k & x.K +// if k == x.K { +// return x +// } +// y := *x +// y.K = k +// return &y +// } + +// String is a string value. It can be used as a Value and Expr. +type String struct { + Src ast.Node + Str string + RE *regexp.Regexp // only set if needed +} + +func (x *String) Source() ast.Node { return x.Src } +func (x *String) Kind() Kind { return StringKind } + +// Bytes is a bytes value. It can be used as a Value and Expr. +type Bytes struct { + Src ast.Node + B []byte + RE *regexp.Regexp // only set if needed +} + +func (x *Bytes) Source() ast.Node { return x.Src } +func (x *Bytes) Kind() Kind { return BytesKind } + +// Composites: the evaluated fields of a composite are recorded in the arc +// vertices. + +type ListMarker struct { + Src ast.Node + IsOpen bool +} + +func (x *ListMarker) Source() ast.Node { return x.Src } +func (x *ListMarker) Kind() Kind { return ListKind } +func (x *ListMarker) node() {} + +type StructMarker struct { + // NeedClose is used to signal that the evaluator should close this struct. + // It is only set by the close builtin. + NeedClose bool +} + +func (x *StructMarker) Source() ast.Node { return nil } +func (x *StructMarker) Kind() Kind { return StructKind } +func (x *StructMarker) node() {} + +// Top represents all possible values. It can be used as a Value and Expr. +type Top struct{ Src *ast.Ident } + +func (x *Top) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} +func (x *Top) Kind() Kind { return TopKind } + +// BasicType represents all values of a certain Kind. It can be used as a Value +// and Expr. +// +// string +// int +// num +// bool +// +type BasicType struct { + Src *ast.Ident + K Kind +} + +func (x *BasicType) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} +func (x *BasicType) Kind() Kind { return x.K } + +// TODO: do we still need this? 
+// func (x *BasicType) Specialize(k Kind) Value { +// k = x.K & k +// if k == x.K { +// return x +// } +// y := *x +// y.K = k +// return &y +// } + +// TODO: should we use UnaryExpr for Bound now we have BoundValue? + +// BoundExpr represents an unresolved unary comparator. +// +// Concrete { + return ctx.NewErrf("bound has fixed non-concrete value") + } + return &BoundValue{x.Src, x.Op, v} + } + + // This simplifies boundary expressions. It is an alternative to an + // evaluation strategy that makes nodes increasingly more specific. + // + // For instance, a completely different implementation would be to allow + // the precense of a concrete value to ignore incomplete errors. + // + // TODO: consider an alternative approach. + switch y := v.(type) { + case *BoundValue: + switch { + case y.Op == NotEqualOp: + switch x.Op { + case LessEqualOp, LessThanOp, GreaterEqualOp, GreaterThanOp: + // <(!=3) => number + // Smaller than an arbitrarily large number is any number. + return &BasicType{K: y.Kind()} + case NotEqualOp: + // !=(!=3) ==> 3 + // Not a value that is anything but a given value is that + // given value. + return y.Value + } + + case x.Op == NotEqualOp: + // Invert if applicable. + switch y.Op { + case LessEqualOp: + return &BoundValue{x.Src, GreaterThanOp, y.Value} + case LessThanOp: + return &BoundValue{x.Src, GreaterEqualOp, y.Value} + case GreaterEqualOp: + return &BoundValue{x.Src, LessThanOp, y.Value} + case GreaterThanOp: + return &BoundValue{x.Src, LessEqualOp, y.Value} + } + + case (x.Op == LessThanOp || x.Op == LessEqualOp) && + (y.Op == GreaterThanOp || y.Op == GreaterEqualOp), + (x.Op == GreaterThanOp || x.Op == GreaterEqualOp) && + (y.Op == LessThanOp || y.Op == LessEqualOp): + // <(>=3) + // Something smaller than an arbitrarily large number is any number. 
+ return &BasicType{K: y.Kind()} + + case x.Op == LessThanOp && + (y.Op == LessEqualOp || y.Op == LessThanOp), + x.Op == GreaterThanOp && + (y.Op == GreaterEqualOp || y.Op == GreaterThanOp): + // <(<=x) => <=x + // Less or equal than something that is less than x is less than x. + return y + } + + case *BasicType: + switch x.Op { + case LessEqualOp, LessThanOp, GreaterEqualOp, GreaterThanOp: + return y + } + } + if v.Concreteness() > Concrete { + // TODO(errors): analyze dependencies of x.Expr to get positions. + ctx.addErrf(IncompleteError, ctx.pos(), + "non-concrete value %s for bound %s", ctx.Str(x.Expr), x.Op) + return nil + } + return &BoundValue{x.Src, x.Op, v} +} + +// A BoundValue is a fully evaluated unary comparator that can be used to +// validate other values. +// +// <5 +// =~"Name$" +// +type BoundValue struct { + Src ast.Expr + Op Op + Value Value +} + +func (x *BoundValue) Source() ast.Node { return x.Src } +func (x *BoundValue) Kind() Kind { + k := x.Value.Kind() + switch k { + case IntKind, FloatKind, NumKind: + return NumKind + + case NullKind: + if x.Op == NotEqualOp { + return TopKind &^ NullKind + } + } + return k +} + +func (x *BoundValue) validate(c *OpContext, y Value) *Bottom { + a := y // Can be list or struct. + b := c.scalar(x.Value) + if c.HasErr() { + return c.Err() + } + + switch v := BinOp(c, x.Op, a, b).(type) { + case *Bottom: + return v + + case *Bool: + if v.B { + return nil + } + // TODO(errors): use "invalid value %v (not an %s)" if x is a + // predeclared identifier such as `int`. 
+ err := c.Newf("invalid value %v (out of bound %s)", + c.Str(y), c.Str(x)) + err.AddPosition(y) + return &Bottom{Src: c.src, Err: err, Code: EvalError} + + default: + panic(fmt.Sprintf("unsupported type %T", v)) + } +} + +func (x *BoundValue) validateStr(c *OpContext, a string) bool { + if str, ok := x.Value.(*String); ok { + b := str.Str + switch x.Op { + case LessEqualOp: + return a <= b + case LessThanOp: + return a < b + case GreaterEqualOp: + return a >= b + case GreaterThanOp: + return a > b + case EqualOp: + return a == b + case NotEqualOp: + return a != b + case MatchOp: + return c.regexp(x.Value).MatchString(a) + case NotMatchOp: + return !c.regexp(x.Value).MatchString(a) + } + } + return x.validate(c, &String{Str: a}) == nil +} + +func (x *BoundValue) validateInt(c *OpContext, a int64) bool { + switch n := x.Value.(type) { + case *Num: + b, err := n.X.Int64() + if err != nil { + break + } + switch x.Op { + case LessEqualOp: + return a <= b + case LessThanOp: + return a < b + case GreaterEqualOp: + return a >= b + case GreaterThanOp: + return a > b + case EqualOp: + return a == b + case NotEqualOp: + return a != b + } + } + return x.validate(c, c.NewInt64(a)) == nil +} + +// A NodeLink is used during computation to refer to an existing Vertex. +// It is used to signal a potential cycle or reference. +// Note that a NodeLink may be used as a value. This should be taken into +// account. +type NodeLink struct { + Node *Vertex +} + +func (x *NodeLink) Kind() Kind { + return x.Node.Kind() +} +func (x *NodeLink) Source() ast.Node { return x.Node.Source() } + +func (x *NodeLink) resolve(c *OpContext, state VertexStatus) *Vertex { + return x.Node +} + +// A FieldReference represents a lexical reference to a field. 
+// +// a +// +type FieldReference struct { + Src *ast.Ident + UpCount int32 + Label Feature +} + +func (x *FieldReference) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *FieldReference) resolve(c *OpContext, state VertexStatus) *Vertex { + n := c.relNode(x.UpCount) + pos := pos(x) + return c.lookup(n, pos, x.Label, state) +} + +// A LabelReference refers to the string or integer value of a label. +// +// [X=Pattern]: b: X +// +type LabelReference struct { + Src *ast.Ident + UpCount int32 +} + +// TODO: should this implement resolver at all? + +func (x *LabelReference) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *LabelReference) evaluate(ctx *OpContext) Value { + label := ctx.relLabel(x.UpCount) + if label == 0 { + // There is no label. This may happen if a LabelReference is evaluated + // outside of the context of a parent node, for instance if an + // "additional" items or properties is evaluated in isolation. + // + // TODO: this should return the pattern of the label. + return &BasicType{K: StringKind} + } + return label.ToValue(ctx) +} + +// A DynamicReference is like a LabelReference, but with a computed label. +// +// X=(x): X +// X="\(x)": X +// +type DynamicReference struct { + Src *ast.Ident + UpCount int32 + Label Expr + + // TODO: only use aliases and store the actual expression only in the scope. + // The feature is unique for every instance. This will also allow dynamic + // fields to be ordered among normal fields. + // + // This could also be used to assign labels to embedded values, if they + // don't match a label. 
+ Alias Feature +} + +func (x *DynamicReference) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *DynamicReference) resolve(ctx *OpContext, state VertexStatus) *Vertex { + e := ctx.Env(x.UpCount) + frame := ctx.PushState(e, x.Src) + v := ctx.value(x.Label) + ctx.PopState(frame) + f := ctx.Label(x.Label, v) + return ctx.lookup(e.Vertex, pos(x), f, state) +} + +// An ImportReference refers to an imported package. +// +// import "strings" +// +// strings.ToLower("Upper") +// +type ImportReference struct { + Src *ast.Ident + ImportPath Feature + Label Feature // for informative purposes +} + +func (x *ImportReference) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *ImportReference) resolve(ctx *OpContext, state VertexStatus) *Vertex { + path := x.ImportPath.StringValue(ctx) + v, _ := ctx.Runtime.LoadImport(path) + return v +} + +// A LetReference evaluates a let expression in its original environment. +// +// let X = x +// +type LetReference struct { + Src *ast.Ident + UpCount int32 + Label Feature // for informative purposes + X Expr +} + +func (x *LetReference) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *LetReference) resolve(c *OpContext, state VertexStatus) *Vertex { + e := c.Env(x.UpCount) + label := e.Vertex.Label + if x.X == nil { + panic("nil expression") + } + // Anonymous arc. + return &Vertex{Parent: nil, Label: label, Conjuncts: []Conjunct{{e, x.X, CloseInfo{}}}} +} + +func (x *LetReference) evaluate(c *OpContext) Value { + e := c.Env(x.UpCount) + + // Not caching let expressions may lead to exponential behavior. + return e.evalCached(c, x.X) +} + +// A SelectorExpr looks up a fixed field in an expression. 
+// +// X.Sel +// +type SelectorExpr struct { + Src *ast.SelectorExpr + X Expr + Sel Feature +} + +func (x *SelectorExpr) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *SelectorExpr) resolve(c *OpContext, state VertexStatus) *Vertex { + n := c.node(x, x.X, x.Sel.IsRegular(), state) + if n == emptyNode { + return n + } + return c.lookup(n, x.Src.Sel.Pos(), x.Sel, state) +} + +// IndexExpr is like a selector, but selects an index. +// +// X[Index] +// +type IndexExpr struct { + Src *ast.IndexExpr + X Expr + Index Expr +} + +func (x *IndexExpr) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *IndexExpr) resolve(ctx *OpContext, state VertexStatus) *Vertex { + // TODO: support byte index. + n := ctx.node(x, x.X, true, state) + i := ctx.value(x.Index) + if n == emptyNode { + return n + } + f := ctx.Label(x.Index, i) + return ctx.lookup(n, x.Src.Index.Pos(), f, state) +} + +// A SliceExpr represents a slice operation. (Not currently in spec.) 
+// +// X[Lo:Hi:Stride] +// +type SliceExpr struct { + Src *ast.SliceExpr + X Expr + Lo Expr + Hi Expr + Stride Expr +} + +func (x *SliceExpr) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *SliceExpr) evaluate(c *OpContext) Value { + // TODO: strides + + v := c.value(x.X) + const as = "slice index" + + switch v := v.(type) { + case nil: + c.addErrf(IncompleteError, c.pos(), + "non-concrete slice subject %s", c.Str(x.X)) + return nil + case *Vertex: + if !v.IsList() { + break + } + + var ( + lo = uint64(0) + hi = uint64(len(v.Arcs)) + ) + if x.Lo != nil { + lo = c.uint64(c.value(x.Lo), as) + } + if x.Hi != nil { + hi = c.uint64(c.value(x.Hi), as) + if hi > uint64(len(v.Arcs)) { + return c.NewErrf("index %d out of range", hi) + } + } + if lo > hi { + return c.NewErrf("invalid slice index: %d > %d", lo, hi) + } + + n := c.newList(c.src, v.Parent) + for i, a := range v.Arcs[lo:hi] { + label, err := MakeLabel(a.Source(), int64(i), IntLabel) + if err != nil { + c.AddBottom(&Bottom{Src: a.Source(), Err: err}) + return nil + } + arc := *a + arc.Parent = n + arc.Label = label + n.Arcs = append(n.Arcs, &arc) + } + n.status = Finalized + return n + + case *Bytes: + var ( + lo = uint64(0) + hi = uint64(len(v.B)) + ) + if x.Lo != nil { + lo = c.uint64(c.value(x.Lo), as) + } + if x.Hi != nil { + hi = c.uint64(c.value(x.Hi), as) + if hi > uint64(len(v.B)) { + return c.NewErrf("index %d out of range", hi) + } + } + if lo > hi { + return c.NewErrf("invalid slice index: %d > %d", lo, hi) + } + return c.newBytes(v.B[lo:hi]) + } + + if isError(v) { + return v + } + return c.NewErrf("cannot slice %v (type %s)", c.Str(v), v.Kind()) +} + +// An Interpolation is a string interpolation. 
+// +// "a \(b) c" +// +type Interpolation struct { + Src *ast.Interpolation + K Kind // string or bytes + Parts []Expr // odd: strings, even sources +} + +func (x *Interpolation) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *Interpolation) evaluate(c *OpContext) Value { + buf := bytes.Buffer{} + for _, e := range x.Parts { + v := c.value(e) + if x.K == BytesKind { + buf.Write(c.ToBytes(v)) + } else { + buf.WriteString(c.ToString(v)) + } + } + if err := c.Err(); err != nil { + err = &Bottom{ + Code: err.Code, + Err: errors.Wrapf(err.Err, pos(x), "invalid interpolation"), + } + // c.AddBottom(err) + // return nil + return err + } + if x.K == BytesKind { + return &Bytes{x.Src, buf.Bytes(), nil} + } + return &String{x.Src, buf.String(), nil} +} + +// UnaryExpr is a unary expression. +// +// Op X +// -X !X +X +// +type UnaryExpr struct { + Src *ast.UnaryExpr + Op Op + X Expr +} + +func (x *UnaryExpr) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *UnaryExpr) evaluate(c *OpContext) Value { + if !c.concreteIsPossible(x.Op, x.X) { + return nil + } + v := c.value(x.X) + if isError(v) { + return v + } + + op := x.Op + k := kind(v) + expectedKind := k + switch op { + case SubtractOp: + if v, ok := v.(*Num); ok { + f := *v + f.X.Neg(&v.X) + f.Src = x.Src + return &f + } + expectedKind = NumKind + + case AddOp: + if v, ok := v.(*Num); ok { + // TODO: wrap in thunk to save position of '+'? + return v + } + expectedKind = NumKind + + case NotOp: + if v, ok := v.(*Bool); ok { + return &Bool{x.Src, !v.B} + } + expectedKind = BoolKind + } + if k&expectedKind != BottomKind { + c.addErrf(IncompleteError, pos(x.X), + "operand %s of '%s' not concrete (was %s)", c.Str(x.X), op, k) + return nil + } + return c.NewErrf("invalid operation %s (%s %s)", c.Str(x), op, k) +} + +// BinaryExpr is a binary expression. 
+// +// X + Y +// X & Y +// +type BinaryExpr struct { + Src *ast.BinaryExpr + Op Op + X Expr + Y Expr +} + +func (x *BinaryExpr) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *BinaryExpr) evaluate(c *OpContext) Value { + env := c.Env(0) + if x.Op == AndOp { + // Anonymous Arc + v := &Vertex{Conjuncts: []Conjunct{{env, x, CloseInfo{}}}} + c.Unify(v, Finalized) + return v + } + + if !c.concreteIsPossible(x.Op, x.X) || !c.concreteIsPossible(x.Op, x.Y) { + return nil + } + + // TODO: allow comparing to a literal Bottom only. Find something more + // principled perhaps. One should especially take care that two values + // evaluating to Bottom don't evaluate to true. For now we check for + // Bottom here and require that one of the values be a Bottom literal. + if x.Op == EqualOp || x.Op == NotEqualOp { + if isLiteralBottom(x.X) { + return c.validate(env, x.Src, x.Y, x.Op) + } + if isLiteralBottom(x.Y) { + return c.validate(env, x.Src, x.X, x.Op) + } + } + + left, _ := c.Concrete(env, x.X, x.Op) + right, _ := c.Concrete(env, x.Y, x.Op) + + if err := CombineErrors(x.Src, left, right); err != nil { + return err + } + + if err := c.Err(); err != nil { + return err + } + + return BinOp(c, x.Op, left, right) +} + +func (c *OpContext) validate(env *Environment, src ast.Node, x Expr, op Op) (r Value) { + s := c.PushState(env, src) + if c.nonMonotonicLookupNest == 0 { + c.nonMonotonicGeneration++ + } + + var match bool + // NOTE: using Unwrap is maybe note entirely accurate, as it may discard + // a future error. However, if it does so, the error will at least be + // reported elsewhere. + switch b := c.value(x).(type) { + case nil: + case *Bottom: + if b.Code == CycleError { + c.PopState(s) + c.AddBottom(b) + return nil + } + match = op == EqualOp + // We have a nonmonotonic use of a failure. Referenced fields should + // not be added anymore. 
+ c.nonMonotonicRejectNest++ + c.evalState(x, Partial) + c.nonMonotonicRejectNest-- + + default: + // TODO(cycle): if EqualOp: + // - ensure to pass special status to if clause or keep a track of "hot" + // paths. + // - evaluate hypothetical struct + // - walk over all fields and verify that fields are not contradicting + // previously marked fields. + // + switch { + case b.Concreteness() > Concrete: + // TODO: mimic comparison to bottom semantics. If it is a valid + // value, check for concreteness that this level only. This + // should ultimately be replaced with an exists and valid + // builtin. + match = op == EqualOp + default: + match = op != EqualOp + } + c.nonMonotonicLookupNest++ + c.evalState(x, Partial) + c.nonMonotonicLookupNest-- + } + + c.PopState(s) + return &Bool{src, match} +} + +// A CallExpr represents a call to a builtin. +// +// len(x) +// strings.ToLower(x) +// +type CallExpr struct { + Src *ast.CallExpr + Fun Expr + Args []Expr +} + +func (x *CallExpr) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *CallExpr) evaluate(c *OpContext) Value { + fun := c.value(x.Fun) + var b *Builtin + switch f := fun.(type) { + case *Builtin: + b = f + + case *BuiltinValidator: + // We allow a validator that takes no arguments accept the validated + // value to be called with zero arguments. + switch { + case f.Src != nil: + c.addErrf(0, pos(x.Fun), + "cannot call previously called validator %s", c.Str(x.Fun)) + + case f.Builtin.IsValidator(len(x.Args)): + v := *f + v.Src = x + return &v + + default: + b = f.Builtin + } + + default: + c.addErrf(0, pos(x.Fun), "cannot call non-function %s (type %s)", + c.Str(x.Fun), kind(fun)) + return nil + } + args := []Value{} + for i, a := range x.Args { + expr := c.value(a) + switch v := expr.(type) { + case nil: + // There SHOULD be an error in the context. If not, we generate + // one. 
+ c.Assertf(pos(x.Fun), c.HasErr(), + "argument %d to function %s is incomplete", i, c.Str(x.Fun)) + + case *Bottom: + // TODO(errors): consider adding an argument index for this errors. + // On the other hand, this error is really not related to the + // argument itself, so maybe it is good as it is. + c.AddBottom(v) + + default: + args = append(args, expr) + } + } + if c.HasErr() { + return nil + } + if b.IsValidator(len(args)) { + return &BuiltinValidator{x, b, args} + } + result := b.call(c, pos(x), args) + if result == nil { + return nil + } + return c.evalState(result, Partial) +} + +// A Builtin is a value representing a native function call. +type Builtin struct { + // TODO: make these values for better type checking. + Params []Param + Result Kind + Func func(c *OpContext, args []Value) Expr + + Package Feature + Name string +} + +type Param struct { + Name Feature // name of the argument; mostly for documentation + Value Value // Could become Value later, using disjunctions for defaults. +} + +// Kind returns the kind mask of this parameter. +func (p Param) Kind() Kind { + return p.Value.Kind() +} + +// Default reports the default value for this Param or nil if there is none. +func (p Param) Default() Value { + d, ok := p.Value.(*Disjunction) + if !ok || d.NumDefaults != 1 { + return nil + } + return d.Values[0] +} + +func (x *Builtin) WriteName(w io.Writer, c *OpContext) { + _, _ = fmt.Fprintf(w, "%s.%s", x.Package.StringValue(c), x.Name) +} + +// Kind here represents the case where Builtin is used as a Validator. +func (x *Builtin) Kind() Kind { + return FuncKind +} + +func (x *Builtin) BareValidator() *BuiltinValidator { + if len(x.Params) != 1 || + (x.Result != BoolKind && x.Result != BottomKind) { + return nil + } + return &BuiltinValidator{Builtin: x} +} + +// IsValidator reports whether b should be interpreted as a Validator for the +// given number of arguments. 
+func (b *Builtin) IsValidator(numArgs int) bool { + return numArgs == len(b.Params)-1 && + b.Result&^BoolKind == 0 && + b.Params[numArgs].Default() == nil +} + +func bottom(v Value) *Bottom { + if x, ok := v.(*Vertex); ok { + v = x.Value() + } + b, _ := v.(*Bottom) + return b +} + +func (x *Builtin) call(c *OpContext, p token.Pos, args []Value) Expr { + fun := x // right now always x. + if len(args) > len(x.Params) { + c.addErrf(0, p, + "too many arguments in call to %s (have %d, want %d)", + fun, len(args), len(x.Params)) + return nil + } + for i := len(args); i < len(x.Params); i++ { + v := x.Params[i].Default() + if v == nil { + c.addErrf(0, p, + "not enough arguments in call to %s (have %d, want %d)", + fun, len(args), len(x.Params)) + return nil + } + args = append(args, v) + } + for i, a := range args { + if x.Params[i].Kind() == BottomKind { + continue + } + if b := bottom(a); b != nil { + return b + } + if k := kind(a); x.Params[i].Kind()&k == BottomKind { + code := EvalError + b, _ := args[i].(*Bottom) + if b != nil { + code = b.Code + } + c.addErrf(code, pos(a), + "cannot use %s (type %s) as %s in argument %d to %s", + a, k, x.Params[i].Kind(), i+1, fun) + return nil + } + v := x.Params[i].Value + if _, ok := v.(*BasicType); !ok { + env := c.Env(0) + x := &BinaryExpr{Op: AndOp, X: v, Y: a} + n := &Vertex{Conjuncts: []Conjunct{{env, x, CloseInfo{}}}} + c.Unify(n, Finalized) + if _, ok := n.BaseValue.(*Bottom); ok { + c.addErrf(0, pos(a), + "cannot use %s as %s in argument %d to %s", + a, v, i+1, fun) + } + args[i] = n + } + } + return x.Func(c, args) +} + +func (x *Builtin) Source() ast.Node { return nil } + +// A BuiltinValidator is a Value that results from evaluation a partial call +// to a builtin (using CallExpr). 
+// +// strings.MinRunes(4) +// +type BuiltinValidator struct { + Src *CallExpr + Builtin *Builtin + Args []Value // any but the first value +} + +func (x *BuiltinValidator) Source() ast.Node { + if x.Src == nil { + return x.Builtin.Source() + } + return x.Src.Source() +} + +func (x *BuiltinValidator) Pos() token.Pos { + if src := x.Source(); src != nil { + return src.Pos() + } + return token.NoPos +} + +func (x *BuiltinValidator) Kind() Kind { + return x.Builtin.Params[0].Kind() +} + +func (x *BuiltinValidator) validate(c *OpContext, v Value) *Bottom { + args := make([]Value, len(x.Args)+1) + args[0] = v + copy(args[1:], x.Args) + + return validateWithBuiltin(c, x.Pos(), x.Builtin, args) +} + +func validateWithBuiltin(c *OpContext, src token.Pos, b *Builtin, args []Value) *Bottom { + var severeness ErrorCode + var err errors.Error + + res := b.call(c, src, args) + switch v := res.(type) { + case nil: + return nil + + case *Bottom: + if v == nil { + return nil // caught elsewhere, but be defensive. + } + severeness = v.Code + err = v.Err + + case *Bool: + if v.B { + return nil + } + + default: + return c.NewErrf("invalid validator %s.%s", b.Package.StringValue(c), b.Name) + } + + // failed: + var buf bytes.Buffer + b.WriteName(&buf, c) + if len(args) > 1 { + buf.WriteString("(") + for i, a := range args[1:] { + if i > 0 { + _, _ = buf.WriteString(", ") + } + buf.WriteString(c.Str(a)) + } + buf.WriteString(")") + } + + vErr := c.NewPosf(src, "invalid value %s (does not satisfy %s)", c.Str(args[0]), buf.String()) + vErr.err = err + + for _, v := range args { + vErr.AddPosition(v) + } + + return &Bottom{Code: severeness, Err: vErr} +} + +// A Disjunction represents a disjunction, where each disjunct may or may not +// be marked as a default. +type DisjunctionExpr struct { + Src *ast.BinaryExpr + Values []Disjunct + + HasDefaults bool +} + +// A Disjunct is used in Disjunction. 
+type Disjunct struct { + Val Expr + Default bool +} + +func (x *DisjunctionExpr) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *DisjunctionExpr) evaluate(c *OpContext) Value { + e := c.Env(0) + v := &Vertex{Conjuncts: []Conjunct{{e, x, CloseInfo{}}}} + c.Unify(v, Finalized) // TODO: also partial okay? + // TODO: if the disjunction result originated from a literal value, we may + // consider the result closed to create more permanent errors. + return v +} + +// A Conjunction is a conjunction of values that cannot be represented as a +// single value. It is the result of unification. +type Conjunction struct { + Src ast.Expr + Values []Value +} + +func (x *Conjunction) Source() ast.Node { return x.Src } +func (x *Conjunction) Kind() Kind { + k := TopKind + for _, v := range x.Values { + k &= v.Kind() + } + return k +} + +// A disjunction is a disjunction of values. It is the result of expanding +// a DisjunctionExpr if the expression cannot be represented as a single value. +type Disjunction struct { + Src ast.Expr + + // Values are the non-error disjuncts of this expression. The first + // NumDefault values are default values. + Values []*Vertex + + Errors *Bottom // []bottom + + // NumDefaults indicates the number of default values. + NumDefaults int + HasDefaults bool +} + +func (x *Disjunction) Source() ast.Node { return x.Src } +func (x *Disjunction) Kind() Kind { + k := BottomKind + for _, v := range x.Values { + k |= v.Kind() + } + return k +} + +// A ForClause represents a for clause of a comprehension. It can be used +// as a struct or list element. 
+// +// for k, v in src {} +// +type ForClause struct { + Syntax *ast.ForClause + Key Feature + Value Feature + Src Expr + Dst Yielder +} + +func (x *ForClause) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Syntax +} + +func (x *ForClause) yield(c *OpContext, f YieldFunc) { + n := c.node(x, x.Src, true, AllArcs) + for _, a := range n.Arcs { + if !a.Label.IsRegular() { + continue + } + + c.Unify(a, Partial) + + n := &Vertex{status: Finalized} + + // TODO: only needed if value label != _ + + b := &Vertex{ + Label: x.Value, + BaseValue: a, + } + n.Arcs = append(n.Arcs, b) + + if x.Key != 0 { + v := &Vertex{Label: x.Key} + key := a.Label.ToValue(c) + v.AddConjunct(MakeRootConjunct(c.Env(0), key)) + v.SetValue(c, Finalized, key) + n.Arcs = append(n.Arcs, v) + } + + sub := c.spawn(n) + saved := c.PushState(sub, x.Dst.Source()) + x.Dst.yield(c, f) + if b := c.PopState(saved); b != nil { + c.AddBottom(b) + break + } + if c.HasErr() { + break + } + } +} + +// An IfClause represents an if clause of a comprehension. It can be used +// as a struct or list element. +// +// if cond {} +// +type IfClause struct { + Src *ast.IfClause + Condition Expr + Dst Yielder +} + +func (x *IfClause) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *IfClause) yield(ctx *OpContext, f YieldFunc) { + if ctx.BoolValue(ctx.value(x.Condition)) { + x.Dst.yield(ctx, f) + } +} + +// An LetClause represents a let clause in a comprehension. 
+// +// for k, v in src {} +// +type LetClause struct { + Src *ast.LetClause + Label Feature + Expr Expr + Dst Yielder +} + +func (x *LetClause) Source() ast.Node { + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *LetClause) yield(c *OpContext, f YieldFunc) { + n := &Vertex{Arcs: []*Vertex{ + {Label: x.Label, Conjuncts: []Conjunct{{c.Env(0), x.Expr, CloseInfo{}}}}, + }} + + sub := c.spawn(n) + saved := c.PushState(sub, x.Dst.Source()) + x.Dst.yield(c, f) + if b := c.PopState(saved); b != nil { + c.AddBottom(b) + } +} + +// A ValueClause represents the value part of a comprehension. +type ValueClause struct { + *StructLit +} + +func (x *ValueClause) Source() ast.Node { + if x.StructLit == nil { + return nil + } + if x.Src == nil { + return nil + } + return x.Src +} + +func (x *ValueClause) yield(op *OpContext, f YieldFunc) { + f(op.Env(0), x.StructLit) +} diff --git a/vendor/cuelang.org/go/internal/core/adt/feature.go b/vendor/cuelang.org/go/internal/core/adt/feature.go new file mode 100644 index 000000000..ce4e76b33 --- /dev/null +++ b/vendor/cuelang.org/go/internal/core/adt/feature.go @@ -0,0 +1,299 @@ +// Copyright 2020 CUE Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package adt + +import ( + "fmt" + "strconv" + "strings" + + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/errors" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/token" + "cuelang.org/go/internal" +) + +// A Feature is an encoded form of a label which comprises a compact +// representation of an integer or string label as well as a label type. +type Feature uint32 + +// TODO: create labels such that list are sorted first (or last with index.) + +// InvalidLabel is an encoding of an erroneous label. +const InvalidLabel Feature = 0x7 // 0xb111 + +// MaxIndex indicates the maximum number of unique strings that are used for +// labeles within this CUE implementation. +const MaxIndex int64 = 1<<28 - 1 + +// A StringIndexer coverts strings to and from an index that is unique for a +// given string. +type StringIndexer interface { + // ToIndex returns a unique positive index for s (0 < index < 2^28-1). + // + // For each pair of strings s and t it must return the same index if and + // only if s == t. + StringToIndex(s string) (index int64) + + // ToString returns a string s for index such that ToIndex(s) == index. + IndexToString(index int64) string +} + +// SelectorString reports the shortest string representation of f when used as a +// selector. +func (f Feature) SelectorString(index StringIndexer) string { + if f == 0 { + return "_" + } + x := f.Index() + switch f.Typ() { + case IntLabel: + return strconv.Itoa(int(x)) + case StringLabel: + s := index.IndexToString(int64(x)) + if ast.IsValidIdent(s) && !internal.IsDefOrHidden(s) { + return s + } + return literal.String.Quote(s) + default: + return f.IdentString(index) + } +} + +// IdentString reports the identifier of f. The result is undefined if f +// is not an identifier label. 
+func (f Feature) IdentString(index StringIndexer) string { + s := index.IndexToString(int64(f.Index())) + if f.IsHidden() { + if p := strings.IndexByte(s, '\x00'); p >= 0 { + s = s[:p] + } + } + return s +} + +// PkgID returns the package identifier, composed of the module and package +// name, associated with this identifier. It will return "" if this is not +// a hidden label. +func (f Feature) PkgID(index StringIndexer) string { + if !f.IsHidden() { + return "" + } + s := index.IndexToString(int64(f.Index())) + if p := strings.IndexByte(s, '\x00'); p >= 0 { + return s[p+1:] + } + return s +} + +// StringValue reports the string value of f, which must be a string label. +func (f Feature) StringValue(index StringIndexer) string { + if !f.IsString() { + panic("not a string label") + } + x := f.Index() + return index.IndexToString(int64(x)) +} + +// ToValue converts a label to a value, which will be a Num for integer labels +// and a String for string labels. It panics when f is not a regular label. +func (f Feature) ToValue(ctx *OpContext) Value { + if !f.IsRegular() { + panic("not a regular label") + } + if f.IsInt() { + return ctx.NewInt64(int64(f.Index())) + } + x := f.Index() + str := ctx.IndexToString(int64(x)) + return ctx.NewString(str) +} + +// StringLabel converts s to a string label. +func (c *OpContext) StringLabel(s string) Feature { + return labelFromValue(c, nil, &String{Str: s}) +} + +// MakeStringLabel creates a label for the given string. +func MakeStringLabel(r StringIndexer, s string) Feature { + i := r.StringToIndex(s) + + // TODO: set position if it exists. + f, err := MakeLabel(nil, i, StringLabel) + if err != nil { + panic("out of free string slots") + } + return f +} + +// MakeIdentLabel creates a label for the given identifier. 
+func MakeIdentLabel(r StringIndexer, s, pkgpath string) Feature { + t := StringLabel + switch { + case strings.HasPrefix(s, "_#"): + t = HiddenDefinitionLabel + s = fmt.Sprintf("%s\x00%s", s, pkgpath) + case strings.HasPrefix(s, "#"): + t = DefinitionLabel + case strings.HasPrefix(s, "_"): + s = fmt.Sprintf("%s\x00%s", s, pkgpath) + t = HiddenLabel + } + i := r.StringToIndex(s) + f, err := MakeLabel(nil, i, t) + if err != nil { + panic("out of free string slots") + } + return f +} + +const msgGround = "invalid non-ground value %s (must be concrete %s)" + +func labelFromValue(c *OpContext, src Expr, v Value) Feature { + var i int64 + var t FeatureType + if isError(v) { + return InvalidLabel + } + switch v.Kind() { + case IntKind, NumKind: + x, _ := v.(*Num) + if x == nil { + c.addErrf(IncompleteError, pos(v), msgGround, v, "int") + return InvalidLabel + } + t = IntLabel + var err error + i, err = x.X.Int64() + if err != nil || x.K != IntKind { + if src == nil { + src = v + } + c.AddErrf("invalid index %v: %v", src, err) + return InvalidLabel + } + if i < 0 { + switch src.(type) { + case nil, *Num, *UnaryExpr: + // If the value is a constant, we know it is always an error. + // UnaryExpr is an approximation for a constant value here. + c.AddErrf("invalid index %s (index must be non-negative)", + c.Str(x)) + default: + // Use a different message is it is the result of evaluation. + c.AddErrf("index %s out of range [%s]", c.Str(src), c.Str(x)) + } + return InvalidLabel + } + + case StringKind: + x, _ := v.(*String) + if x == nil { + c.addErrf(IncompleteError, pos(v), msgGround, v, "string") + return InvalidLabel + } + t = StringLabel + i = c.StringToIndex(x.Str) + + default: + if src != nil { + c.AddErrf("invalid index %s (invalid type %v)", + c.Str(src), v.Kind()) + } else { + c.AddErrf("invalid index type %v", v.Kind()) + } + return InvalidLabel + } + + // TODO: set position if it exists. 
+ f, err := MakeLabel(nil, i, t) + if err != nil { + c.AddErr(err) + } + return f +} + +// MakeLabel creates a label. It reports an error if the index is out of range. +func MakeLabel(src ast.Node, index int64, f FeatureType) (Feature, errors.Error) { + if 0 > index || index > MaxIndex { + p := token.NoPos + if src != nil { + p = src.Pos() + } + return InvalidLabel, + errors.Newf(p, "int label out of range (%d not >=0 and <= %d)", + index, MaxIndex) + } + return Feature(index)<