diff --git a/cmd/controller/main.go b/cmd/controller/main.go index d8541a617f..295aeb3f8c 100644 --- a/cmd/controller/main.go +++ b/cmd/controller/main.go @@ -41,5 +41,6 @@ func main() { flag.Parse() ctx := injection.WithNamespaceScope(signals.NewContext(), *namespace) - sharedmain.MainWithContext(ctx, "watcher", taskrun.NewController, pipelinerun.NewController) + sharedmain.MainWithContext(ctx, "watcher", taskrun.NewControllerV1, pipelinerun.NewControllerV1) + sharedmain.MainWithContext(ctx, "watcher", taskrun.NewControllerV1Beta1, pipelinerun.NewControllerV1Beta1) } diff --git a/go.mod b/go.mod index 3c969d41d1..2ba20b6b96 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/tektoncd/chains -go 1.20 +go 1.21 require ( cloud.google.com/go/compute/metadata v0.2.3 @@ -15,6 +15,7 @@ require ( github.com/hashicorp/go-multierror v1.1.1 github.com/in-toto/in-toto-golang v0.9.0 github.com/opencontainers/go-digest v1.0.0 + github.com/opentracing/opentracing-go v1.2.0 github.com/pkg/errors v0.9.1 github.com/secure-systems-lab/go-securesystemslib v0.7.0 github.com/sigstore/cosign/v2 v2.2.1 @@ -335,7 +336,6 @@ require ( github.com/oleiade/reflections v1.0.1 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/opencontainers/image-spec v1.1.0-rc5 // indirect - github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/openzipkin/zipkin-go v0.3.0 // indirect github.com/outcaste-io/ristretto v0.2.3 // indirect github.com/pborman/uuid v1.2.1 // indirect diff --git a/go.sum b/go.sum index 941540dad7..3210233e5a 100644 --- a/go.sum +++ b/go.sum @@ -71,6 +71,7 @@ cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+ cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= cloud.google.com/go/pubsub v1.33.0 h1:6SPCPvWav64tj0sVX/+npCBKhUi/UjJehy9op/V3p2g= +cloud.google.com/go/pubsub v1.33.0/go.mod h1:f+w71I33OMyxf9VpMVcZbnG5KSUkCOUHYpFd5U1GdRc= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= @@ -81,11 +82,13 @@ cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq cloud.google.com/go/storage v1.35.1 h1:B59ahL//eDfx2IIKFBeT5Atm9wnNmj3+8xG/W4WB//w= cloud.google.com/go/storage v1.35.1/go.mod h1:M6M/3V/D3KpzMTJyPOR/HU6n2Si5QdaXYEsng2xgOs8= code.gitea.io/sdk/gitea v0.16.0 h1:gAfssETO1Hv9QbE+/nhWu7EjoFQYKt6kPoyDytQgw00= +code.gitea.io/sdk/gitea v0.16.0/go.mod h1:ndkDk99BnfiUCCYEUhpNzi0lpmApXlwRFqClBlOlEBg= contrib.go.opencensus.io/exporter/ocagent v0.7.1-0.20200907061046-05415f1de66d h1:LblfooH1lKOpp1hIhukktmSAxFkqMPFk9KR6iZ0MJNI= contrib.go.opencensus.io/exporter/ocagent v0.7.1-0.20200907061046-05415f1de66d/go.mod h1:IshRmMJBhDfFj5Y67nVhMYTTIze91RUeT73ipWKs/GY= contrib.go.opencensus.io/exporter/prometheus v0.4.0 h1:0QfIkj9z/iVZgK31D9H9ohjjIDApI2GOPScCKwxedbs= contrib.go.opencensus.io/exporter/prometheus v0.4.0/go.mod h1:o7cosnyfuPVK0tB8q0QmaQNhGnptITnPQB+z1+qeFB0= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= filippo.io/edwards25519 v1.0.0 
h1:0wAIcmJUqRdI8IJ/3eGi5/HwXZWPujYXXlkrQogz0Ek= filippo.io/edwards25519 v1.0.0/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns= @@ -94,6 +97,7 @@ github.com/4meepo/tagalign v1.3.3/go.mod h1:Q9c1rYMZJc9dPRkbQPpcBNCLEmY2njbAsXhQ github.com/Abirdcfly/dupword v0.0.13 h1:SMS17YXypwP000fA7Lr+kfyBQyW14tTT+nRv9ASwUUo= github.com/Abirdcfly/dupword v0.0.13/go.mod h1:Ut6Ue2KgF/kCOawpW4LnExT+xZLQviJPE4klBPMK/5Y= github.com/AdamKorcz/go-fuzz-headers-1 v0.0.0-20230618160516-e936619f9f18 h1:rd389Q26LMy03gG4anandGFC2LW/xvjga5GezeeaxQk= +github.com/AdamKorcz/go-fuzz-headers-1 v0.0.0-20230618160516-e936619f9f18/go.mod h1:fgJuSBrJP5qZtKqaMJE0hmhS2tmRH+44IkfZvjtaf1M= github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/alibabacloudsdkgo/helper v0.2.0 h1:8+4G8JaejP8Xa6W46PzJEwisNgBXMvFcz78N6zG/ARw= github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/alibabacloudsdkgo/helper v0.2.0/go.mod h1:GgeIE+1be8Ivm7Sh4RgwI42aTtC9qrcj+Y9Y6CjJhJs= github.com/Antonboom/errname v0.1.12 h1:oh9ak2zUtsLp5oaEd/erjB4GPu9w19NyoIskZClDcQY= @@ -156,6 +160,7 @@ github.com/DataDog/go-libddwaf v1.5.0/go.mod h1:Fpnmoc2k53h6desQrH1P0/gR52CUzkLN github.com/DataDog/go-tuf v1.0.2-0.5.2 h1:EeZr937eKAWPxJ26IykAdWA4A0jQXJgkhUjqEI/w7+I= github.com/DataDog/go-tuf v1.0.2-0.5.2/go.mod h1:zBcq6f654iVqmkk8n2Cx81E1JnNTMOAx1UEO/wZR+P0= github.com/DataDog/gostackparse v0.7.0 h1:i7dLkXHvYzHV308hnkvVGDL3BR4FWl7IsXNPz/IGQh4= +github.com/DataDog/gostackparse v0.7.0/go.mod h1:lTfqcJKqS9KnXQGnyQMCugq3u1FP6UZMfWR0aitKFMM= github.com/DataDog/sketches-go v1.4.3 h1:ZB9nijteJRFUQixkQfatCqASartGNfiolIlMiEv3u/w= github.com/DataDog/sketches-go v1.4.3/go.mod h1:XR0ns2RtEEF09mDKXiKZiQg+nfZStrq1ZuL1eezeZe0= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= @@ -169,6 +174,7 @@ github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/Microsoft/hcsshim v0.11.1 h1:hJ3s7GbWlGK4YVV92sO88BQSyF4ZLVy7/awqOlPxFbA= +github.com/Microsoft/hcsshim v0.11.1/go.mod h1:nFJmaO4Zr5Y7eADdFOpYswDDlNVbvcIJJNJLECr5JQg= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OpenPeeDeeP/depguard/v2 v2.1.0 h1:aQl70G173h/GZYhWf36aE5H0KaujXfVMnn/f1kSDVYY= github.com/OpenPeeDeeP/depguard/v2 v2.1.0/go.mod h1:PUBgk35fX4i7JDmwzlJwJ+GMe6NfO1723wmJMgPThNQ= @@ -181,21 +187,26 @@ github.com/Shopify/sarama v1.38.1 h1:lqqPUPQZ7zPqYlWpTh+LQ9bhYNu2xJL6k1SJN4WVe2A github.com/Shopify/sarama v1.38.1/go.mod h1:iwv9a67Ha8VNa+TifujYoWGxWnu2kNVAQdSdZ4X2o5g= github.com/Shopify/toxiproxy/v2 v2.1.6-0.20210914104332-15ea381dcdae/go.mod h1:/cvHQkZ1fst0EmZnA5dFtiQdWCNCFYzb+uE2vqVgvx0= github.com/Shopify/toxiproxy/v2 v2.5.0 h1:i4LPT+qrSlKNtQf5QliVjdP08GyAH8+BUIc9gT0eahc= +github.com/Shopify/toxiproxy/v2 v2.5.0/go.mod h1:yhM2epWtAmel9CB8r2+L+PCmhH6yH2pITaPAo7jxJl0= github.com/ThalesIgnite/crypto11 v1.2.5 h1:1IiIIEqYmBvUYFeMnHqRft4bwf/O36jryEUpY+9ef8E= github.com/ThalesIgnite/crypto11 v1.2.5/go.mod h1:ILDKtnCKiQ7zRoNxcp36Y1ZR8LBPmR2E23+wTQe/MlE= github.com/acomagu/bufpipe v1.0.4 h1:e3H4WUzM3npvo5uv95QuJM3cQspFNtFBzvJ2oNjKIDQ= +github.com/acomagu/bufpipe v1.0.4/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod 
h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= github.com/alecthomas/assert/v2 v2.2.2 h1:Z/iVC0xZfWTaFNE6bA3z07T86hd45Xe2eLt6WVy2bbk= +github.com/alecthomas/assert/v2 v2.2.2/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ= github.com/alecthomas/go-check-sumtype v0.1.3 h1:M+tqMxB68hcgccRXBMVCPI4UJ+QUfdSx0xdbypKCqA8= github.com/alecthomas/go-check-sumtype v0.1.3/go.mod h1:WyYPfhfkdhyrdaligV6svFopZV8Lqdzn5pyVBaV6jhQ= github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= +github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0= +github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30= github.com/alexkohler/nakedret/v2 v2.0.2 h1:qnXuZNvv3/AxkAb22q/sEsEpcA99YxLFACDtEw9TPxE= github.com/alexkohler/nakedret/v2 v2.0.2/go.mod h1:2b8Gkk0GsOrqQv/gPWjNLDSKwG8I5moSXG1K4VIBcTQ= github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= @@ -246,6 +257,7 @@ github.com/aliyun/credentials-go v1.3.1 h1:uq/0v7kWrxmoLGpqjx7vtQ/s03f0zR//0br/x github.com/aliyun/credentials-go v1.3.1/go.mod h1:8jKYhQuDawt8x2+fusqa1Y6mPxemTsBEN04dgcAcYz0= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df h1:7RFfzj4SSt6nnvCPbCqijJi1nWCd+TqAT3bYCStRC18= github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df/go.mod h1:pSwJ0fSY5KhvocuWSx4fz3BA8OrA1bQn+K1Eli3BRwM= @@ -309,6 +321,7 @@ github.com/blendle/zapdriver v1.3.1/go.mod h1:mdXfREi6u5MArG4j9fewC+FGnXaBR+T4Ox github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= github.com/bluekeyes/go-gitdiff v0.7.1 h1:graP4ElLRshr8ecu0UtqfNTCHrtSyZd3DABQm/DWesQ= +github.com/bluekeyes/go-gitdiff v0.7.1/go.mod h1:QpfYYO1E0fTVHVZAZKiRjtSGY9823iCdvGXBcEzHGbM= github.com/bmatcuk/doublestar/v4 v4.0.2 h1:X0krlUVAVmtr2cRoTqR8aDMrDqnB36ht8wpWTiQ3jsA= github.com/bmatcuk/doublestar/v4 v4.0.2/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bombsimon/wsl/v3 v3.4.0 h1:RkSxjT3tmlptwfgEgTgU+KYKLI35p/tviNXNXiL2aNU= @@ -368,10 +381,13 @@ github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod 
h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb h1:EDmT6Q9Zs+SbUoc7Ik9EfrFqcylYqgPZ9ANSbTAntnE= +github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb/go.mod h1:ZjrT6AXHbDs86ZSdt/osfBi5qfexBrKUdONk989Wnk4= github.com/common-nighthawk/go-figure v0.0.0-20210622060536-734e95fb86be h1:J5BL2kskAlV9ckgEsNQXscjIaLiOYiZ75d4e94E6dcQ= github.com/common-nighthawk/go-figure v0.0.0-20210622060536-734e95fb86be/go.mod h1:mk5IQ+Y0ZeO87b858TlA645sVcEcbiX6YqP98kt+7+w= github.com/containerd/containerd v1.7.8 h1:RkwgOW3AVUT3H/dyT0W03Dc8AzlpMG65lX48KftOFSM= +github.com/containerd/containerd v1.7.8/go.mod h1:L/Hn9qylJtUFT7cPeM0Sr3fATj+WjHwRQ0lyrYk3OPY= github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= github.com/containerd/stargz-snapshotter/estargz v0.14.3 h1:OqlDCK3ZVUO6C3B/5FSkDwbkEETK84kQgEeFwDC+62k= github.com/containerd/stargz-snapshotter/estargz v0.14.3/go.mod h1:KY//uOCIkSuNAHhJogcZtrNHdKrA99/FCCRjE3HD36o= github.com/coreos/go-oidc/v3 v3.7.0 h1:FTdj0uexT4diYIPlF4yoFVI5MRO1r5+SEcIpEw9vC0o= @@ -386,20 +402,24 @@ github.com/curioswitch/go-reassign v0.2.0/go.mod h1:x6OpXuWvgfQaMGks2BZybTngWjT8 github.com/cyberphone/json-canonicalization v0.0.0-20231011164504-785e29786b46 h1:2Dx4IHfC1yHWI12AxQDJM1QbRCDfk6M+blLzlZCXdrc= github.com/cyberphone/json-canonicalization v0.0.0-20231011164504-785e29786b46/go.mod h1:uzvlm1mxhHkdfqitSA92i7Se+S9ksOn3a3qmv/kyOCw= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= +github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/daixiang0/gci v0.11.2 h1:Oji+oPsp3bQ6bNNgX30NBAVT18P4uBH4sRZnlOlTj7Y= github.com/daixiang0/gci v0.11.2/go.mod h1:xtHP9N7AHdNvtRNfcx9gwTDfw7FRJx4bZUsiEfiNNAI= github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE= +github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davidmz/go-pageant v1.0.2 h1:bPblRCh5jGU+Uptpz6LgMZGD5hJoOt7otgT454WvHn0= +github.com/davidmz/go-pageant v1.0.2/go.mod h1:P2EDDnMqIwG5Rrp05dTRITj9z2zpGcD9efWSkTNKLIE= github.com/decred/dcrd/crypto/blake256 v1.0.1/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 h1:8UrgZ3GkP4i/CLijOJx79Yu+etlyjdBU4sfcs2WYQMs= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0= github.com/denis-tingaikin/go-header v0.4.3 h1:tEaZKAlqql6SKCY++utLmkPLd6K8IBM20Ha7UVm+mtU= github.com/denis-tingaikin/go-header v0.4.3/go.mod h1:0wOCWuN71D5qIgE2nz9KrKmuYBAC2Mra5RassOIQ2/c= github.com/depcheck-test/depcheck-test v0.0.0-20220607135614-199033aaa936 h1:foGzavPWwtoyBvjWyKJYDYsyzy+23iBV7NKTwdk+LRY= +github.com/depcheck-test/depcheck-test v0.0.0-20220607135614-199033aaa936/go.mod h1:ttKPnOepYt4LLzD+loXQ1rT6EmpyIYHro7TAJuIIlHo= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA= 
github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/digitorus/pkcs7 v0.0.0-20230713084857-e76b763bdc49/go.mod h1:SKVExuS+vpu2l9IoOc0RwqE7NYnb0JlcFHFnEJkVDzc= @@ -410,6 +430,7 @@ github.com/digitorus/timestamp v0.0.0-20230902153158-687734543647/go.mod h1:GvWn github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI= +github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/docker/cli v24.0.7+incompatible h1:wa/nIwYFW7BVTGa7SWPVyyXU9lgORqUb1xfI36MSkFg= github.com/docker/cli v24.0.7+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= @@ -467,6 +488,7 @@ github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8 github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= @@ -481,14 +503,19 @@ github.com/ghostiam/protogetter v0.2.3 h1:qdv2pzo3BpLqezwqfGDLZ+nHEYmc5bUpIdsMbB github.com/ghostiam/protogetter v0.2.3/go.mod h1:KmNLOsy1v04hKbvZs8EfGI1fk39AgTdRDxWNYPfXVc4= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY= +github.com/gliderlabs/ssh v0.3.5/go.mod h1:8XB4KraRrX39qHhT6yxPsHedjA08I/uBVwj4xC+/+z4= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-critic/go-critic v0.9.0 h1:Pmys9qvU3pSML/3GEQ2Xd9RZ/ip+aXHKILuxczKGV/U= github.com/go-critic/go-critic v0.9.0/go.mod h1:5P8tdXL7m/6qnyG6oRAlYLORvoXH0WDypYgAEmagT40= github.com/go-fed/httpsig v1.1.0 h1:9M+hb0jkEICD8/cAiNqEB66R87tTINszBRTjwjQzWcI= +github.com/go-fed/httpsig v1.1.0/go.mod h1:RCMrTZvN1bJYtofsG4rd5NaO5obxQ5xBkdiS7xsT7bM= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= +github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= github.com/go-git/go-git/v5 v5.10.0 h1:F0x3xXrAWmhwtzoCokU4IMPcBdncG+HAAqi9FcOOjbQ= +github.com/go-git/go-git/v5 v5.10.0/go.mod h1:1FOZ/pQnqw24ghP2n7cunVl0ON55BsjPYvhWHvZGhoo= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw 
v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -551,6 +578,7 @@ github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+ github.com/go-openapi/validate v0.22.1 h1:G+c2ub6q47kfX1sOBLwIQwzBVt8qmOAARyo/9Fqs9NU= github.com/go-openapi/validate v0.22.1/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= @@ -558,10 +586,13 @@ github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91 github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24= github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-rod/rod v0.114.4 h1:FpkNFukjCuZLwnoLs+S9aCL95o/EMec6M+41UmvQay8= +github.com/go-rod/rod v0.114.4/go.mod h1:aiedSEFg5DwG/fnNbUOTPMTTWX3MRj6vIs/a684Mthw= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg= +github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8= github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU= github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s= @@ -574,6 +605,7 @@ github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlN github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA= github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA= github.com/go-toolsmith/pkgload v1.2.2 h1:0CtmHq/02QhxcF7E9N5LIFcYFsMR5rdovfqTtRKkgIk= +github.com/go-toolsmith/pkgload v1.2.2/go.mod h1:R2hxLNRKuAsiXCo2i5J6ZQPhnPMOVtU+f0arbFPWCus= github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw= github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= @@ -610,7 +642,9 @@ github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJA github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/goccy/kpoward v0.1.0 h1:UcrLMG9rq7NwrMiUc0h+qUyIlvqPzqLiPb+zQEqH8cE= +github.com/goccy/kpoward v0.1.0/go.mod h1:m13lkcWSvNXtYC9yrXzguwrt/YTDAGioPusndMdQ+eA= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= +github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= 
github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= @@ -719,8 +753,10 @@ github.com/google/go-licenses v1.6.0/go.mod h1:Z8jgz2isEhdenOqd/00pq7I4y4k1xVVQJ github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/go-replayers/grpcreplay v1.1.0 h1:S5+I3zYyZ+GQz68OfbURDdt/+cSMqCK1wrvNx7WBzTE= +github.com/google/go-replayers/grpcreplay v1.1.0/go.mod h1:qzAvJ8/wi57zq7gWqaE6AwLM6miiXUQwP1S+I9icmhk= github.com/google/go-replayers/httpreplay v1.1.1/go.mod h1:gN9GeLIs7l6NUoVaSSnv2RiqK1NiwAmD0MrKeC9IIks= github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk= +github.com/google/go-replayers/httpreplay v1.2.0/go.mod h1:WahEFFZZ7a1P4VM1qEeHy+tME4bwyqPcwWbNlUI1Mcg= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= @@ -752,12 +788,15 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20231023181126-ff6d637d2a7b h1:RMpPgZTSApbPf7xaVel+QkoGPRLFLrwFO89uDUHEGf0= +github.com/google/pprof v0.0.0-20231023181126-ff6d637d2a7b/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= github.com/google/tink/go v1.7.0 h1:6Eox8zONGebBFcCBqkVmt60LaWZa6xg1cl/DwAh/J1w= +github.com/google/tink/go v1.7.0/go.mod h1:GAUOd+QE3pgj9q8VKIGTCP33c/B7eb4NhxLcgTJZStM= github.com/google/trillian v1.5.3 h1:3ioA5p09qz+U9/t2riklZtaQdZclaStp0/eQNfewNRg= +github.com/google/trillian v1.5.3/go.mod h1:p4tcg7eBr7aT6DxrAoILpc3uXNfcuAvZSnQKonVg+Eo= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -799,6 +838,7 @@ github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3 github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= github.com/gostaticanalysis/testutil v0.4.0 h1:nhdCmubdmDF6VEatUNjgUZBJKWRqugoISdUv3PPQgHY= +github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU= github.com/gowebpki/jcs v1.0.1 h1:Qjzg8EOkrOTuWP7DqQ1FbYtcpEbeTzUoTN9bptp8FOU= github.com/gowebpki/jcs v1.0.1/go.mod h1:CID1cNZ+sHp1CCpAR8mPf6QRtagFBgPJE0FCUQ6+BrI= github.com/grafeas/grafeas v0.2.3 
h1:B9Bgc3ZQjPhqXKmro95Dfyb+GlE6D1pMuExT+n66ChE= @@ -814,6 +854,7 @@ github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9n github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= +github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= @@ -846,6 +887,7 @@ github.com/hashicorp/vault/api v1.10.0/go.mod h1:jo5Y/ET+hNyz+JnKDt8XLAdKs+AM0G5 github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/howeyc/gopass v0.0.0-20210920133722-c8aef6fb66ef h1:A9HsByNhogrvm9cWb28sjiS3i7tcKCkflWFEkHfuAgM= +github.com/howeyc/gopass v0.0.0-20210920133722-c8aef6fb66ef/go.mod h1:lADxMC39cJJqL93Duh1xhAs4I2Zs8mKS89XWXFGp9cs= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -880,6 +922,7 @@ github.com/jedisct1/go-minisign v0.0.0-20230811132847-661be99b8267/go.mod h1:h1n github.com/jellydator/ttlcache/v3 v3.1.0 h1:0gPFG0IHHP6xyUyXq+JaD8fwkDCqgqwohXNJBcYE71g= github.com/jellydator/ttlcache/v3 v3.1.0/go.mod h1:hi7MGFdMAwZna5n2tuvh63DvFLzVKySzCVW6+0gA2n4= github.com/jenkins-x/go-scm v1.14.20 h1:9AvGdFh9a87WYDbIQaecnylsIuowQM8nbEfDuYlFJYY= +github.com/jenkins-x/go-scm v1.14.20/go.mod h1:4EvjlLLeR4ilT413ZYXhop4Kxji4dYOJI3dlliKFut8= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jgautheron/goconst v1.6.0 h1:gbMLWKRMkzAc6kYsQL6/TxaoBUg3Jm9LSF/Ih1ADWGA= github.com/jgautheron/goconst v1.6.0/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= @@ -892,6 +935,7 @@ github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHW github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmhodges/clock v1.2.0 h1:eq4kys+NI0PLngzaHEe7AmPT90XMGIEySD1JfV1PDIs= +github.com/jmhodges/clock v1.2.0/go.mod h1:qKjhA7x7u/lQpPB1XAqX1b1lCI/w3/fNuYpI/ZjLynI= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= @@ -934,6 +978,7 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= 
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= @@ -1013,6 +1058,7 @@ github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= @@ -1021,7 +1067,9 @@ github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -1067,12 +1115,14 @@ github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/ginkgo/v2 v2.13.0 h1:0jY9lJquiL8fcf3M4LAXN5aMlS/b2BV86HFFPCPMgE4= +github.com/onsi/ginkgo/v2 v2.13.0/go.mod h1:TE309ZR8s5FsKKpuB1YAQYBzCaAfUgatB/xlT/ETL/o= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= github.com/onsi/gomega v1.28.1 h1:MijcGUbfYuznzK/5R4CPNoUP/9Xvuo20sXfEm6XxoTA= +github.com/onsi/gomega v1.28.1/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/cbdlwvlWt0pnFI= @@ -1084,6 +1134,7 @@ github.com/openzipkin/zipkin-go v0.3.0/go.mod h1:4c3sLeE8xjNqehmF5RpAFLPLJxXscc0 github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= github.com/otiai10/copy v1.6.0/go.mod 
h1:XWfuS3CrI0R6IE0FbgHsEazaXO8G0LpMp9o8tos0x4E= github.com/otiai10/copy v1.11.0 h1:OKBD80J/mLBrwnzXqGtFCzprFSGioo30JcmR4APsNwc= +github.com/otiai10/copy v1.11.0/go.mod h1:rSaLseMUsZFFbsFGc7wCJnnkTAvdc5L6VWxPE4308Ww= github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= @@ -1103,6 +1154,7 @@ github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -1117,6 +1169,7 @@ github.com/polyfloyd/go-errorlint v1.4.5 h1:70YWmMy4FgRHehGNOUask3HtSFSOLKgmDn7r github.com/polyfloyd/go-errorlint v1.4.5/go.mod h1:sIZEbFoDOCnTYYZoVkjc4hTnM459tuWA9H/EkdXwsKk= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= +github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= @@ -1160,6 +1213,7 @@ github.com/rabbitmq/amqp091-go v1.1.0/go.mod h1:ogQDLSOACsLPsIq0NpbtiifNZi2YOz0V github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/richardartoul/molecule v1.0.1-0.20221107223329-32cfee06a052 h1:Qp27Idfgi6ACvFQat5+VJvlYToylpM/hcyLBI3WaKPA= +github.com/richardartoul/molecule v1.0.1-0.20221107223329-32cfee06a052/go.mod h1:uvX/8buq8uVeiZiFht+0lqSLBHF+uGV8BrTv8W/SIwk= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= @@ -1168,6 +1222,7 @@ github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryancurrah/gomodguard v1.3.0 
h1:q15RT/pd6UggBXVBuLps8BXRvl5GPBcwVA7BJHMLuTw= @@ -1190,6 +1245,7 @@ github.com/sashamelentyev/usestdlibvars v1.24.0/go.mod h1:9cYkq+gYJ+a5W2RPdhfaSC github.com/sassoftware/relic v7.2.1+incompatible h1:Pwyh1F3I0r4clFJXkSI8bOyJINGqpgjJU3DYAZeI05A= github.com/sassoftware/relic v7.2.1+incompatible/go.mod h1:CWfAxv73/iLZ17rbyhIEq3K9hs5w6FpNMdUT//qR+zk= github.com/sassoftware/relic/v7 v7.6.1 h1:O5s8ewCgq5QYNpv45dK4u6IpBmDM9RIcsbf/G1uXepQ= +github.com/sassoftware/relic/v7 v7.6.1/go.mod h1:NxwtWxWxlUa9as2qZi635Ye6bBT/tGnMALLq7dSfOOU= github.com/secure-systems-lab/go-securesystemslib v0.7.0 h1:OwvJ5jQf9LnIAS83waAjPbcMsODrTQUpJ02eNLUoxBg= github.com/secure-systems-lab/go-securesystemslib v0.7.0/go.mod h1:/2gYnlnHVQ6xeGtfIqFy7Do03K4cdCY0A/GlJLDKLHI= github.com/securego/gosec/v2 v2.18.2 h1:DkDt3wCiOtAHf1XkiXZBhQ6m6mK/b9T/wD257R3/c+I= @@ -1207,9 +1263,11 @@ github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAx github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE= github.com/shurcooL/githubv4 v0.0.0-20190718010115-4ba037080260 h1:xKXiRdBUtMVp64NaxACcyX4kvfmHJ9KrLU+JvyB1mdM= +github.com/shurcooL/githubv4 v0.0.0-20190718010115-4ba037080260/go.mod h1:hAF0iLZy4td2EX+/8Tw+4nodhlMrwN3HupfaXj3zkGo= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk= +github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sigstore/cosign/v2 v2.2.1 h1:HauwPOMYYaVdQsnvUbF0P+ZsVPrkTB0G7Eq65+z1bQc= github.com/sigstore/cosign/v2 v2.2.1/go.mod h1:4l1hELKWoFYzZ/p7+umrK6dhdBoBW0JbQRCIjOZIM9g= @@ -1245,9 +1303,11 @@ github.com/sivchari/nosnakecase v1.7.0/go.mod h1:CwDzrzPea40/GB6uynrNLiorAlgFRvR github.com/sivchari/tenv v1.7.1 h1:PSpuD4bu6fSmtWMxSGWcvqUUgIn7k3yOJhOIzVWn8Ak= github.com/sivchari/tenv v1.7.1/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg= github.com/skeema/knownhosts v1.2.0 h1:h9r9cf0+u7wSE+M183ZtMGgOJKiL96brpaz5ekfJCpM= +github.com/skeema/knownhosts v1.2.0/go.mod h1:g4fPeYpque7P0xefxtGzV81ihjC8sX2IqpAoNkjxbMo= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= github.com/smallstep/assert v0.0.0-20200723003110-82e2b9b3b262 h1:unQFBIznI+VYD1/1fApl1A+9VcBk+9dcqGfnePY87LY= +github.com/smallstep/assert v0.0.0-20200723003110-82e2b9b3b262/go.mod h1:MyOHs9Po2fbM1LHej6sBUT8ozbxmMOFG+E+rx/GSGuc= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= @@ -1259,6 +1319,7 @@ github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCp github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= 
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY= github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= @@ -1275,6 +1336,7 @@ github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0 github.com/spiffe/go-spiffe/v2 v2.1.6 h1:4SdizuQieFyL9eNU+SPiCArH4kynzaKOOj0VvM8R7Xo= github.com/spiffe/go-spiffe/v2 v2.1.6/go.mod h1:eVDqm9xFvyqao6C+eQensb9ZPkyNEeaUbqbBpOhBnNk= github.com/spiffe/spire-api-sdk v1.8.4 h1:pVQk6PerN1Cp44dgiXamoCb5/iDD0Zffe8MMdcJnMio= +github.com/spiffe/spire-api-sdk v1.8.4/go.mod h1:4uuhFlN6KBWjACRP3xXwrOTNnvaLp1zJs8Lribtr4fI= github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4= github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI= github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0= @@ -1351,6 +1413,7 @@ github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/X github.com/uudashr/gocognit v1.1.2 h1:l6BAEKJqQH2UpKAPKdMfZf5kE4W/2xk8pfU1OVLvniI= github.com/uudashr/gocognit v1.1.2/go.mod h1:aAVdLURqcanke8h3vg35BC++eseDm66Z7KmchI5et4k= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/vbatts/tar-split v0.11.5 h1:3bHCTIheBm1qFTcgh9oPu+nNBtX+XJIupG/vacinCts= github.com/vbatts/tar-split v0.11.5/go.mod h1:yZbwRsSeGjusneWgA781EKej9HF8vme8okylkAeNKLk= github.com/xanzy/go-gitlab v0.93.2 h1:kNNf3BYNYn/Zkig0B89fma12l36VLcYSGu7OnaRlRDg= @@ -1380,10 +1443,15 @@ github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7Jul github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= github.com/ysmood/fetchup v0.2.3 h1:ulX+SonA0Vma5zUFXtv52Kzip/xe7aj4vqT5AJwQ+ZQ= +github.com/ysmood/fetchup v0.2.3/go.mod h1:xhibcRKziSvol0H1/pj33dnKrYyI2ebIvz5cOOkYGns= github.com/ysmood/goob v0.4.0 h1:HsxXhyLBeGzWXnqVKtmT9qM7EuVs/XOgkX7T6r1o1AQ= +github.com/ysmood/goob v0.4.0/go.mod h1:u6yx7ZhS4Exf2MwciFr6nIM8knHQIE22lFpWHnfql18= github.com/ysmood/got v0.34.1 h1:IrV2uWLs45VXNvZqhJ6g2nIhY+pgIG1CUoOcqfXFl1s= +github.com/ysmood/got v0.34.1/go.mod h1:yddyjq/PmAf08RMLSwDjPyCvHvYed+WjHnQxpH851LM= github.com/ysmood/gson v0.7.3 h1:QFkWbTH8MxyUTKPkVWAENJhxqdBa4lYTQWqZCiLG6kE= +github.com/ysmood/gson v0.7.3/go.mod h1:3Kzs5zDl21g5F/BlLTNcuAGAYLKt2lV5G8D1zF3RNmg= github.com/ysmood/leakless v0.8.0 h1:BzLrVoiwxikpgEQR0Lk8NyBN5Cit2b1z+u0mgL4ZJak= +github.com/ysmood/leakless v0.8.0/go.mod h1:R8iAXPRaG97QJwqxs74RdwzcRHT1SWCGTNqY8q0JvMQ= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -1393,11 +1461,13 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.1/go.mod 
h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zalando/go-keyring v0.2.2 h1:f0xmpYiSrHtSNAVgwip93Cg8tuF45HJM6rHq/A5RI/4= +github.com/zalando/go-keyring v0.2.2/go.mod h1:sI3evg9Wvpw3+n4SqplGSJUMwtDeROfD4nsFz4z9PG0= github.com/zeebo/errs v1.3.0 h1:hmiaKqgYZzcVgRL1Vkc1Mn2914BbzB0IBxs+ebeutGs= github.com/zeebo/errs v1.3.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= gitlab.com/bosi/decorder v0.4.1 h1:VdsdfxhstabyhZovHafFw+9eJ6eU0d2CkFNJcZz/NU4= gitlab.com/bosi/decorder v0.4.1/go.mod h1:jecSqWUew6Yle1pCr2eLWTensJMmsxHsBwt+PVbkAqA= go-simpler.org/assert v0.6.0 h1:QxSrXa4oRuo/1eHMXSBFHKvJIpWABayzKldqZyugG7E= +go-simpler.org/assert v0.6.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= go-simpler.org/sloglint v0.1.2 h1:IjdhF8NPxyn0Ckn2+fuIof7ntSnVUAqBFcQRrnG9AiM= go-simpler.org/sloglint v0.1.2/go.mod h1:2LL+QImPfTslD5muNPydAEYmpXIj6o/WYcqnJjLi4o4= go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= @@ -1417,9 +1487,11 @@ go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs= go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY= go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4= +go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI= go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE= go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8= go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= +go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A= go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= @@ -1434,6 +1506,7 @@ go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0 go.uber.org/automaxprocs v1.5.2 h1:2LxUOGiR3O6tw8ui5sZa2LAaHnsviZdVOUZw4fvbnME= go.uber.org/automaxprocs v1.5.2/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= @@ -2120,6 +2193,7 @@ gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod 
h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -2174,3 +2248,4 @@ sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= software.sslmate.com/src/go-pkcs12 v0.2.0 h1:nlFkj7bTysH6VkC4fGphtjXRbezREPgrHuJG20hBGPE= +software.sslmate.com/src/go-pkcs12 v0.2.0/go.mod h1:23rNcYsMabIc1otwLpTkCCPwUq6kQsTyowttG/as0kQ= diff --git a/pkg/artifacts/signable.go b/pkg/artifacts/signable.go index 1ac9492f99..ccf878f56a 100644 --- a/pkg/artifacts/signable.go +++ b/pkg/artifacts/signable.go @@ -17,6 +17,7 @@ import ( "context" _ "crypto/sha256" // Recommended by go-digest. _ "crypto/sha512" // Recommended by go-digest. + "encoding/json" "fmt" "regexp" "strings" @@ -24,9 +25,11 @@ import ( "github.com/google/go-containerregistry/pkg/name" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" "github.com/opencontainers/go-digest" + "github.com/opentracing/opentracing-go/log" "github.com/tektoncd/chains/internal/backport" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "k8s.io/apimachinery/pkg/util/sets" "knative.dev/pkg/logging" @@ -65,12 +68,12 @@ type TaskRunArtifact struct{} var _ Signable = &TaskRunArtifact{} func (ta *TaskRunArtifact) ShortKey(obj interface{}) string { - tro := obj.(*objects.TaskRunObject) + tro := obj.(*objects.TaskRunObjectV1) return "taskrun-" + string(tro.UID) } func (ta *TaskRunArtifact) FullKey(obj interface{}) string { - tro := obj.(*objects.TaskRunObject) + tro := obj.(*objects.TaskRunObjectV1) gvk := tro.GetGroupVersionKind() return fmt.Sprintf("%s-%s-%s-%s", gvk.Group, gvk.Version, gvk.Kind, tro.UID) } @@ -104,12 +107,12 @@ type PipelineRunArtifact struct{} var _ Signable = &PipelineRunArtifact{} func (pa *PipelineRunArtifact) ShortKey(obj interface{}) string { - pro := obj.(*objects.PipelineRunObject) + pro := obj.(*objects.PipelineRunObjectV1) return "pipelinerun-" + string(pro.UID) } func (pa *PipelineRunArtifact) FullKey(obj interface{}) string { - pro := obj.(*objects.PipelineRunObject) + pro := obj.(*objects.PipelineRunObjectV1) gvk := pro.GetGroupVersionKind() return fmt.Sprintf("%s-%s-%s-%s", gvk.Group, gvk.Version, gvk.Kind, pro.UID) } @@ -149,40 +152,50 @@ type image struct { } func (oa *OCIArtifact) ExtractObjects(ctx context.Context, obj objects.TektonObject) []interface{} { - log := logging.FromContext(ctx) objs := []interface{}{} + if trV1, ok := obj.GetObject().(*v1.TaskRun); ok { + serializedResources := trV1.Annotations["tekton.dev/v1beta1-status-taskrunstatusfields-taskspec-resources"] + var resources v1beta1.TaskResources //nolint:staticcheck + shouldReplace := false + if err := json.Unmarshal([]byte(serializedResources), &resources); err == nil { + shouldReplace = true - // TODO: Not applicable to PipelineRuns, should look into a better way to separate this out - if tr, ok := obj.GetObject().(*v1beta1.TaskRun); ok { - imageResourceNames := map[string]*image{} - if tr.Status.TaskSpec != nil && tr.Status.TaskSpec.Resources != nil { - for _, output := range tr.Status.TaskSpec.Resources.Outputs { - if output.Type == backport.PipelineResourceTypeImage { - imageResourceNames[output.Name] = &image{} + } + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, trV1); err == nil { + if 
shouldReplace { + trV1Beta1.Status.TaskSpec.Resources = &resources //nolint:staticcheck + } + imageResourceNames := map[string]*image{} + if trV1Beta1.Status.TaskSpec != nil && trV1Beta1.Status.TaskSpec.Resources != nil { //nolint:staticcheck + for _, output := range trV1Beta1.Status.TaskSpec.Resources.Outputs { //nolint:staticcheck + if output.Type == backport.PipelineResourceTypeImage { + imageResourceNames[output.Name] = &image{} + } } } - } - for _, rr := range tr.Status.ResourcesResult { - img, ok := imageResourceNames[rr.ResourceName] - if !ok { - continue - } - // We have a result for an image! - if rr.Key == "url" { - img.url = rr.Value - } else if rr.Key == "digest" { - img.digest = rr.Value + for _, rr := range trV1Beta1.Status.ResourcesResult { + img, ok := imageResourceNames[rr.ResourceName] + if !ok { + continue + } + // We have a result for an image! + if rr.Key == "url" { + img.url = rr.Value + } else if rr.Key == "digest" { + img.digest = rr.Value + } } - } - for _, image := range imageResourceNames { - dgst, err := name.NewDigest(fmt.Sprintf("%s@%s", image.url, image.digest)) - if err != nil { - log.Error(err) - continue + for _, image := range imageResourceNames { + dgst, err := name.NewDigest(fmt.Sprintf("%s@%s", image.url, image.digest)) + if err != nil { + log.Error(err) + continue + } + objs = append(objs, dgst) } - objs = append(objs, dgst) } } @@ -208,16 +221,15 @@ func ExtractOCIImagesFromResults(ctx context.Context, obj objects.TektonObject) logger.Errorf("error getting digest: %v", err) continue } - objs = append(objs, dgst) } // look for a comma separated list of images for _, key := range obj.GetResults() { - if key.Name != "IMAGES" { + if key.GetName() != "IMAGES" { continue } - imgs := strings.FieldsFunc(key.Value.StringVal, split) + imgs := strings.FieldsFunc(key.GetStringValue(), split) for _, img := range imgs { trimmed := strings.TrimSpace(img) @@ -291,43 +303,36 @@ func ExtractStructuredTargetFromResults(ctx context.Context, obj objects.TektonO } // TODO(#592): support structured results using Run - results := []objects.Result{} for _, res := range obj.GetResults() { - results = append(results, objects.Result{ - Name: res.Name, - Value: res.Value, - }) - } - for _, res := range results { - if strings.HasSuffix(res.Name, categoryMarker) { + if strings.HasSuffix(res.GetName(), categoryMarker) { valid, err := isStructuredResult(res, categoryMarker) if err != nil { logger.Debugf("ExtractStructuredTargetFromResults: %v", err) } if valid { - logger.Debugf("Extracted Structured data from Result %s, %s", res.Value.ObjectVal["uri"], res.Value.ObjectVal["digest"]) - objs = append(objs, &StructuredSignable{URI: res.Value.ObjectVal["uri"], Digest: res.Value.ObjectVal["digest"]}) + logger.Debugf("Extracted Structured data from Result %v", res) + objs = append(objs, &StructuredSignable{URI: res.GetObjectValue("uri"), Digest: res.GetObjectValue("digest")}) } } } return objs } -func isStructuredResult(res objects.Result, categoryMarker string) (bool, error) { - if !strings.HasSuffix(res.Name, categoryMarker) { +func isStructuredResult(res objects.GenericResult, categoryMarker string) (bool, error) { + if !strings.HasSuffix(res.GetName(), categoryMarker) { return false, nil } - if res.Value.ObjectVal == nil { - return false, fmt.Errorf("%s should be an object: %v", res.Name, res.Value.ObjectVal) + if res.ObjectValueIsNil() { + return false, fmt.Errorf("%s should be an object: %v", res.GetName(), res) } - if res.Value.ObjectVal["uri"] == "" { - return false, 
fmt.Errorf("%s should have uri field: %v", res.Name, res.Value.ObjectVal) + if res.GetObjectValue("uri") == "" { + return false, fmt.Errorf("%s should have uri field: %v", res.GetName(), res) } - if res.Value.ObjectVal["digest"] == "" { - return false, fmt.Errorf("%s should have digest field: %v", res.Name, res.Value.ObjectVal) + if res.GetObjectValue("digest") == "" { + return false, fmt.Errorf("%s should have digest field: %v", res.GetName(), res) } - if _, _, err := ParseDigest(res.Value.ObjectVal["digest"]); err != nil { - return false, fmt.Errorf("error getting digest %s: %v", res.Value.ObjectVal["digest"], err) + if _, _, err := ParseDigest(res.GetObjectValue("digest")); err != nil { + return false, fmt.Errorf("error getting digest %s: %v", res.GetObjectValue("digest"), err) } return true, nil } diff --git a/pkg/artifacts/signable_test.go b/pkg/artifacts/signable_test.go index b3181020c3..884adc4107 100644 --- a/pkg/artifacts/signable_test.go +++ b/pkg/artifacts/signable_test.go @@ -14,6 +14,7 @@ limitations under the License. package artifacts import ( + "encoding/json" "fmt" "sort" "testing" @@ -23,6 +24,7 @@ import ( "github.com/google/go-containerregistry/pkg/name" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" @@ -52,7 +54,7 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }{ { name: "one image", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck TypeMeta: metav1.TypeMeta{ Kind: "TaskRun", }, @@ -71,10 +73,10 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }, TaskSpec: &v1beta1.TaskSpec{ - Resources: &v1beta1.TaskResources{ - Outputs: []v1beta1.TaskResource{ + Resources: &v1beta1.TaskResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResource{ //nolint:staticcheck { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image", Type: "image", }, @@ -89,7 +91,7 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, { name: "two images", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck TypeMeta: metav1.TypeMeta{ Kind: "TaskRun", }, @@ -118,16 +120,16 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }, TaskSpec: &v1beta1.TaskSpec{ - Resources: &v1beta1.TaskResources{ - Outputs: []v1beta1.TaskResource{ + Resources: &v1beta1.TaskResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResource{ //nolint:staticcheck { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image1", Type: "image", }, }, { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image2", Type: "image", }, @@ -145,7 +147,7 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, { name: "resource and result", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck TypeMeta: metav1.TypeMeta{ Kind: "TaskRun", }, @@ -182,10 +184,10 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { Name: "IMAGE_DIGEST", }, }, - Resources: &v1beta1.TaskResources{ 
- Outputs: []v1beta1.TaskResource{ + Resources: &v1beta1.TaskResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResource{ //nolint:staticcheck { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image", Type: "image", }, @@ -202,7 +204,7 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, { name: "extra", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck TypeMeta: metav1.TypeMeta{ Kind: "TaskRun", }, @@ -241,10 +243,10 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }, TaskSpec: &v1beta1.TaskSpec{ - Resources: &v1beta1.TaskResources{ - Outputs: []v1beta1.TaskResource{ + Resources: &v1beta1.TaskResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResource{ //nolint:staticcheck { - ResourceDeclaration: v1beta1.ResourceDeclaration{ + ResourceDeclaration: v1beta1.ResourceDeclaration{ //nolint:staticcheck Name: "my-image", Type: "image", }, @@ -256,15 +258,16 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }), want: []interface{}{createDigest(t, "gcr.io/foo/bar@sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, - }, { + }, + { name: "images", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "IMAGES", - Value: *v1beta1.NewStructuredValues(fmt.Sprintf(" \n \tgcr.io/foo/bar@%s\n,gcr.io/baz/bar@%s", digest1, digest2)), + Value: *v1.NewStructuredValues(fmt.Sprintf(" \n \tgcr.io/foo/bar@%s\n,gcr.io/baz/bar@%s", digest1, digest2)), }, }, }, @@ -276,13 +279,13 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { }, }, { name: "images-newline", - obj: objects.NewTaskRunObject(&v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "IMAGES", - Value: *v1beta1.NewStructuredValues(fmt.Sprintf("gcr.io/foo/bar@%s\ngcr.io/baz/bar@%s\n\n", digest1, digest2)), + Value: *v1.NewStructuredValues(fmt.Sprintf("gcr.io/foo/bar@%s\ngcr.io/baz/bar@%s\n\n", digest1, digest2)), }, }, }, @@ -298,6 +301,19 @@ func TestOCIArtifact_ExtractObjects(t *testing.T) { t.Run(tt.name, func(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) oa := &OCIArtifact{} + if trV1Beta1, ok := tt.obj.GetObject().(*v1beta1.TaskRun); ok { //nolint:staticcheck + trV1 := &v1.TaskRun{} + if err := trV1Beta1.ConvertTo(ctx, trV1); err == nil { + if trV1Beta1.Status.TaskRunStatusFields.TaskSpec != nil && trV1Beta1.Status.TaskRunStatusFields.TaskSpec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Status.TaskRunStatusFields.TaskSpec.Resources) //nolint:staticcheck + if err != nil { + t.Errorf("Error serializing to JSON: %v", err) + } + trV1.Annotations["tekton.dev/v1beta1-status-taskrunstatusfields-taskspec-resources"] = string(jsonData) + } + tt.obj = objects.NewTaskRunObjectV1(trV1) + } + } got := oa.ExtractObjects(ctx, tt.obj) sort.Slice(got, func(i, j int) bool { a := got[i].(name.Digest) @@ -312,25 +328,25 @@ func TestOCIArtifact_ExtractObjects(t 
*testing.T) { } func TestExtractOCIImagesFromResults(t *testing.T) { - tr := &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "img1_IMAGE_URL", Value: *v1beta1.NewStructuredValues("img1")}, - {Name: "img1_IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: "img2_IMAGE_URL", Value: *v1beta1.NewStructuredValues("img2")}, - {Name: "img2_IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues(digest2)}, - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues("img3")}, - {Name: "IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: "img4_IMAGE_URL", Value: *v1beta1.NewStructuredValues("img4")}, - {Name: "img5_IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues("sha123:abc")}, - {Name: "empty_str_IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues("")}, - {Name: "empty_str_IMAGE_URL", Value: *v1beta1.NewStructuredValues("")}, + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "img1_IMAGE_URL", Value: *v1.NewStructuredValues("img1")}, + {Name: "img1_IMAGE_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "img2_IMAGE_URL", Value: *v1.NewStructuredValues("img2")}, + {Name: "img2_IMAGE_DIGEST", Value: *v1.NewStructuredValues(digest2)}, + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues("img3")}, + {Name: "IMAGE_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "img4_IMAGE_URL", Value: *v1.NewStructuredValues("img4")}, + {Name: "img5_IMAGE_DIGEST", Value: *v1.NewStructuredValues("sha123:abc")}, + {Name: "empty_str_IMAGE_DIGEST", Value: *v1.NewStructuredValues("")}, + {Name: "empty_str_IMAGE_URL", Value: *v1.NewStructuredValues("")}, }, }, }, } - obj := objects.NewTaskRunObject(tr) + obj := objects.NewTaskRunObjectV1(tr) want := []interface{}{ createDigest(t, fmt.Sprintf("img1@%s", digest1)), createDigest(t, fmt.Sprintf("img2@%s", digest2)), @@ -349,23 +365,23 @@ func TestExtractOCIImagesFromResults(t *testing.T) { } func TestExtractSignableTargetFromResults(t *testing.T) { - tr := &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: "mvn1_pom_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("com.google.guava:guava:31.0-jre.pom")}, - {Name: "mvn1_pom_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest2)}, - {Name: "mvn1_src_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("com.google.guava:guava:31.0-jre-sources.jar")}, - {Name: "mvn1_src_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest3)}, - {Name: "mvn2_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/a.b.c:d:1.0-jre")}, - {Name: "mvn2_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest4)}, - {Name: "ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/empty_prefix")}, - {Name: "ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: "miss_target_name_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues(digest1)}, - {Name: 
"wrong_digest_format_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/wrong_digest_format")}, - {Name: "wrong_digest_format_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("abc")}, + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "mvn1_pom_ARTIFACT_URI", Value: *v1.NewStructuredValues("com.google.guava:guava:31.0-jre.pom")}, + {Name: "mvn1_pom_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest2)}, + {Name: "mvn1_src_ARTIFACT_URI", Value: *v1.NewStructuredValues("com.google.guava:guava:31.0-jre-sources.jar")}, + {Name: "mvn1_src_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest3)}, + {Name: "mvn2_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/a.b.c:d:1.0-jre")}, + {Name: "mvn2_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest4)}, + {Name: "ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/empty_prefix")}, + {Name: "ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "miss_target_name_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues(digest1)}, + {Name: "wrong_digest_format_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/wrong_digest_format")}, + {Name: "wrong_digest_format_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("abc")}, }, }, }, @@ -378,7 +394,7 @@ func TestExtractSignableTargetFromResults(t *testing.T) { {URI: "projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/empty_prefix", Digest: digest1}, } ctx := logtesting.TestContextWithLogger(t) - got := ExtractSignableTargetFromResults(ctx, objects.NewTaskRunObject(tr)) + got := ExtractSignableTargetFromResults(ctx, objects.NewTaskRunObjectV1(tr)) sort.Slice(got, func(i, j int) bool { return got[i].URI < got[j].URI }) @@ -391,13 +407,13 @@ func TestExtractSignableTargetFromResults(t *testing.T) { } func TestExtractStructuredTargetFromResults(t *testing.T) { - tr := &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "mvn1_pkg" + "_" + ArtifactsOutputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre", "digest": digest1, "signable_type": "", @@ -405,7 +421,7 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { }, { Name: "mvn1_pom_sha512" + "_" + ArtifactsOutputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "com.google.guava:guava:31.0-jre.pom", "digest": digest2, "signable_type": "", @@ -413,56 +429,56 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { }, { Name: "img1_input" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ 
+ Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest3, }), }, { Name: "img2_input_sha1" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_sha1, }), }, { Name: "img2_input_incorrect_sha1" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_incorrect_sha1, }), }, { Name: "img3_input_sha384" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_sha384, }), }, { Name: "img3_input_incorrect_sha384" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_incorrect_sha384, }), }, { Name: "img4_input_sha512" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_sha512, }), }, { Name: "img4_input_incorrect_sha512" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest_incorrect_sha512, }), }, { Name: "img2_input_no_digest" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/foo", "digest": "", }), @@ -479,7 +495,7 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { {URI: "gcr.io/foo/bar", Digest: digest_sha512}, } ctx := logtesting.TestContextWithLogger(t) - gotInputs := ExtractStructuredTargetFromResults(ctx, objects.NewTaskRunObject(tr), ArtifactsInputsResultName) + gotInputs := ExtractStructuredTargetFromResults(ctx, objects.NewTaskRunObjectV1(tr), ArtifactsInputsResultName) if diff := cmp.Diff(gotInputs, wantInputs, cmpopts.SortSlices(func(x, y *StructuredSignable) bool { return x.Digest < y.Digest })); diff != "" { t.Errorf("Inputs are not as expected: %v", diff) } @@ -488,7 +504,7 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { {URI: "projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre", Digest: digest1}, {URI: "com.google.guava:guava:31.0-jre.pom", Digest: digest2}, } - gotOutputs := ExtractStructuredTargetFromResults(ctx, objects.NewTaskRunObject(tr), ArtifactsOutputsResultName) + gotOutputs := ExtractStructuredTargetFromResults(ctx, objects.NewTaskRunObjectV1(tr), ArtifactsOutputsResultName) opts := append(ignore, cmpopts.SortSlices(func(x, y *StructuredSignable) bool { return x.Digest < y.Digest })) if diff := cmp.Diff(gotOutputs, wantOutputs, opts...); diff != "" { t.Error(diff) @@ -496,27 +512,27 @@ func TestExtractStructuredTargetFromResults(t *testing.T) { } func TestRetrieveMaterialsFromStructuredResults(t *testing.T) { - tr := &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "img1_input" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": OCIScheme + "gcr.io/foo/bar", "digest": 
"sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, { Name: "img2_input_no_digest" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": OCIScheme + "gcr.io/foo/foo", "digest": "", }), }, { Name: "img2_input_invalid_digest" + "_" + ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": OCIScheme + "gcr.io/foo/foo", "digest": "sha:123", }), @@ -532,7 +548,7 @@ func TestRetrieveMaterialsFromStructuredResults(t *testing.T) { }, } ctx := logtesting.TestContextWithLogger(t) - gotMaterials := RetrieveMaterialsFromStructuredResults(ctx, objects.NewTaskRunObject(tr), ArtifactsInputsResultName) + gotMaterials := RetrieveMaterialsFromStructuredResults(ctx, objects.NewTaskRunObjectV1(tr), ArtifactsInputsResultName) if diff := cmp.Diff(gotMaterials, wantMaterials, ignore...); diff != "" { t.Fatalf("Materials not the same %s", diff) @@ -542,7 +558,7 @@ func TestRetrieveMaterialsFromStructuredResults(t *testing.T) { func TestValidateResults(t *testing.T) { tests := []struct { name string - obj objects.Result + obj objects.ResultV1 categoryMarker string wantResult bool wantErr error @@ -550,9 +566,9 @@ func TestValidateResults(t *testing.T) { { name: "valid result", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "valid_result-ARTIFACT_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest3, @@ -565,37 +581,37 @@ func TestValidateResults(t *testing.T) { { name: "invalid result without digest field", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "missing_digest-ARTIFACT_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "uri": "gcr.io/foo/bar", }, }, }, wantResult: false, - wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have digest field: map[uri:gcr.io/foo/bar]"), + wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have digest field: {missing_digest-ARTIFACT_OUTPUTS { [] map[uri:gcr.io/foo/bar]}}"), }, { name: "invalid result without uri field", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "missing_digest-ARTIFACT_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "digest": digest3, }, }, }, wantResult: false, - wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have uri field: map[digest:sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7]"), + wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have uri field: {missing_digest-ARTIFACT_OUTPUTS { [] map[digest:sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7]}}"), }, { name: "invalid result wrong digest format", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "missing_digest-ARTIFACT_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "uri": "gcr.io/foo/bar", "digest": "", @@ -603,14 +619,14 @@ func TestValidateResults(t *testing.T) { }, }, wantResult: false, - wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have digest field: map[digest: uri:gcr.io/foo/bar]"), + wantErr: fmt.Errorf("missing_digest-ARTIFACT_OUTPUTS should have digest field: {missing_digest-ARTIFACT_OUTPUTS { [] 
map[digest: uri:gcr.io/foo/bar]}}"), }, { name: "invalid result wrong type hinting", categoryMarker: ArtifactsOutputsResultName, - obj: objects.Result{ + obj: objects.ResultV1{ Name: "missing_digest-ARTIFACTs_OUTPUTS", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ ObjectVal: map[string]string{ "uri": "gcr.io/foo/bar", "digest": digest3, diff --git a/pkg/artifacts/structured.go b/pkg/artifacts/structured.go index 4e9efb1a9b..fa22855aa1 100644 --- a/pkg/artifacts/structured.go +++ b/pkg/artifacts/structured.go @@ -56,15 +56,15 @@ func (b *structuredSignableExtractor) extract(ctx context.Context, obj objects.T if suffix == "" { continue } - if !strings.HasSuffix(res.Name, suffix) { + if !strings.HasSuffix(res.GetName(), suffix) { continue } - value := strings.TrimSpace(res.Value.StringVal) + value := strings.TrimSpace(res.GetStringValue()) if value == "" { - logger.Debugf("error getting string value for %s", res.Name) + logger.Debugf("error getting string value for %s", res.GetName()) continue } - marker := strings.TrimSuffix(res.Name, suffix) + marker := strings.TrimSuffix(res.GetName(), suffix) if _, ok := partials[marker]; !ok { partials[marker] = StructuredSignable{} } diff --git a/pkg/chains/annotations_test.go b/pkg/chains/annotations_test.go index d8dd0ed717..e697b5752e 100644 --- a/pkg/chains/annotations_test.go +++ b/pkg/chains/annotations_test.go @@ -18,7 +18,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" rtesting "knative.dev/pkg/reconciler/testing" @@ -78,7 +78,7 @@ func TestReconciled(t *testing.T) { c := fakepipelineclient.Get(ctx) // Test TaskRun - taskRun := objects.NewTaskRunObject(&v1beta1.TaskRun{ + taskRun := objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ ChainsAnnotation: tt.annotation, @@ -87,7 +87,7 @@ func TestReconciled(t *testing.T) { }) tekton.CreateObject(t, ctx, c, taskRun) - cachedTaskRun := objects.NewTaskRunObject(&v1beta1.TaskRun{ + cachedTaskRun := objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ ChainsAnnotation: tt.latestAnnotation, @@ -101,7 +101,7 @@ func TestReconciled(t *testing.T) { } // Test PipelineRun - pipelineRun := objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + pipelineRun := objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ ChainsAnnotation: tt.annotation, @@ -110,7 +110,7 @@ func TestReconciled(t *testing.T) { }) tekton.CreateObject(t, ctx, c, pipelineRun) - cachedPipelineRun := objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + cachedPipelineRun := objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ ChainsAnnotation: tt.latestAnnotation, @@ -133,12 +133,12 @@ func TestMarkSigned(t *testing.T) { }{ { name: "mark taskrun", - object: objects.NewTaskRunObject(&v1beta1.TaskRun{ + object: objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", }, - Spec: v1beta1.TaskRunSpec{ - TaskRef: &v1beta1.TaskRef{ + Spec: v1.TaskRunSpec{ + TaskRef: &v1.TaskRef{ Name: "foo", }, }, @@ -146,12 +146,12 @@ func TestMarkSigned(t *testing.T) { }, { name: "mark pipelinerun", - object: 
objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + object: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "my-pipelinerun", }, - Spec: v1beta1.PipelineRunSpec{ - PipelineRef: &v1beta1.PipelineRef{ + Spec: v1.PipelineRunSpec{ + PipelineRef: &v1.PipelineRef{ Name: "foo", }, }, @@ -212,13 +212,13 @@ func TestMarkFailed(t *testing.T) { }{ { name: "mark taskrun failed", - object: objects.NewTaskRunObject(&v1beta1.TaskRun{ + object: objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Annotations: map[string]string{RetryAnnotation: "3"}, }, - Spec: v1beta1.TaskRunSpec{ - TaskRef: &v1beta1.TaskRef{ + Spec: v1.TaskRunSpec{ + TaskRef: &v1.TaskRef{ Name: "foo", }, }, @@ -226,13 +226,13 @@ func TestMarkFailed(t *testing.T) { }, { name: "mark pipelinerun failed", - object: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + object: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "my-pipelinerun", Annotations: map[string]string{RetryAnnotation: "3"}, }, - Spec: v1beta1.PipelineRunSpec{ - PipelineRef: &v1beta1.PipelineRef{ + Spec: v1.PipelineRunSpec{ + PipelineRef: &v1.PipelineRef{ Name: "foo", }, }, @@ -294,23 +294,23 @@ func TestRetryAvailble(t *testing.T) { for _, test := range tests { t.Run(test.description, func(t *testing.T) { // test taskrun - tr := &v1beta1.TaskRun{ + tr := &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: test.annotations, }, } - trObj := objects.NewTaskRunObject(tr) + trObj := objects.NewTaskRunObjectV1(tr) got := RetryAvailable(trObj) if got != test.expected { t.Fatalf("RetryAvailble() got %v expected %v", got, test.expected) } // test pipelinerun - pr := &v1beta1.PipelineRun{ + pr := &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: test.annotations, }, } - prObj := objects.NewPipelineRunObject(pr) + prObj := objects.NewPipelineRunObjectV1(pr) got = RetryAvailable(prObj) if got != test.expected { t.Fatalf("RetryAvailble() got %v expected %v", got, test.expected) @@ -326,13 +326,13 @@ func TestAddRetry(t *testing.T) { }{ { name: "add retry to taskrun", - object: objects.NewTaskRunObject(&v1beta1.TaskRun{ + object: objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{Name: "mytaskrun"}, }), }, { name: "add retry to pipelinerun", - object: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + object: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{Name: "mypipelinerun"}, }), }, diff --git a/pkg/chains/formats/slsa/attest/attest.go b/pkg/chains/formats/slsa/attest/attest.go index e0bc380d38..6e607c9043 100644 --- a/pkg/chains/formats/slsa/attest/attest.go +++ b/pkg/chains/formats/slsa/attest/attest.go @@ -23,7 +23,7 @@ import ( slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/tektoncd/chains/pkg/artifacts" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" corev1 "k8s.io/api/core/v1" ) @@ -40,7 +40,7 @@ type StepAttestation struct { Annotations map[string]string `json:"annotations"` } -func Step(step *v1beta1.Step, stepState *v1beta1.StepState) StepAttestation { +func Step(step *v1.Step, stepState *v1.StepState) StepAttestation { attestation := StepAttestation{} entrypoint := strings.Join(step.Command, " ") @@ -58,16 +58,20 @@ func Step(step *v1beta1.Step, stepState *v1beta1.StepState) StepAttestation { return attestation } -func Invocation(obj 
objects.TektonObject, params []v1beta1.Param, paramSpecs []v1beta1.ParamSpec) slsa.ProvenanceInvocation { - var source *v1beta1.RefSource - if p := obj.GetProvenance(); p != nil { - source = p.RefSource - } +func Invocation(obj objects.TektonObject, params []v1.Param, paramSpecs []v1.ParamSpec) slsa.ProvenanceInvocation { i := slsa.ProvenanceInvocation{ - ConfigSource: convertConfigSource(source), + ConfigSource: slsa.ConfigSource{}, + } + p := obj.GetProvenance() + if !p.IsNil() && !p.RefSourceIsNil() { + i.ConfigSource = slsa.ConfigSource{ + URI: p.GetRefSourceURI(), + Digest: p.GetRefSourceDigest(), + EntryPoint: p.GetRefSourceEntrypoint(), + } } - iParams := make(map[string]v1beta1.ParamValue) + iParams := make(map[string]v1.ParamValue) // get implicit parameters from defaults for _, p := range paramSpecs { @@ -108,17 +112,6 @@ func Invocation(obj objects.TektonObject, params []v1beta1.Param, paramSpecs []v return i } -func convertConfigSource(source *v1beta1.RefSource) slsa.ConfigSource { - if source == nil { - return slsa.ConfigSource{} - } - return slsa.ConfigSource{ - URI: source.URI, - Digest: source.Digest, - EntryPoint: source.EntryPoint, - } -} - // supports the SPDX format which is recommended by in-toto // ref: https://spdx.github.io/spdx-spec/v2-draft/package-information/#773-examples // ref: https://github.com/in-toto/attestation/blob/849867bee97e33678f61cc6bd5da293097f84c25/spec/field_types.md diff --git a/pkg/chains/formats/slsa/extract/extract.go b/pkg/chains/formats/slsa/extract/extract.go index 7a2d093c87..a92e363bb3 100644 --- a/pkg/chains/formats/slsa/extract/extract.go +++ b/pkg/chains/formats/slsa/extract/extract.go @@ -18,6 +18,7 @@ package extract import ( "context" + "encoding/json" "fmt" "strings" @@ -29,6 +30,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/artifact" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "knative.dev/pkg/logging" ) @@ -46,9 +48,9 @@ func SubjectDigests(ctx context.Context, obj objects.TektonObject, slsaconfig *s var subjects []intoto.Subject switch obj.GetObject().(type) { - case *v1beta1.PipelineRun: + case *v1.PipelineRun: subjects = subjectsFromPipelineRun(ctx, obj, slsaconfig) - case *v1beta1.TaskRun: + case *v1.TaskRun: subjects = subjectsFromTektonObject(ctx, obj) } @@ -67,7 +69,7 @@ func subjectsFromPipelineRun(ctx context.Context, obj objects.TektonObject, slsa // If deep inspection is enabled, collect subjects from child taskruns var result []intoto.Subject - pro := obj.(*objects.PipelineRunObject) + pro := obj.(*objects.PipelineRunObjectV1) pSpec := pro.Status.PipelineSpec if pSpec != nil { @@ -135,42 +137,57 @@ func subjectsFromTektonObject(ctx context.Context, obj objects.TektonObject) []i }) } - // Check if object is a Taskrun, if so search for images used in PipelineResources - // Otherwise object is a PipelineRun, where Pipelineresources are not relevant. 
- // PipelineResources have been deprecated so their support has been left out of - // the POC for TEP-84 - // More info: https://tekton.dev/docs/pipelines/resources/ - tr, ok := obj.GetObject().(*v1beta1.TaskRun) - if !ok || tr.Spec.Resources == nil { - return subjects - } + if trV1, ok := obj.GetObject().(*v1.TaskRun); ok { + serializedResources := trV1.Annotations["tekton.dev/v1beta1-spec-resources"] + var resources v1beta1.TaskRunResources //nolint:staticcheck + shouldReplace := false + if err := json.Unmarshal([]byte(serializedResources), &resources); err == nil { + shouldReplace = true - // go through resourcesResult - for _, output := range tr.Spec.Resources.Outputs { - name := output.Name - if output.PipelineResourceBinding.ResourceSpec == nil { - continue } - // similarly, we could do this for other pipeline resources or whatever thing replaces them - if output.PipelineResourceBinding.ResourceSpec.Type == backport.PipelineResourceTypeImage { - // get the url and digest, and save as a subject - var url, digest string - for _, s := range tr.Status.ResourcesResult { - if s.ResourceName == name { - if s.Key == "url" { - url = s.Value - } - if s.Key == "digest" { - digest = s.Value + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, trV1); err == nil { + if shouldReplace { + trV1Beta1.Spec.Resources = &resources //nolint:staticcheck + } + + // Check if object is a Taskrun, if so search for images used in PipelineResources + // Otherwise object is a PipelineRun, where Pipelineresources are not relevant. + // PipelineResources have been deprecated so their support has been left out of + // the POC for TEP-84 + // More info: https://tekton.dev/docs/pipelines/resources/ + if !ok || trV1Beta1.Spec.Resources == nil { //nolint:staticcheck + return subjects + } + + // go through resourcesResult + for _, output := range trV1Beta1.Spec.Resources.Outputs { //nolint:staticcheck + name := output.Name + if output.PipelineResourceBinding.ResourceSpec == nil { + continue + } + // similarly, we could do this for other pipeline resources or whatever thing replaces them + if output.PipelineResourceBinding.ResourceSpec.Type == backport.PipelineResourceTypeImage { + // get the url and digest, and save as a subject + var url, digest string + for _, s := range trV1Beta1.Status.ResourcesResult { + if s.ResourceName == name { + if s.Key == "url" { + url = s.Value + } + if s.Key == "digest" { + digest = s.Value + } + } } + subjects = artifact.AppendSubjects(subjects, intoto.Subject{ + Name: url, + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, + }) } } - subjects = artifact.AppendSubjects(subjects, intoto.Subject{ - Name: url, - Digest: common.DigestSet{ - "sha256": strings.TrimPrefix(digest, "sha256:"), - }, - }) } } diff --git a/pkg/chains/formats/slsa/extract/extract_test.go b/pkg/chains/formats/slsa/extract/extract_test.go index 9c7b164679..583a727170 100644 --- a/pkg/chains/formats/slsa/extract/extract_test.go +++ b/pkg/chains/formats/slsa/extract/extract_test.go @@ -28,7 +28,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/compare" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" ) @@ -208,7 +208,7 @@ func 
TestPipelineRunObserveModeForSubjects(t *testing.T) { { name: "deep inspection enabled: pipelinerun and taskrun have duplicated results", pro: createProWithTaskRunResults( - createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObject), + createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObjectV1), []artifact{ {uri: artifactURL1, digest: "sha256:" + artifactDigest1}, }), @@ -228,7 +228,7 @@ func TestPipelineRunObserveModeForSubjects(t *testing.T) { { name: "deep inspection enabled: pipelinerun and taskrun have different results", pro: createProWithTaskRunResults( - createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObject), + createProWithPipelineResults(map[string]string{artifactURL1: "sha256:" + artifactDigest1}).(*objects.PipelineRunObjectV1), []artifact{ {uri: artifactURL2, digest: "sha256:" + artifactDigest2}, }), @@ -272,21 +272,21 @@ func TestPipelineRunObserveModeForSubjects(t *testing.T) { } func createTaskRunObjectWithResults(results map[string]string) objects.TektonObject { - trResults := []v1beta1.TaskRunResult{} + trResults := []v1.TaskRunResult{} prefix := 0 for url, digest := range results { trResults = append(trResults, - v1beta1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(digest)}, - v1beta1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(url)}, + v1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1.NewStructuredValues(digest)}, + v1.TaskRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1.NewStructuredValues(url)}, ) prefix++ } - return objects.NewTaskRunObject( - &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: trResults, + return objects.NewTaskRunObjectV1( + &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: trResults, }, }, }, @@ -294,21 +294,21 @@ func createTaskRunObjectWithResults(results map[string]string) objects.TektonObj } func createProWithPipelineResults(results map[string]string) objects.TektonObject { - prResults := []v1beta1.PipelineRunResult{} + prResults := []v1.PipelineRunResult{} prefix := 0 for url, digest := range results { prResults = append(prResults, - v1beta1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(digest)}, - v1beta1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(url)}, + v1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1.NewStructuredValues(digest)}, + v1.PipelineRunResult{Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1.NewStructuredValues(url)}, ) prefix++ } - return objects.NewPipelineRunObject( - &v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: prResults, + return objects.NewPipelineRunObjectV1( + &v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: prResults, }, }, }, @@ -323,19 +323,19 @@ type artifact struct { // create a child taskrun for each result // //nolint:all -func createProWithTaskRunResults(pro *objects.PipelineRunObject, results []artifact) objects.TektonObject { +func createProWithTaskRunResults(pro 
*objects.PipelineRunObjectV1, results []artifact) objects.TektonObject { if pro == nil { - pro = objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineSpec: &v1beta1.PipelineSpec{}, + pro = objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + PipelineSpec: &v1.PipelineSpec{}, }, }, }) } if pro.Status.PipelineSpec == nil { - pro.Status.PipelineSpec = &v1beta1.PipelineSpec{} + pro.Status.PipelineSpec = &v1.PipelineSpec{} } // create child taskruns with results and pipelinetask @@ -343,21 +343,21 @@ func createProWithTaskRunResults(pro *objects.PipelineRunObject, results []artif for _, r := range results { // simulate child taskruns pipelineTaskName := fmt.Sprintf("task-%d", prefix) - tr := &v1beta1.TaskRun{ + tr := &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{objects.PipelineTaskLabel: pipelineTaskName}}, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1beta1.NewStructuredValues(r.digest)}, - {Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1beta1.NewStructuredValues(r.uri)}, + Results: []v1.TaskRunResult{ + {Name: fmt.Sprintf("%v_IMAGE_DIGEST", prefix), Value: *v1.NewStructuredValues(r.digest)}, + {Name: fmt.Sprintf("%v_IMAGE_URL", prefix), Value: *v1.NewStructuredValues(r.uri)}, }, }, }, } pro.AppendTaskRun(tr) - pro.Status.PipelineSpec.Tasks = append(pro.Status.PipelineSpec.Tasks, v1beta1.PipelineTask{Name: pipelineTaskName}) + pro.Status.PipelineSpec.Tasks = append(pro.Status.PipelineSpec.Tasks, v1.PipelineTask{Name: pipelineTaskName}) prefix++ } diff --git a/pkg/chains/formats/slsa/internal/material/material.go b/pkg/chains/formats/slsa/internal/material/material.go index 864dc14ab4..90355575ee 100644 --- a/pkg/chains/formats/slsa/internal/material/material.go +++ b/pkg/chains/formats/slsa/internal/material/material.go @@ -18,6 +18,7 @@ package material import ( "context" + "encoding/json" "fmt" "strings" @@ -28,6 +29,8 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/artifact" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "knative.dev/pkg/logging" ) @@ -37,7 +40,7 @@ const ( ) // TaskMaterials constructs `predicate.materials` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images. -func TaskMaterials(ctx context.Context, tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, error) { +func TaskMaterials(ctx context.Context, tro *objects.TaskRunObjectV1) ([]common.ProvenanceMaterial, error) { var mats []common.ProvenanceMaterial // add step images @@ -56,13 +59,26 @@ func TaskMaterials(ctx context.Context, tro *objects.TaskRunObject) ([]common.Pr mats = artifact.AppendMaterials(mats, FromTaskParamsAndResults(ctx, tro)...) - // add task resources - mats = artifact.AppendMaterials(mats, FromTaskResources(ctx, tro)...) 
+ // convert to v1beta1 and add any task resources + serializedResources := tro.Annotations["tekton.dev/v1beta1-spec-resources"] + var resources v1beta1.TaskRunResources //nolint:staticcheck + shouldReplace := false + if err := json.Unmarshal([]byte(serializedResources), &resources); err == nil { + shouldReplace = true + + } + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + if err := trV1Beta1.ConvertFrom(ctx, tro.GetObject().(*v1.TaskRun)); err == nil { + if shouldReplace { + trV1Beta1.Spec.Resources = &resources //nolint:staticcheck + } + mats = artifact.AppendMaterials(mats, FromTaskResources(ctx, trV1Beta1)...) + } return mats, nil } -func PipelineMaterials(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig *slsaconfig.SlsaConfig) ([]common.ProvenanceMaterial, error) { +func PipelineMaterials(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaconfig *slsaconfig.SlsaConfig) ([]common.ProvenanceMaterial, error) { logger := logging.FromContext(ctx) var mats []common.ProvenanceMaterial if p := pro.Status.Provenance; p != nil && p.RefSource != nil { @@ -113,7 +129,7 @@ func PipelineMaterials(ctx context.Context, pro *objects.PipelineRunObject, slsa } // FromStepImages gets predicate.materials from step images -func FromStepImages(tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, error) { +func FromStepImages(tro *objects.TaskRunObjectV1) ([]common.ProvenanceMaterial, error) { mats := []common.ProvenanceMaterial{} for _, image := range tro.GetStepImages() { m, err := fromImageID(image) @@ -126,7 +142,7 @@ func FromStepImages(tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, er } // FromSidecarImages gets predicate.materials from sidecar images -func FromSidecarImages(tro *objects.TaskRunObject) ([]common.ProvenanceMaterial, error) { +func FromSidecarImages(tro *objects.TaskRunObjectV1) ([]common.ProvenanceMaterial, error) { mats := []common.ProvenanceMaterial{} for _, image := range tro.GetSidecarImages() { m, err := fromImageID(image) @@ -158,11 +174,11 @@ func fromImageID(imageID string) (common.ProvenanceMaterial, error) { } // FromTaskResourcesToMaterials gets materials from task resources. 
-func FromTaskResources(ctx context.Context, tro *objects.TaskRunObject) []common.ProvenanceMaterial { +func FromTaskResources(ctx context.Context, tr *v1beta1.TaskRun) []common.ProvenanceMaterial { //nolint:staticcheck mats := []common.ProvenanceMaterial{} - if tro.Spec.Resources != nil { //nolint:all //incompatible with pipelines v0.45 + if tr.Spec.Resources != nil { //nolint:all //incompatible with pipelines v0.45 // check for a Git PipelineResource - for _, input := range tro.Spec.Resources.Inputs { //nolint:all //incompatible with pipelines v0.45 + for _, input := range tr.Spec.Resources.Inputs { //nolint:all //incompatible with pipelines v0.45 if input.ResourceSpec == nil || input.ResourceSpec.Type != backport.PipelineResourceTypeGit { //nolint:all //incompatible with pipelines v0.45 continue } @@ -171,7 +187,7 @@ func FromTaskResources(ctx context.Context, tro *objects.TaskRunObject) []common Digest: common.DigestSet{}, } - for _, rr := range tro.Status.ResourcesResult { + for _, rr := range tr.Status.ResourcesResult { if rr.ResourceName != input.Name { continue } @@ -202,7 +218,7 @@ func FromTaskResources(ctx context.Context, tro *objects.TaskRunObject) []common // FromTaskParamsAndResults scans over the taskrun, taskspec params and taskrun results // and looks for unstructured type hinted names matching CHAINS-GIT_COMMIT and CHAINS-GIT_URL // to extract the commit and url value for input artifact materials. -func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObject) []common.ProvenanceMaterial { +func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObjectV1) []common.ProvenanceMaterial { var commit, url string // Scan for git params to use for materials if tro.Status.TaskSpec != nil { @@ -230,7 +246,7 @@ func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObject) [ } } - for _, r := range tro.Status.TaskRunResults { + for _, r := range tro.Status.Results { if r.Name == attest.CommitParam { commit = r.Value.StringVal } @@ -257,7 +273,7 @@ func FromTaskParamsAndResults(ctx context.Context, tro *objects.TaskRunObject) [ } // FromPipelineParamsAndResults extracts type hinted params and results and adds the url and digest to materials. -func FromPipelineParamsAndResults(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig *slsaconfig.SlsaConfig) []common.ProvenanceMaterial { +func FromPipelineParamsAndResults(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaconfig *slsaconfig.SlsaConfig) []common.ProvenanceMaterial { mats := []common.ProvenanceMaterial{} sms := artifacts.RetrieveMaterialsFromStructuredResults(ctx, pro, artifacts.ArtifactsInputsResultName) mats = artifact.AppendMaterials(mats, sms...) @@ -308,8 +324,8 @@ func FromPipelineParamsAndResults(ctx context.Context, pro *objects.PipelineRunO } } - // search status.PipelineRunResults - for _, r := range pro.Status.PipelineResults { + // search status.Results + for _, r := range pro.Status.Results { if r.Name == attest.CommitParam { commit = r.Value.StringVal } diff --git a/pkg/chains/formats/slsa/internal/material/material_test.go b/pkg/chains/formats/slsa/internal/material/material_test.go index 2f86b45f37..2ce97f7e1d 100644 --- a/pkg/chains/formats/slsa/internal/material/material_test.go +++ b/pkg/chains/formats/slsa/internal/material/material_test.go @@ -17,6 +17,7 @@ limitations under the License. 
package material import ( + "encoding/json" "fmt" "reflect" "strings" @@ -31,16 +32,16 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/internal/objectloader" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" - "sigs.k8s.io/yaml" ) const digest = "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7" -func createPro(path string) *objects.PipelineRunObject { +func createPro(path string) *objects.PipelineRunObjectV1 { var err error pr, err := objectloader.PipelineRunFromFile(path) if err != nil { @@ -54,34 +55,33 @@ func createPro(path string) *objects.PipelineRunObject { if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p } -func TestMaterialsWithTaskRunResults(t *testing.T) { - // make sure this works with Git resources - taskrun := `apiVersion: tekton.dev/v1beta1 -kind: TaskRun -spec: - taskSpec: - resources: - inputs: - - name: repo - type: git -status: - taskResults: - - name: CHAINS-GIT_COMMIT - value: 50c56a48cfb3a5a80fa36ed91c739bdac8381cbe - - name: CHAINS-GIT_URL - value: https://github.com/GoogleContainerTools/distroless` - - var taskRun *v1beta1.TaskRun - if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { - t.Fatal(err) +func TestMaterialsWithResults(t *testing.T) { + taskRun := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + { + Name: "CHAINS-GIT_COMMIT", + Value: v1.ParamValue{ + StringVal: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, + }, + { + Name: "CHAINS-GIT_URL", + Value: v1.ParamValue{ + StringVal: "https://github.com/GoogleContainerTools/distroless", + }, + }, + }, + }, + }, } - want := []common.ProvenanceMaterial{ { URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git", @@ -92,7 +92,7 @@ status: } ctx := logtesting.TestContextWithLogger(t) - got, err := TaskMaterials(ctx, objects.NewTaskRunObject(taskRun)) + got, err := TaskMaterials(ctx, objects.NewTaskRunObjectV1(taskRun)) if err != nil { t.Fatalf("Did not expect an error but got %v", err) } @@ -103,232 +103,259 @@ status: func TestTaskMaterials(t *testing.T) { tests := []struct { - name string - taskRun *v1beta1.TaskRun - want []common.ProvenanceMaterial - }{{ - name: "materials from pipeline resources", - taskRun: &v1beta1.TaskRun{ - Spec: v1beta1.TaskRunSpec{ - Resources: &v1beta1.TaskRunResources{ - Inputs: []v1beta1.TaskResourceBinding{ - { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ - Name: "nil-resource-spec", - }, - }, { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ - Name: "repo", - ResourceSpec: &v1alpha1.PipelineResourceSpec{ - Params: []v1alpha1.ResourceParam{ - {Name: "url", Value: "https://github.com/GoogleContainerTools/distroless"}, - {Name: "revision", Value: "my-revision"}, + name string + obj objects.TektonObject + want []common.ProvenanceMaterial + }{ + { + name: "materials from pipeline resources", + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck + Spec: v1beta1.TaskRunSpec{ + Resources: &v1beta1.TaskRunResources{ //nolint:staticcheck + Inputs: 
[]v1beta1.TaskResourceBinding{ //nolint:staticcheck + { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck + Name: "nil-resource-spec", + }, + }, { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck + Name: "repo", + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:staticcheck + Params: []v1alpha1.ResourceParam{ //nolint:staticcheck + {Name: "url", Value: "https://github.com/GoogleContainerTools/distroless"}, + {Name: "revision", Value: "my-revision"}, + }, + Type: backport.PipelineResourceTypeGit, }, - Type: backport.PipelineResourceTypeGit, }, }, }, }, }, - }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - { - Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ - "uri": "gcr.io/foo/bar", - "digest": digest, - }), + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskRunResults: []v1beta1.TaskRunResult{ + { + Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, + Value: *v1beta1.NewObject(map[string]string{ + "uri": "gcr.io/foo/bar", + "digest": digest, + }), + }, }, - }, - ResourcesResult: []v1beta1.PipelineResourceResult{ - { - ResourceName: "repo", - Key: "commit", - Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", - }, { - ResourceName: "repo", - Key: "url", - Value: "https://github.com/GoogleContainerTools/distroless", + ResourcesResult: []v1beta1.PipelineResourceResult{ + { + ResourceName: "repo", + Key: "commit", + Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, { + ResourceName: "repo", + Key: "url", + Value: "https://github.com/GoogleContainerTools/distroless", + }, }, }, }, - }, - }, - want: []common.ProvenanceMaterial{ - { - URI: "gcr.io/foo/bar", - Digest: common.DigestSet{ - "sha256": strings.TrimPrefix(digest, "sha256:"), + }), + want: []common.ProvenanceMaterial{ + { + URI: "gcr.io/foo/bar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, }, - }, - { - URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git@my-revision", - Digest: common.DigestSet{ - "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + { + URI: artifacts.GitSchemePrefix + "https://github.com/GoogleContainerTools/distroless.git@my-revision", + Digest: common.DigestSet{ + "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, }, }, }, - }, { - name: "materials from git results in task run spec", - taskRun: &v1beta1.TaskRun{ - Spec: v1beta1.TaskRunSpec{ - Params: []v1beta1.Param{{ - Name: "CHAINS-GIT_COMMIT", - Value: *v1beta1.NewStructuredValues("my-commit"), - }, { - Name: "CHAINS-GIT_URL", - Value: *v1beta1.NewStructuredValues("github.com/something"), - }}, - }, - }, - want: []common.ProvenanceMaterial{ - { - URI: artifacts.GitSchemePrefix + "github.com/something.git", - Digest: common.DigestSet{ - "sha1": "my-commit", + { + name: "materials from git results in task run spec", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Spec: v1.TaskRunSpec{ + Params: []v1.Param{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1.NewStructuredValues("my-commit"), + }, { + Name: "CHAINS-GIT_URL", + Value: *v1.NewStructuredValues("github.com/something"), + }}, }, - }, - }, - }, { - name: "materials from git results in task spec", - taskRun: &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskSpec: &v1beta1.TaskSpec{ - Params: 
[]v1beta1.ParamSpec{{ - Name: "CHAINS-GIT_COMMIT", - Default: &v1beta1.ParamValue{ - StringVal: "my-commit", - }, - }, { - Name: "CHAINS-GIT_URL", - Default: &v1beta1.ParamValue{ - StringVal: "github.com/something", - }, - }}, + }), + want: []common.ProvenanceMaterial{ + { + URI: artifacts.GitSchemePrefix + "github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", }, }, }, }, - want: []common.ProvenanceMaterial{ - { - URI: artifacts.GitSchemePrefix + "github.com/something.git", - Digest: common.DigestSet{ - "sha1": "my-commit", - }, - }, - }, - }, { - name: "materials from git results in task spec and taskrun spec", - taskRun: &v1beta1.TaskRun{ - Spec: v1beta1.TaskRunSpec{ - Params: []v1beta1.Param{{ - Name: "CHAINS-GIT_URL", - Value: v1beta1.ParamValue{ - StringVal: "github.com/something", + { + name: "materials from git results in task spec", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + TaskSpec: &v1.TaskSpec{ + Params: []v1.ParamSpec{{ + Name: "CHAINS-GIT_COMMIT", + Default: &v1.ParamValue{ + StringVal: "my-commit", + }, + }, { + Name: "CHAINS-GIT_URL", + Default: &v1.ParamValue{ + StringVal: "github.com/something", + }, + }}, + }, }, - }}, - }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskSpec: &v1beta1.TaskSpec{ - Params: []v1beta1.ParamSpec{{ - Name: "CHAINS-GIT_URL", - }, { - Name: "CHAINS-GIT_COMMIT", - Default: &v1beta1.ParamValue{ - StringVal: "my-commit", - }, - }}, + }, + }), + want: []common.ProvenanceMaterial{ + { + URI: artifacts.GitSchemePrefix + "github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", }, }, }, }, - want: []common.ProvenanceMaterial{{ - URI: "git+github.com/something.git", - Digest: common.DigestSet{ - "sha1": "my-commit", - }, - }}, - }, { - name: "materials from step images", - taskRun: &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Steps: []v1beta1.StepState{{ - Name: "git-source-repo-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "git-source-repo-repeat-again-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "build", - ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + { + name: "materials from git results in task spec and taskrun spec", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Spec: v1.TaskRunSpec{ + Params: []v1.Param{{ + Name: "CHAINS-GIT_URL", + Value: v1.ParamValue{ + StringVal: "github.com/something", + }, }}, }, - }, - }, - want: []common.ProvenanceMaterial{ - { - URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", - Digest: common.DigestSet{ - "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + TaskSpec: &v1.TaskSpec{ + Params: []v1.ParamSpec{{ + Name: "CHAINS-GIT_URL", + }, { + Name: "CHAINS-GIT_COMMIT", + Default: &v1.ParamValue{ + StringVal: "my-commit", + }, + }}, + }, + }, }, - }, - { - URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + }), + want: []common.ProvenanceMaterial{{ + URI: 
"git+github.com/something.git", Digest: common.DigestSet{ - "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + "sha1": "my-commit", }, - }, + }}, }, - }, { - name: "materials from step and sidecar images", - taskRun: &v1beta1.TaskRun{ - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Steps: []v1beta1.StepState{{ - Name: "git-source-repo-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "git-source-repo-repeat-again-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "build", - ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", - }}, - Sidecars: []v1beta1.SidecarState{{ - Name: "sidecar-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", - }}, + { + name: "materials from step images", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Steps: []v1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + }, + }, + }), + want: []common.ProvenanceMaterial{ + { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, + { + URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, }, }, }, - want: []common.ProvenanceMaterial{ - { - URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", - Digest: common.DigestSet{ - "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, - }, { - URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", - Digest: common.DigestSet{ - "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + { + name: "materials from step and sidecar images", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Steps: []v1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: 
"gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + Sidecars: []v1.SidecarState{{ + Name: "sidecar-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }}, + }, }, - }, { - URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", - Digest: common.DigestSet{ - "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }), + want: []common.ProvenanceMaterial{ + { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: artifacts.OCIScheme + "gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, { + URI: artifacts.OCIScheme + "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, }, }, }, - }} + } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - mat, err := TaskMaterials(ctx, objects.NewTaskRunObject(tc.taskRun)) + // convert tekton object to taskrun objet + var input *objects.TaskRunObjectV1 + var err error + if obj, ok := tc.obj.(*objects.TaskRunObjectV1); ok { + input = obj + } + + if trV1Beta1, ok := tc.obj.GetObject().(*v1beta1.TaskRun); ok { //nolint:staticcheck + trV1 := &v1.TaskRun{} + if err := trV1Beta1.ConvertTo(ctx, trV1); err == nil { + if trV1Beta1.Spec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Spec.Resources) //nolint:staticcheck + if err != nil { + t.Errorf("Error serializing to JSON: %v", err) + } + trV1.Annotations["tekton.dev/v1beta1-spec-resources"] = string(jsonData) + } + input = objects.NewTaskRunObjectV1(trV1) + } + } + mat, err := TaskMaterials(ctx, input) if err != nil { t.Fatalf("Did not expect an error but got %v", err) } @@ -443,20 +470,20 @@ func TestFromImageID(t *testing.T) { func TestFromPipelineParamsAndResults(t *testing.T) { tests := []struct { name string - pipelineRunObject *objects.PipelineRunObject + pipelineRunObject *objects.PipelineRunObjectV1 enableDeepInspection bool want []common.ProvenanceMaterial }{{ name: "from results", - pipelineRunObject: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: []v1beta1.PipelineRunResult{{ + pipelineRunObject: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: []v1.PipelineRunResult{{ Name: "CHAINS-GIT_COMMIT", - Value: *v1beta1.NewStructuredValues("my-commit"), + Value: *v1.NewStructuredValues("my-commit"), }, { Name: "CHAINS-GIT_URL", - Value: *v1beta1.NewStructuredValues("github.com/something"), + Value: *v1.NewStructuredValues("github.com/something"), }}, }, }, @@ -469,18 +496,18 @@ func TestFromPipelineParamsAndResults(t *testing.T) { }}, }, { name: "from pipelinespec", - pipelineRunObject: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ 
- PipelineSpec: &v1beta1.PipelineSpec{ - Params: []v1beta1.ParamSpec{{ + pipelineRunObject: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + PipelineSpec: &v1.PipelineSpec{ + Params: []v1.ParamSpec{{ Name: "CHAINS-GIT_COMMIT", - Default: &v1beta1.ParamValue{ + Default: &v1.ParamValue{ StringVal: "my-commit", }, }, { Name: "CHAINS-GIT_URL", - Default: &v1beta1.ParamValue{ + Default: &v1.ParamValue{ StringVal: "github.com/something", }, }}, @@ -496,16 +523,16 @@ func TestFromPipelineParamsAndResults(t *testing.T) { }}, }, { name: "from pipelineRunSpec", - pipelineRunObject: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Spec: v1beta1.PipelineRunSpec{ - Params: []v1beta1.Param{{ + pipelineRunObject: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Spec: v1.PipelineRunSpec{ + Params: []v1.Param{{ Name: "CHAINS-GIT_COMMIT", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ StringVal: "my-commit", }, }, { Name: "CHAINS-GIT_URL", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ StringVal: "github.com/something", }, }}, @@ -519,25 +546,25 @@ func TestFromPipelineParamsAndResults(t *testing.T) { }}, }, { name: "from completeChain", - pipelineRunObject: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Spec: v1beta1.PipelineRunSpec{ - Params: []v1beta1.Param{{ + pipelineRunObject: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Spec: v1.PipelineRunSpec{ + Params: []v1.Param{{ Name: "CHAINS-GIT_URL", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ StringVal: "github.com/something", }, }}, }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineSpec: &v1beta1.PipelineSpec{ - Params: []v1beta1.ParamSpec{{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + PipelineSpec: &v1.PipelineSpec{ + Params: []v1.ParamSpec{{ Name: "CHAINS-GIT_URL", }}, }, - PipelineResults: []v1beta1.PipelineRunResult{{ + Results: []v1.PipelineRunResult{{ Name: "CHAINS-GIT_COMMIT", - Value: *v1beta1.NewStructuredValues("my-commit"), + Value: *v1.NewStructuredValues("my-commit"), }}, }, }, @@ -548,24 +575,25 @@ func TestFromPipelineParamsAndResults(t *testing.T) { "sha1": "my-commit", }, }}, - }, { - name: "deep inspection: pipelinerun param and task result", - pipelineRunObject: createProWithPipelineParamAndTaskResult(), - enableDeepInspection: true, - want: []common.ProvenanceMaterial{ - { - URI: "git+github.com/pipelinerun-param.git", - Digest: common.DigestSet{ - "sha1": "115734d92807a80158b4b7af605d768c647fdb3d", - }, - }, { - URI: "github.com/childtask-result", - Digest: common.DigestSet{ - "sha1": "225734d92807a80158b4b7af605d768c647fdb3d", + }, + { + name: "deep inspection: pipelinerun param and task result", + pipelineRunObject: createProWithPipelineParamAndTaskResult(), + enableDeepInspection: true, + want: []common.ProvenanceMaterial{ + { + URI: "git+github.com/pipelinerun-param.git", + Digest: common.DigestSet{ + "sha1": "115734d92807a80158b4b7af605d768c647fdb3d", + }, + }, { + URI: "github.com/childtask-result", + Digest: common.DigestSet{ + "sha1": "225734d92807a80158b4b7af605d768c647fdb3d", + }, }, }, }, - }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { @@ -579,19 +607,19 @@ func TestFromPipelineParamsAndResults(t *testing.T) { } //nolint:all -func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObject { - pro := objects.NewPipelineRunObject(&v1beta1.PipelineRun{ - Status: 
v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineSpec: &v1beta1.PipelineSpec{ - Params: []v1beta1.ParamSpec{{ +func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObjectV1 { + pro := objects.NewPipelineRunObjectV1(&v1.PipelineRun{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + PipelineSpec: &v1.PipelineSpec{ + Params: []v1.ParamSpec{{ Name: "CHAINS-GIT_COMMIT", - Default: &v1beta1.ParamValue{ + Default: &v1.ParamValue{ StringVal: "115734d92807a80158b4b7af605d768c647fdb3d", }, }, { Name: "CHAINS-GIT_URL", - Default: &v1beta1.ParamValue{ + Default: &v1.ParamValue{ StringVal: "github.com/pipelinerun-param", }, }}, @@ -601,15 +629,15 @@ func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObject { }) pipelineTaskName := "my-clone-task" - tr := &v1beta1.TaskRun{ + tr := &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{Labels: map[string]string{objects.PipelineTaskLabel: pipelineTaskName}}, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, - TaskRunResults: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "github.com/childtask-result", "digest": "sha1:225734d92807a80158b4b7af605d768c647fdb3d", })}, @@ -619,6 +647,6 @@ func createProWithPipelineParamAndTaskResult() *objects.PipelineRunObject { } pro.AppendTaskRun(tr) - pro.Status.PipelineSpec.Tasks = []v1beta1.PipelineTask{{Name: pipelineTaskName}} + pro.Status.PipelineSpec.Tasks = []v1.PipelineTask{{Name: pipelineTaskName}} return pro } diff --git a/pkg/chains/formats/slsa/testdata/pipelinerun-childrefs.json b/pkg/chains/formats/slsa/testdata/pipelinerun-childrefs.json index 32030fa0aa..7fc402a02e 100644 --- a/pkg/chains/formats/slsa/testdata/pipelinerun-childrefs.json +++ b/pkg/chains/formats/slsa/testdata/pipelinerun-childrefs.json @@ -9,7 +9,9 @@ "pipelineRef": { "name": "test-pipeline" }, - "serviceAccountName": "pipeline" + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } }, "status": { "startTime": "2021-03-29T09:50:00Z", @@ -23,7 +25,7 @@ "type": "Succeeded" } ], - "pipelineResults": [ + "results": [ { "name": "CHAINS-GIT_COMMIT", "value": "abcd" @@ -113,13 +115,13 @@ }, "childReferences": [ { - "apiVersion": "tekton.dev/v1beta1", + "apiVersion": "tekton.dev/v1", "kind": "TaskRun", "name": "git-clone", "pipelineTaskName": "git-clone" }, { - "apiVersion": "tekton.dev/v1beta1", + "apiVersion": "tekton.dev/v1", "kind": "TaskRun", "name": "taskrun-build", "pipelineTaskName": "build" diff --git a/pkg/chains/formats/slsa/testdata/pipelinerun1.json b/pkg/chains/formats/slsa/testdata/pipelinerun1.json index 879e8b1d84..fda4c6ead3 100644 --- a/pkg/chains/formats/slsa/testdata/pipelinerun1.json +++ b/pkg/chains/formats/slsa/testdata/pipelinerun1.json @@ -9,7 +9,9 @@ "pipelineRef": { "name": "test-pipeline" }, - "serviceAccountName": "pipeline" + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } }, "status": { "startTime": "2021-03-29T09:50:00Z", @@ -23,7 +25,7 @@ "type": "Succeeded" } ], - "pipelineResults": [ + "results": [ { "name": "CHAINS-GIT_COMMIT", "value": "abcd" @@ -155,7 +157,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "commit", "value": "abcd" @@ -238,7 +240,7 @@ 
} } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/pipelinerun_structured_results.json b/pkg/chains/formats/slsa/testdata/pipelinerun_structured_results.json index 2e210c985a..909f821ec0 100644 --- a/pkg/chains/formats/slsa/testdata/pipelinerun_structured_results.json +++ b/pkg/chains/formats/slsa/testdata/pipelinerun_structured_results.json @@ -9,7 +9,9 @@ "pipelineRef": { "name": "test-pipeline" }, - "serviceAccountName": "pipeline" + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } }, "status": { "startTime": "2021-03-29T09:50:00Z", @@ -23,7 +25,7 @@ "type": "Succeeded" } ], - "pipelineResults": [ + "results": [ { "name": "image-ARTIFACT_INPUTS", "value": { @@ -113,7 +115,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "commit", "value": "abcd" @@ -196,7 +198,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/taskrun-multiple-subjects.json b/pkg/chains/formats/slsa/testdata/taskrun-multiple-subjects.json index 32ddbc30e5..38ac438638 100644 --- a/pkg/chains/formats/slsa/testdata/taskrun-multiple-subjects.json +++ b/pkg/chains/formats/slsa/testdata/taskrun-multiple-subjects.json @@ -25,7 +25,7 @@ "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" } ], - "taskResults": [ + "results": [ { "name": "IMAGES", "value": "gcr.io/myimage1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6,gcr.io/myimage2@sha256:daa1a56e13c85cf164e7d9e595006649e3a04c47fe4a8261320e18a0bf3b0367" diff --git a/pkg/chains/formats/slsa/testdata/taskrun1.json b/pkg/chains/formats/slsa/testdata/taskrun1.json index a686452516..d42a4638e4 100644 --- a/pkg/chains/formats/slsa/testdata/taskrun1.json +++ b/pkg/chains/formats/slsa/testdata/taskrun1.json @@ -56,7 +56,7 @@ "imageID": "docker-pullable://gcr.io/test3/test3@sha256:f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478" } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/taskrun2.json b/pkg/chains/formats/slsa/testdata/taskrun2.json index 958e992057..63b3f984b7 100644 --- a/pkg/chains/formats/slsa/testdata/taskrun2.json +++ b/pkg/chains/formats/slsa/testdata/taskrun2.json @@ -42,7 +42,7 @@ "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" } ], - "taskResults": [ + "results": [ { "name": "some-uri_DIGEST", "value": "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" diff --git a/pkg/chains/formats/slsa/testdata/v2alpha2/pipelinerun1.json b/pkg/chains/formats/slsa/testdata/v2alpha2/pipelinerun1.json index e9eb7dbc30..7a91957a06 100644 --- a/pkg/chains/formats/slsa/testdata/v2alpha2/pipelinerun1.json +++ b/pkg/chains/formats/slsa/testdata/v2alpha2/pipelinerun1.json @@ -13,7 +13,9 @@ "pipelineRef": { "name": "test-pipeline" }, - "serviceAccountName": "pipeline" + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } }, "status": { "startTime": "2021-03-29T09:50:00Z", @@ -27,7 +29,7 @@ "type": "Succeeded" } ], - "pipelineResults": [ + "results": [ { "name": "CHAINS-GIT_COMMIT", "value": "abcd" @@ -159,7 +161,7 @@ } } ], - 
"taskResults": [ + "results": [ { "name": "commit", "value": "abcd" @@ -242,7 +244,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/v2alpha2/pipelinerun_structured_results.json b/pkg/chains/formats/slsa/testdata/v2alpha2/pipelinerun_structured_results.json index a0a41e35b7..dbaac82e33 100644 --- a/pkg/chains/formats/slsa/testdata/v2alpha2/pipelinerun_structured_results.json +++ b/pkg/chains/formats/slsa/testdata/v2alpha2/pipelinerun_structured_results.json @@ -13,7 +13,9 @@ "pipelineRef": { "name": "test-pipeline" }, - "serviceAccountName": "pipeline" + "taskRunTemplate": { + "serviceAccountName": "pipeline" + } }, "status": { "startTime": "2021-03-29T09:50:00Z", @@ -27,7 +29,7 @@ "type": "Succeeded" } ], - "pipelineResults": [ + "results": [ { "name": "image-ARTIFACT_INPUTS", "value": { @@ -117,7 +119,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "commit", "value": "abcd" @@ -200,7 +202,7 @@ } } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun-multiple-subjects.json b/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun-multiple-subjects.json index 32ddbc30e5..38ac438638 100644 --- a/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun-multiple-subjects.json +++ b/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun-multiple-subjects.json @@ -25,7 +25,7 @@ "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" } ], - "taskResults": [ + "results": [ { "name": "IMAGES", "value": "gcr.io/myimage1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6,gcr.io/myimage2@sha256:daa1a56e13c85cf164e7d9e595006649e3a04c47fe4a8261320e18a0bf3b0367" diff --git a/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun1.json b/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun1.json index 8dabc97e6d..20ebbe1237 100644 --- a/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun1.json +++ b/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun1.json @@ -57,7 +57,7 @@ "imageID": "docker-pullable://gcr.io/test3/test3@sha256:f1a8b8549c179f41e27ff3db0fe1a1793e4b109da46586501a8343637b1d0478" } ], - "taskResults": [ + "results": [ { "name": "IMAGE_DIGEST", "value": "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7" diff --git a/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun2.json b/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun2.json index 5c2264e59c..1b83d1c2a0 100644 --- a/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun2.json +++ b/pkg/chains/formats/slsa/testdata/v2alpha2/taskrun2.json @@ -43,7 +43,7 @@ "imageID": "docker-pullable://gcr.io/test1/test1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" } ], - "taskResults": [ + "results": [ { "name": "some-uri_DIGEST", "value": "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6" diff --git a/pkg/chains/formats/slsa/v1/intotoite6.go b/pkg/chains/formats/slsa/v1/intotoite6.go index 4ab3c8d0bf..cfa5e0d399 100644 --- a/pkg/chains/formats/slsa/v1/intotoite6.go +++ b/pkg/chains/formats/slsa/v1/intotoite6.go @@ -57,9 +57,9 @@ func (i *InTotoIte6) Wrap() bool { func (i *InTotoIte6) CreatePayload(ctx context.Context, obj interface{}) (interface{}, error) { switch v := obj.(type) { - case *objects.TaskRunObject: + case 
*objects.TaskRunObjectV1: return taskrun.GenerateAttestation(ctx, v, i.slsaConfig) - case *objects.PipelineRunObject: + case *objects.PipelineRunObjectV1: return pipelinerun.GenerateAttestation(ctx, v, i.slsaConfig) default: return nil, fmt.Errorf("intoto does not support type: %s", v) diff --git a/pkg/chains/formats/slsa/v1/intotoite6_test.go b/pkg/chains/formats/slsa/v1/intotoite6_test.go index a61bf2489b..c2959fe253 100644 --- a/pkg/chains/formats/slsa/v1/intotoite6_test.go +++ b/pkg/chains/formats/slsa/v1/intotoite6_test.go @@ -34,7 +34,7 @@ import ( "github.com/in-toto/in-toto-golang/in_toto" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" logtesting "knative.dev/pkg/logging/testing" ) @@ -93,7 +93,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { Digest: map[string]string{"sha1": "ab123"}, EntryPoint: "build.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "IMAGE": {Type: "string", StringVal: "test.io/test/image"}, "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskrun"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, @@ -105,7 +105,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { Builder: common.ProvenanceBuilder{ ID: "test_builder-1", }, - BuildType: "tekton.dev/v1beta1/TaskRun", + BuildType: "tekton.dev/v1/TaskRun", BuildConfig: taskrun.BuildConfig{ Steps: []attest.StepAttestation{ { @@ -135,7 +135,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -207,20 +207,20 @@ func TestPipelineRunCreatePayload(t *testing.T) { Digest: map[string]string{"sha1": "28b123"}, EntryPoint: "pipeline.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "IMAGE": {Type: "string", StringVal: "test.io/test/image"}, }, }, Builder: common.ProvenanceBuilder{ ID: "test_builder-1", }, - BuildType: "tekton.dev/v1beta1/PipelineRun", + BuildType: "tekton.dev/v1/PipelineRun", BuildConfig: pipelinerun.BuildConfig{ Tasks: []pipelinerun.TaskAttestation{ { Name: "git-clone", After: nil, - Ref: v1beta1.TaskRef{ + Ref: v1.TaskRef{ Name: "git-clone", Kind: "ClusterTask", }, @@ -244,7 +244,7 @@ func TestPipelineRunCreatePayload(t *testing.T) { Digest: common.DigestSet{"sha1": "x123"}, EntryPoint: "git-clone.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskdefault"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, "revision": {Type: "string", StringVal: ""}, @@ -254,18 +254,18 @@ func TestPipelineRunCreatePayload(t *testing.T) { "labels": {"tekton.dev/pipelineTask": "git-clone"}, }, }, - Results: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "some-uri_DIGEST", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6", }, }, { Name: "some-uri", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: 
"pkg:deb/debian/curl@7.50.3-1", }, }, @@ -274,7 +274,7 @@ func TestPipelineRunCreatePayload(t *testing.T) { { Name: "build", After: []string{"git-clone"}, - Ref: v1beta1.TaskRef{ + Ref: v1.TaskRef{ Name: "build", Kind: "ClusterTask", }, @@ -316,7 +316,7 @@ func TestPipelineRunCreatePayload(t *testing.T) { Digest: map[string]string{"sha1": "ab123"}, EntryPoint: "build.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskrun"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, "IMAGE": {Type: "string", StringVal: "test.io/test/image"}, @@ -325,18 +325,18 @@ func TestPipelineRunCreatePayload(t *testing.T) { "labels": {"tekton.dev/pipelineTask": "build"}, }, }, - Results: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "IMAGE_DIGEST", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", }, }, { Name: "IMAGE_URL", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "gcr.io/my/image", }, }, @@ -355,7 +355,7 @@ func TestPipelineRunCreatePayload(t *testing.T) { if err != nil { t.Errorf("error reading taskrun: %s", err.Error()) } - pro := objects.NewPipelineRunObject(pr) + pro := objects.NewPipelineRunObjectV1(pr) pro.AppendTaskRun(tr1) pro.AppendTaskRun(tr2) @@ -425,20 +425,20 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { }, Invocation: slsa.ProvenanceInvocation{ ConfigSource: slsa.ConfigSource{}, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "IMAGE": {Type: "string", StringVal: "test.io/test/image"}, }, }, Builder: common.ProvenanceBuilder{ ID: "test_builder-1", }, - BuildType: "tekton.dev/v1beta1/PipelineRun", + BuildType: "tekton.dev/v1/PipelineRun", BuildConfig: pipelinerun.BuildConfig{ Tasks: []pipelinerun.TaskAttestation{ { Name: "git-clone", After: nil, - Ref: v1beta1.TaskRef{ + Ref: v1.TaskRef{ Name: "git-clone", Kind: "ClusterTask", }, @@ -462,7 +462,7 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { Digest: common.DigestSet{"sha1": "x123"}, EntryPoint: "git-clone.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskdefault"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, "revision": {Type: "string", StringVal: ""}, @@ -472,18 +472,18 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { "labels": {"tekton.dev/pipelineTask": "git-clone"}, }, }, - Results: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "some-uri_DIGEST", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6", }, }, { Name: "some-uri", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "pkg:deb/debian/curl@7.50.3-1", }, }, @@ -492,7 +492,7 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { { Name: "build", After: []string{"git-clone"}, - Ref: v1beta1.TaskRef{ + Ref: v1.TaskRef{ Name: "build", Kind: "ClusterTask", }, @@ -534,7 +534,7 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { Digest: map[string]string{"sha1": 
"ab123"}, EntryPoint: "build.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskrun"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, "IMAGE": {Type: "string", StringVal: "test.io/test/image"}, @@ -543,18 +543,18 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { "labels": {"tekton.dev/pipelineTask": "build"}, }, }, - Results: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "IMAGE_DIGEST", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", }, }, { Name: "IMAGE_URL", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "gcr.io/my/image", }, }, @@ -573,7 +573,7 @@ func TestPipelineRunCreatePayloadChildRefs(t *testing.T) { if err != nil { t.Errorf("error reading taskrun: %s", err.Error()) } - pro := objects.NewPipelineRunObject(pr) + pro := objects.NewPipelineRunObjectV1(pr) pro.AppendTaskRun(tr1) pro.AppendTaskRun(tr2) @@ -627,7 +627,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { Digest: common.DigestSet{"sha1": "x123"}, EntryPoint: "git-clone.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskdefault"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, "revision": {Type: "string"}, @@ -637,7 +637,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { "labels": {"tekton.dev/pipelineTask": "git-clone"}, }, }, - BuildType: "tekton.dev/v1beta1/TaskRun", + BuildType: "tekton.dev/v1/TaskRun", BuildConfig: taskrun.BuildConfig{ Steps: []attest.StepAttestation{ { @@ -653,7 +653,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { }, } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -695,7 +695,7 @@ func TestMultipleSubjects(t *testing.T) { }, }, Predicate: slsa.ProvenancePredicate{ - BuildType: "tekton.dev/v1beta1/TaskRun", + BuildType: "tekton.dev/v1/TaskRun", Metadata: &slsa.ProvenanceMetadata{}, Builder: common.ProvenanceBuilder{ ID: "test_builder-multiple", @@ -707,7 +707,7 @@ func TestMultipleSubjects(t *testing.T) { }, }, Invocation: slsa.ProvenanceInvocation{ - Parameters: map[string]v1beta1.ParamValue{}, + Parameters: map[string]v1.ParamValue{}, }, BuildConfig: taskrun.BuildConfig{ Steps: []attest.StepAttestation{ @@ -724,7 +724,7 @@ func TestMultipleSubjects(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) } diff --git a/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go b/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go index e652111e5f..7fff58e085 100644 --- a/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go +++ b/pkg/chains/formats/slsa/v1/pipelinerun/pipelinerun.go @@ -25,7 +25,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" - 
"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" corev1 "k8s.io/api/core/v1" "knative.dev/pkg/apis" "knative.dev/pkg/logging" @@ -38,16 +38,16 @@ type BuildConfig struct { type TaskAttestation struct { Name string `json:"name,omitempty"` After []string `json:"after,omitempty"` - Ref v1beta1.TaskRef `json:"ref,omitempty"` + Ref v1.TaskRef `json:"ref,omitempty"` StartedOn time.Time `json:"startedOn,omitempty"` FinishedOn time.Time `json:"finishedOn,omitempty"` Status string `json:"status,omitempty"` Steps []attest.StepAttestation `json:"steps,omitempty"` Invocation slsa.ProvenanceInvocation `json:"invocation,omitempty"` - Results []v1beta1.TaskRunResult `json:"results,omitempty"` + Results []v1.TaskRunResult `json:"results,omitempty"` } -func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObject, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { +func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { subjects := extract.SubjectDigests(ctx, pro, slsaConfig) mat, err := material.PipelineMaterials(ctx, pro, slsaConfig) @@ -74,15 +74,15 @@ func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObject, sl return att, nil } -func invocation(pro *objects.PipelineRunObject) slsa.ProvenanceInvocation { - var paramSpecs []v1beta1.ParamSpec +func invocation(pro *objects.PipelineRunObjectV1) slsa.ProvenanceInvocation { + var paramSpecs []v1.ParamSpec if ps := pro.Status.PipelineSpec; ps != nil { paramSpecs = ps.Params } return attest.Invocation(pro, pro.Spec.Params, paramSpecs) } -func buildConfig(ctx context.Context, pro *objects.PipelineRunObject) BuildConfig { +func buildConfig(ctx context.Context, pro *objects.PipelineRunObjectV1) BuildConfig { logger := logging.FromContext(ctx) tasks := []TaskAttestation{} @@ -110,7 +110,7 @@ func buildConfig(ctx context.Context, pro *objects.PipelineRunObject) BuildConfi // Establish task order by retrieving all task's referenced // in the "when" and "params" fields - refs := v1beta1.PipelineTaskResultRefs(&t) + refs := v1.PipelineTaskResultRefs(&t) for _, ref := range refs { // Ensure task doesn't already exist in after @@ -132,11 +132,11 @@ func buildConfig(ctx context.Context, pro *objects.PipelineRunObject) BuildConfi } params := tr.Spec.Params - var paramSpecs []v1beta1.ParamSpec + var paramSpecs []v1.ParamSpec if tr.Status.TaskSpec != nil { paramSpecs = tr.Status.TaskSpec.Params } else { - paramSpecs = []v1beta1.ParamSpec{} + paramSpecs = []v1.ParamSpec{} } task := TaskAttestation{ @@ -147,7 +147,7 @@ func buildConfig(ctx context.Context, pro *objects.PipelineRunObject) BuildConfi Status: getStatus(tr.Status.Conditions), Steps: steps, Invocation: attest.Invocation(tr, params, paramSpecs), - Results: tr.Status.TaskRunResults, + Results: tr.Status.Results, } if t.TaskRef != nil { @@ -162,7 +162,7 @@ func buildConfig(ctx context.Context, pro *objects.PipelineRunObject) BuildConfi return BuildConfig{Tasks: tasks} } -func metadata(pro *objects.PipelineRunObject) *slsa.ProvenanceMetadata { +func metadata(pro *objects.PipelineRunObjectV1) *slsa.ProvenanceMetadata { m := &slsa.ProvenanceMetadata{} if pro.Status.StartTime != nil { utc := pro.Status.StartTime.Time.UTC() diff --git a/pkg/chains/formats/slsa/v1/pipelinerun/provenance_test.go b/pkg/chains/formats/slsa/v1/pipelinerun/provenance_test.go index 35137767c6..1044253111 100644 --- 
a/pkg/chains/formats/slsa/v1/pipelinerun/provenance_test.go +++ b/pkg/chains/formats/slsa/v1/pipelinerun/provenance_test.go @@ -30,14 +30,14 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/internal/objectloader" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "k8s.io/apimachinery/pkg/selection" logtesting "knative.dev/pkg/logging/testing" ) // Global pro is only read from, never modified -var pro *objects.PipelineRunObject -var proStructuredResults *objects.PipelineRunObject +var pro *objects.PipelineRunObjectV1 +var proStructuredResults *objects.PipelineRunObjectV1 var e1BuildStart = time.Unix(1617011400, 0) var e1BuildFinished = time.Unix(1617011415, 0) @@ -46,7 +46,7 @@ func init() { proStructuredResults = createPro("../../testdata/pipelinerun_structured_results.json") } -func createPro(path string) *objects.PipelineRunObject { +func createPro(path string) *objects.PipelineRunObjectV1 { var err error pr, err := objectloader.PipelineRunFromFile(path) if err != nil { @@ -60,7 +60,7 @@ func createPro(path string) *objects.PipelineRunObject { if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p @@ -73,7 +73,7 @@ func TestInvocation(t *testing.T) { Digest: map[string]string{"sha1": "28b123"}, EntryPoint: "pipeline.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "IMAGE": {Type: "string", StringVal: "test.io/test/image"}, }, } @@ -89,7 +89,7 @@ func TestBuildConfig(t *testing.T) { { Name: "git-clone", After: nil, - Ref: v1beta1.TaskRef{ + Ref: v1.TaskRef{ Name: "git-clone", Kind: "ClusterTask", }, @@ -113,7 +113,7 @@ func TestBuildConfig(t *testing.T) { Digest: common.DigestSet{"sha1": "x123"}, EntryPoint: "git-clone.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskdefault"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, "revision": {Type: "string", StringVal: ""}, @@ -123,18 +123,18 @@ func TestBuildConfig(t *testing.T) { "labels": {"tekton.dev/pipelineTask": "git-clone"}, }, }, - Results: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "some-uri_DIGEST", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6", }, }, { Name: "some-uri", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "pkg:deb/debian/curl@7.50.3-1", }, }, @@ -143,7 +143,7 @@ func TestBuildConfig(t *testing.T) { { Name: "build", After: []string{"git-clone"}, - Ref: v1beta1.TaskRef{ + Ref: v1.TaskRef{ Name: "build", Kind: "ClusterTask", }, @@ -185,7 +185,7 @@ func TestBuildConfig(t *testing.T) { Digest: map[string]string{"sha1": "ab123"}, EntryPoint: "build.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskrun"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, "IMAGE": {Type: "string", StringVal: "test.io/test/image"}, @@ -194,18 +194,18 @@ func TestBuildConfig(t *testing.T) { "labels": 
{"tekton.dev/pipelineTask": "build"}, }, }, - Results: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "IMAGE_DIGEST", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", }, }, { Name: "IMAGE_URL", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "gcr.io/my/image", }, }, @@ -224,20 +224,20 @@ func TestBuildConfigTaskOrder(t *testing.T) { BUILD_TASK := 1 tests := []struct { name string - params []v1beta1.Param - whenExpressions v1beta1.WhenExpressions + params []v1.Param + whenExpressions v1.WhenExpressions runAfter []string }{ { name: "Referencing previous task via parameter", - params: []v1beta1.Param{ + params: []v1.Param{ { Name: "CHAINS-GIT_COMMIT", - Value: v1beta1.ParamValue{Type: "string", StringVal: "$(tasks.git-clone.results.commit)"}, + Value: v1.ParamValue{Type: "string", StringVal: "$(tasks.git-clone.results.commit)"}, }, { Name: "CHAINS-GIT_URL", - Value: v1beta1.ParamValue{Type: "string", StringVal: "$(tasks.git-clone.results.url)"}, + Value: v1.ParamValue{Type: "string", StringVal: "$(tasks.git-clone.results.url)"}, }, }, whenExpressions: nil, @@ -245,13 +245,13 @@ func TestBuildConfigTaskOrder(t *testing.T) { }, { name: "Referencing previous task via runAfter", - params: []v1beta1.Param{}, + params: []v1.Param{}, runAfter: []string{"git-clone"}, }, { name: "Referencing previous task via when.Input", - params: []v1beta1.Param{}, - whenExpressions: v1beta1.WhenExpressions{ + params: []v1.Param{}, + whenExpressions: v1.WhenExpressions{ { Input: "$(tasks.git-clone.results.commit)", Operator: selection.Equals, @@ -262,8 +262,8 @@ func TestBuildConfigTaskOrder(t *testing.T) { }, { name: "Referencing previous task via when.Value", - params: []v1beta1.Param{}, - whenExpressions: v1beta1.WhenExpressions{ + params: []v1.Param{}, + whenExpressions: v1.WhenExpressions{ { Input: "abcd", Operator: selection.Equals, @@ -280,7 +280,7 @@ func TestBuildConfigTaskOrder(t *testing.T) { { Name: "git-clone", After: nil, - Ref: v1beta1.TaskRef{ + Ref: v1.TaskRef{ Name: "git-clone", Kind: "ClusterTask", }, @@ -304,7 +304,7 @@ func TestBuildConfigTaskOrder(t *testing.T) { Digest: common.DigestSet{"sha1": "x123"}, EntryPoint: "git-clone.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskdefault"}, "CHAINS-GIT_URL": {Type: "string", StringVal: "https://git.test.com"}, "url": {Type: "string", StringVal: "https://git.test.com"}, @@ -316,18 +316,18 @@ func TestBuildConfigTaskOrder(t *testing.T) { }, }, }, - Results: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "some-uri_DIGEST", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6", }, }, { Name: "some-uri", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "pkg:deb/debian/curl@7.50.3-1", }, }, @@ -336,7 +336,7 @@ func TestBuildConfigTaskOrder(t *testing.T) { { Name: "build", After: []string{"git-clone"}, - Ref: v1beta1.TaskRef{ + Ref: v1.TaskRef{ Name: "build", Kind: "ClusterTask", }, @@ -378,7 +378,7 @@ func TestBuildConfigTaskOrder(t *testing.T) { Digest: map[string]string{"sha1": 
"ab123"}, EntryPoint: "build.yaml", }, - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ // TODO: Is this right? // "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "abcd"}, "CHAINS-GIT_COMMIT": {Type: "string", StringVal: "sha:taskrun"}, @@ -391,18 +391,18 @@ func TestBuildConfigTaskOrder(t *testing.T) { }, }, }, - Results: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "IMAGE_DIGEST", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7", }, }, { Name: "IMAGE_URL", - Value: v1beta1.ParamValue{ - Type: v1beta1.ParamTypeString, + Value: v1.ParamValue{ + Type: v1.ParamTypeString, StringVal: "gcr.io/my/image", }, }, @@ -410,15 +410,15 @@ func TestBuildConfigTaskOrder(t *testing.T) { }, }, } - pt := v1beta1.PipelineTask{ + pt := v1.PipelineTask{ Name: "build", - TaskRef: &v1beta1.TaskRef{ + TaskRef: &v1.TaskRef{ Kind: "ClusterTask", Name: "build", }, - Params: tt.params, - WhenExpressions: tt.whenExpressions, - RunAfter: tt.runAfter, + Params: tt.params, + When: tt.whenExpressions, + RunAfter: tt.runAfter, } pro := createPro("../../testdata/pipelinerun1.json") pro.Status.PipelineSpec.Tasks[BUILD_TASK] = pt @@ -461,7 +461,7 @@ func TestMetadataInTimeZone(t *testing.T) { Reproducible: false, } - zoned := objects.NewPipelineRunObject(pro.DeepCopy()) + zoned := objects.NewPipelineRunObjectV1(pro.DeepCopy()) tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) zoned.Status.StartTime.Time = zoned.Status.StartTime.Time.In(tz) zoned.Status.CompletionTime.Time = zoned.Status.CompletionTime.Time.In(tz) diff --git a/pkg/chains/formats/slsa/v1/taskrun/buildconfig.go b/pkg/chains/formats/slsa/v1/taskrun/buildconfig.go index 39f513d4c9..27ed00018f 100644 --- a/pkg/chains/formats/slsa/v1/taskrun/buildconfig.go +++ b/pkg/chains/formats/slsa/v1/taskrun/buildconfig.go @@ -19,7 +19,7 @@ package taskrun import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/attest" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" ) // BuildConfig is the custom Chains format to fill out the @@ -36,7 +36,7 @@ type Step struct { Annotations map[string]string `json:"annotations"` } -func buildConfig(tro *objects.TaskRunObject) BuildConfig { +func buildConfig(tro *objects.TaskRunObjectV1) BuildConfig { attestations := []attest.StepAttestation{} for _, stepState := range tro.Status.Steps { step := stepFromTaskRun(stepState.Name, tro) @@ -45,7 +45,7 @@ func buildConfig(tro *objects.TaskRunObject) BuildConfig { return BuildConfig{Steps: attestations} } -func stepFromTaskRun(name string, tro *objects.TaskRunObject) *v1beta1.Step { +func stepFromTaskRun(name string, tro *objects.TaskRunObjectV1) *v1.Step { if tro.Status.TaskSpec != nil { for _, s := range tro.Status.TaskSpec.Steps { if s.Name == name { @@ -53,5 +53,5 @@ func stepFromTaskRun(name string, tro *objects.TaskRunObject) *v1beta1.Step { } } } - return &v1beta1.Step{} + return &v1.Step{} } diff --git a/pkg/chains/formats/slsa/v1/taskrun/buildconfig_test.go b/pkg/chains/formats/slsa/v1/taskrun/buildconfig_test.go index 6ae7d9451b..4b48d1aa0b 100644 --- a/pkg/chains/formats/slsa/v1/taskrun/buildconfig_test.go +++ b/pkg/chains/formats/slsa/v1/taskrun/buildconfig_test.go @@ -24,12 +24,12 @@ import ( "github.com/tektoncd/chains/pkg/artifacts" 
"github.com/tektoncd/chains/pkg/chains/formats/slsa/attest" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "sigs.k8s.io/yaml" ) func TestBuildConfig(t *testing.T) { - taskrun := `apiVersion: tekton.dev/v1beta1 + taskrun := `apiVersion: tekton.dev/v1 kind: TaskRun status: taskSpec: @@ -59,7 +59,7 @@ status: terminated: containerID: containerd://e2fadd134495619cccd1c48d8a9df2aed2afd64e6c62ea55135f90796102231e` - var taskRun *v1beta1.TaskRun + var taskRun *v1.TaskRun if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { t.Fatal(err) } @@ -84,7 +84,7 @@ status: }, } - got := buildConfig(objects.NewTaskRunObject(taskRun)) + got := buildConfig(objects.NewTaskRunObjectV1(taskRun)) if !reflect.DeepEqual(expected, got) { if d := cmp.Diff(expected, got); d != "" { t.Log(d) diff --git a/pkg/chains/formats/slsa/v1/taskrun/provenance_test.go b/pkg/chains/formats/slsa/v1/taskrun/provenance_test.go index 9a5ebe0d5a..42475f78b2 100644 --- a/pkg/chains/formats/slsa/v1/taskrun/provenance_test.go +++ b/pkg/chains/formats/slsa/v1/taskrun/provenance_test.go @@ -17,24 +17,26 @@ limitations under the License. package taskrun import ( + "encoding/json" "reflect" "strings" "testing" "time" + "github.com/in-toto/in-toto-golang/in_toto" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/google/go-cmp/cmp" - "github.com/in-toto/in-toto-golang/in_toto" "github.com/tektoncd/chains/internal/backport" "github.com/tektoncd/chains/pkg/artifacts" "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/compare" "github.com/tektoncd/chains/pkg/chains/objects" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" "sigs.k8s.io/yaml" ) @@ -48,18 +50,18 @@ const ( ) func TestMetadata(t *testing.T) { - tr := &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr := &v1.TaskRun{ + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ "chains.tekton.dev/reproducible": "true", }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, } @@ -69,7 +71,7 @@ func TestMetadata(t *testing.T) { BuildStartedOn: &start, BuildFinishedOn: &end, } - got := Metadata(objects.NewTaskRunObject(tr)) + got := Metadata(objects.NewTaskRunObjectV1(tr)) if !reflect.DeepEqual(expected, got) { t.Fatalf("expected %v got %v", expected, got) } @@ -77,18 +79,18 @@ func TestMetadata(t *testing.T) { func TestMetadataInTimeZone(t *testing.T) { tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) - tr := &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr := &v1.TaskRun{ + 
ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ "chains.tekton.dev/reproducible": "true", }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, }, }, } @@ -98,14 +100,14 @@ func TestMetadataInTimeZone(t *testing.T) { BuildStartedOn: &start, BuildFinishedOn: &end, } - got := Metadata(objects.NewTaskRunObject(tr)) + got := Metadata(objects.NewTaskRunObjectV1(tr)) if !reflect.DeepEqual(expected, got) { t.Fatalf("expected %v got %v", expected, got) } } func TestInvocation(t *testing.T) { - taskrun := `apiVersion: tekton.dev/v1beta1 + taskrun := `apiVersion: tekton.dev/v1 kind: TaskRun metadata: uid: my-uid @@ -161,13 +163,13 @@ status: default: [] ` - var taskRun *v1beta1.TaskRun + var taskRun *v1.TaskRun if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { t.Fatal(err) } expected := slsa.ProvenanceInvocation{ - Parameters: map[string]v1beta1.ParamValue{ + Parameters: map[string]v1.ParamValue{ "my-param": {Type: "string", StringVal: "string-param"}, "my-array-param": {Type: "array", ArrayVal: []string{"my", "array"}}, "my-default-param": {Type: "string", StringVal: "string-default-param"}, @@ -189,7 +191,7 @@ status: }, } - got := invocation(objects.NewTaskRunObject(taskRun)) + got := invocation(objects.NewTaskRunObjectV1(taskRun)) if !reflect.DeepEqual(expected, got) { if d := cmp.Diff(expected, got); d != "" { t.Log(d) @@ -198,19 +200,19 @@ status: } } -func TestGetSubjectDigests(t *testing.T) { - tr := &v1beta1.TaskRun{ +func TestGetSubjectDigestsV1Beta1(t *testing.T) { + trV1Beta1 := &v1beta1.TaskRun{ //nolint:staticcheck Spec: v1beta1.TaskRunSpec{ - Resources: &v1beta1.TaskRunResources{ - Outputs: []v1beta1.TaskResourceBinding{ + Resources: &v1beta1.TaskRunResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResourceBinding{ //nolint:staticcheck { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck Name: "nil-check", }, }, { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck Name: "built-image", - ResourceSpec: &v1alpha1.PipelineResourceSpec{ + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:staticcheck Type: backport.PipelineResourceTypeImage, }, }, @@ -325,7 +327,8 @@ func TestGetSubjectDigests(t *testing.T) { Digest: common.DigestSet{ "sha256": strings.TrimPrefix(digest1, "sha256:"), }, - }, { + }, + { Name: "registry/resource-image", Digest: common.DigestSet{ "sha256": strings.TrimPrefix(digest2, "sha256:"), @@ -333,7 +336,127 @@ func TestGetSubjectDigests(t *testing.T) { }, } ctx := logtesting.TestContextWithLogger(t) - tro := objects.NewTaskRunObject(tr) + trV1 := &v1.TaskRun{} + if err := trV1Beta1.ConvertTo(ctx, trV1); err == nil { + if trV1Beta1.Spec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Spec.Resources) //nolint:staticcheck + if err != nil { + t.Errorf("Error serializing to JSON: %v", err) + } + 
trV1.Annotations["tekton.dev/v1beta1-spec-resources"] = string(jsonData) + } + } + + tro := objects.NewTaskRunObjectV1(trV1) + got := extract.SubjectDigests(ctx, tro, nil) + + if d := cmp.Diff(want, got, compare.SubjectCompareOption()); d != "" { + t.Errorf("Wrong subjects extracted, diff=%s", d) + } +} + +func TestGetSubjectDigestsV1(t *testing.T) { + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + { + Name: "IMAGE_URL", + Value: *v1.NewStructuredValues("registry/myimage"), + }, + { + Name: "IMAGE_DIGEST", + Value: *v1.NewStructuredValues(digest1), + }, + { + Name: "mvn1_ARTIFACT_URI", + Value: *v1.NewStructuredValues("maven-test-0.1.1.jar"), + }, + { + Name: "mvn1_ARTIFACT_DIGEST", + Value: *v1.NewStructuredValues(digest3), + }, + { + Name: "mvn1_pom_ARTIFACT_URI", + Value: *v1.NewStructuredValues("maven-test-0.1.1.pom"), + }, + { + Name: "mvn1_pom_ARTIFACT_DIGEST", + Value: *v1.NewStructuredValues(digest4), + }, + { + Name: "mvn1_src_ARTIFACT_URI", + Value: *v1.NewStructuredValues("maven-test-0.1.1-sources.jar"), + }, + { + Name: "mvn1_src_ARTIFACT_DIGEST", + Value: *v1.NewStructuredValues(digest5), + }, + { + Name: "invalid_ARTIFACT_DIGEST", + Value: *v1.NewStructuredValues(digest5), + }, + { + Name: "mvn1_pkg" + "-" + artifacts.ArtifactsOutputsResultName, + Value: *v1.NewObject(map[string]string{ + "uri": "projects/test-project-1/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre", + "digest": digest1, + }), + }, + { + Name: "mvn1_pom_sha512" + "-" + artifacts.ArtifactsOutputsResultName, + Value: *v1.NewObject(map[string]string{ + "uri": "com.google.guava:guava:1.0-jre.pom", + "digest": digest2, + }), + }, + { + Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, + Value: *v1.NewObject(map[string]string{ + "uri": "gcr.io/foo/bar", + "digest": digest3, + }), + }, + }, + }, + }, + } + + want := []in_toto.Subject{ + { + Name: "com.google.guava:guava:1.0-jre.pom", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest2, "sha256:"), + }, + }, { + Name: "index.docker.io/registry/myimage", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest1, "sha256:"), + }, + }, { + Name: "maven-test-0.1.1-sources.jar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest5, "sha256:"), + }, + }, { + Name: "maven-test-0.1.1.jar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest3, "sha256:"), + }, + }, { + Name: "maven-test-0.1.1.pom", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest4, "sha256:"), + }, + }, { + Name: "projects/test-project-1/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest1, "sha256:"), + }, + }, + } + ctx := logtesting.TestContextWithLogger(t) + tro := objects.NewTaskRunObjectV1(tr) got := extract.SubjectDigests(ctx, tro, nil) if d := cmp.Diff(want, got, compare.SubjectCompareOption()); d != "" { diff --git a/pkg/chains/formats/slsa/v1/taskrun/taskrun.go b/pkg/chains/formats/slsa/v1/taskrun/taskrun.go index 36f185a3ea..ad1801db2e 100644 --- a/pkg/chains/formats/slsa/v1/taskrun/taskrun.go +++ b/pkg/chains/formats/slsa/v1/taskrun/taskrun.go @@ -24,10 +24,10 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" - 
"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" ) -func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObject, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { +func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObjectV1, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { subjects := extract.SubjectDigests(ctx, tro, slsaConfig) mat, err := material.TaskMaterials(ctx, tro) @@ -57,8 +57,8 @@ func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObject, slsaCo // invocation describes the event that kicked off the build // we currently don't set ConfigSource because we don't know // which material the Task definition came from -func invocation(tro *objects.TaskRunObject) slsa.ProvenanceInvocation { - var paramSpecs []v1beta1.ParamSpec +func invocation(tro *objects.TaskRunObjectV1) slsa.ProvenanceInvocation { + var paramSpecs []v1.ParamSpec if ts := tro.Status.TaskSpec; ts != nil { paramSpecs = ts.Params } @@ -67,7 +67,7 @@ func invocation(tro *objects.TaskRunObject) slsa.ProvenanceInvocation { // Metadata adds taskrun's start time, completion time and reproducibility labels // to the metadata section of the generated provenance. -func Metadata(tro *objects.TaskRunObject) *slsa.ProvenanceMetadata { +func Metadata(tro *objects.TaskRunObjectV1) *slsa.ProvenanceMetadata { m := &slsa.ProvenanceMetadata{} if tro.Status.StartTime != nil { utc := tro.Status.StartTime.Time.UTC() diff --git a/pkg/chains/formats/slsa/v2alpha1/README.md b/pkg/chains/formats/slsa/v2alpha1/README.md index 6ac8ec267d..fe0060aa26 100644 --- a/pkg/chains/formats/slsa/v2alpha1/README.md +++ b/pkg/chains/formats/slsa/v2alpha1/README.md @@ -2,7 +2,7 @@ When running the following taskrun with bundle resolver referencing the [remote task](https://github.com/tektoncd/catalog/tree/main/task/git-clone/0.9): ```yaml -apiVersion: tekton.dev/v1beta1 +apiVersion: tekton.dev/v1 kind: TaskRun metadata: generateName: bundles-resolver- @@ -44,7 +44,7 @@ The following output was generated. Notice the following below: "builder": { "id": "https://tekton.dev/chains/v2" }, - "buildType": "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1beta1/TaskRun", + "buildType": "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1/TaskRun", "invocation": { "configSource": { "uri": "gcr.io/tekton-releases/catalog/upstream/git-clone", @@ -102,7 +102,7 @@ The following output was generated. 
Notice the following below: "EnableProvenanceInStatus": true, "ResultExtractionMethod": "termination-message", "MaxResultSize": 4096, - "CustomTaskVersion": "v1beta1" + "CustomTaskVersion": "v1" } } }, diff --git a/pkg/chains/formats/slsa/v2alpha1/slsav2.go b/pkg/chains/formats/slsa/v2alpha1/slsav2.go index bb7e1b68ee..e6cf230f44 100644 --- a/pkg/chains/formats/slsa/v2alpha1/slsav2.go +++ b/pkg/chains/formats/slsa/v2alpha1/slsav2.go @@ -50,7 +50,7 @@ func (s *Slsa) Wrap() bool { func (s *Slsa) CreatePayload(ctx context.Context, obj interface{}) (interface{}, error) { switch v := obj.(type) { - case *objects.TaskRunObject: + case *objects.TaskRunObjectV1: return taskrun.GenerateAttestation(ctx, s.builderID, s.Type(), v) default: return nil, fmt.Errorf("intoto does not support type: %s", v) diff --git a/pkg/chains/formats/slsa/v2alpha1/slsav2_test.go b/pkg/chains/formats/slsa/v2alpha1/slsav2_test.go index c06e126ca7..f95c395eee 100644 --- a/pkg/chains/formats/slsa/v2alpha1/slsav2_test.go +++ b/pkg/chains/formats/slsa/v2alpha1/slsav2_test.go @@ -27,7 +27,6 @@ import ( "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/internal/objectloader" corev1 "k8s.io/api/core/v1" - v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "github.com/google/go-cmp/cmp" @@ -35,7 +34,7 @@ import ( "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/tektoncd/pipeline/pkg/apis/pipeline/pod" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" logtesting "knative.dev/pkg/logging/testing" ) @@ -101,60 +100,59 @@ func TestTaskRunCreatePayload1(t *testing.T) { }, Parameters: map[string]any{ "ComputeResources": (*corev1.ResourceRequirements)(nil), - "Debug": (*v1beta1.TaskRunDebug)(nil), - "Params": v1beta1.Params{ + "Debug": (*v1.TaskRunDebug)(nil), + "Params": v1.Params{ { Name: "IMAGE", - Value: v1beta1.ParamValue{Type: "string", StringVal: "test.io/test/image"}, + Value: v1.ParamValue{Type: "string", StringVal: "test.io/test/image"}, }, { Name: "CHAINS-GIT_COMMIT", - Value: v1beta1.ParamValue{Type: "string", StringVal: "sha:taskrun"}, + Value: v1.ParamValue{Type: "string", StringVal: "sha:taskrun"}, }, { Name: "CHAINS-GIT_URL", - Value: v1beta1.ParamValue{Type: "string", StringVal: "https://git.test.com"}, + Value: v1.ParamValue{Type: "string", StringVal: "https://git.test.com"}, }, }, "PodTemplate": (*pod.Template)(nil), - "Resources": (*v1beta1.TaskRunResources)(nil), "Retries": 0, "ServiceAccountName": "default", - "SidecarOverrides": []v1beta1.TaskRunSidecarOverride(nil), - "Status": v1beta1.TaskRunSpecStatus(""), - "StatusMessage": v1beta1.TaskRunSpecStatusMessage(""), - "StepOverrides": []v1beta1.TaskRunStepOverride(nil), + "SidecarSpecs": []v1.TaskRunSidecarSpec(nil), + "Status": v1.TaskRunSpecStatus(""), + "StatusMessage": v1.TaskRunSpecStatusMessage(""), + "StepSpecs": []v1.TaskRunStepSpec(nil), "Timeout": (*metav1.Duration)(nil), - "Workspaces": []v1beta1.WorkspaceBinding(nil), + "Workspaces": []v1.WorkspaceBinding(nil), }, }, Builder: common.ProvenanceBuilder{ ID: "test_builder-1", }, - BuildType: "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1beta1/TaskRun", + BuildType: "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1/TaskRun", BuildConfig: taskrun.BuildConfig{ - TaskSpec: &v1beta1.TaskSpec{ - Params: []v1beta1.ParamSpec{ + TaskSpec: &v1.TaskSpec{ + Params: 
[]v1.ParamSpec{ {Name: "IMAGE", Type: "string"}, {Name: "filename", Type: "string"}, {Name: "DOCKERFILE", Type: "string"}, {Name: "CONTEXT", Type: "string"}, {Name: "EXTRA_ARGS", Type: "string"}, {Name: "BUILDER_IMAGE", Type: "string"}, - {Name: "CHAINS-GIT_COMMIT", Type: "string", Default: &v1beta1.ParamValue{Type: "string", StringVal: "sha:task"}}, - {Name: "CHAINS-GIT_URL", Type: "string", Default: &v1beta1.ParamValue{Type: "string", StringVal: "https://defaultgit.test.com"}}, + {Name: "CHAINS-GIT_COMMIT", Type: "string", Default: &v1.ParamValue{Type: "string", StringVal: "sha:task"}}, + {Name: "CHAINS-GIT_URL", Type: "string", Default: &v1.ParamValue{Type: "string", StringVal: "https://defaultgit.test.com"}}, }, - Steps: []v1beta1.Step{{Name: "step1"}, {Name: "step2"}, {Name: "step3"}}, - Results: []v1beta1.TaskResult{ + Steps: []v1.Step{{Name: "step1"}, {Name: "step2"}, {Name: "step3"}}, + Results: []v1.TaskResult{ {Name: "IMAGE_DIGEST", Description: "Digest of the image just built."}, {Name: "filename_DIGEST", Description: "Digest of the file just built."}, }, }, - TaskRunResults: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "IMAGE_DIGEST", - Value: v1beta1.ParamValue{Type: "string", StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}, + Value: v1.ParamValue{Type: "string", StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"}, }, { Name: "IMAGE_URL", - Value: v1beta1.ParamValue{Type: "string", StringVal: "gcr.io/my/image"}, + Value: v1.ParamValue{Type: "string", StringVal: "gcr.io/my/image"}, }, }, }, @@ -162,7 +160,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -216,54 +214,53 @@ func TestTaskRunCreatePayload2(t *testing.T) { }, Parameters: map[string]any{ "ComputeResources": (*corev1.ResourceRequirements)(nil), - "Debug": (*v1beta1.TaskRunDebug)(nil), - "Params": v1beta1.Params{ + "Debug": (*v1.TaskRunDebug)(nil), + "Params": v1.Params{ { Name: "url", - Value: v1beta1.ParamValue{Type: "string", StringVal: "https://git.test.com"}, + Value: v1.ParamValue{Type: "string", StringVal: "https://git.test.com"}, }, - {Name: "revision", Value: v1beta1.ParamValue{Type: "string"}}, + {Name: "revision", Value: v1.ParamValue{Type: "string"}}, }, "PodTemplate": (*pod.Template)(nil), - "Resources": (*v1beta1.TaskRunResources)(nil), "Retries": 0, "ServiceAccountName": "default", - "SidecarOverrides": []v1beta1.TaskRunSidecarOverride(nil), - "Status": v1beta1.TaskRunSpecStatus(""), - "StatusMessage": v1beta1.TaskRunSpecStatusMessage(""), - "StepOverrides": []v1beta1.TaskRunStepOverride(nil), + "SidecarSpecs": []v1.TaskRunSidecarSpec(nil), + "Status": v1.TaskRunSpecStatus(""), + "StatusMessage": v1.TaskRunSpecStatusMessage(""), + "StepSpecs": []v1.TaskRunStepSpec(nil), "Timeout": (*metav1.Duration)(nil), - "Workspaces": []v1beta1.WorkspaceBinding(nil), + "Workspaces": []v1.WorkspaceBinding(nil), }, }, - BuildType: "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1beta1/TaskRun", + BuildType: "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1/TaskRun", BuildConfig: taskrun.BuildConfig{ - TaskSpec: &v1beta1.TaskSpec{ - Params: []v1beta1.ParamSpec{ - {Name: "CHAINS-GIT_COMMIT", Type: "string", Default: &v1beta1.ParamValue{Type: "string", StringVal: 
"sha:taskdefault"}}, - {Name: "CHAINS-GIT_URL", Type: "string", Default: &v1beta1.ParamValue{Type: "string", StringVal: "https://git.test.com"}}, + TaskSpec: &v1.TaskSpec{ + Params: []v1.ParamSpec{ + {Name: "CHAINS-GIT_COMMIT", Type: "string", Default: &v1.ParamValue{Type: "string", StringVal: "sha:taskdefault"}}, + {Name: "CHAINS-GIT_URL", Type: "string", Default: &v1.ParamValue{Type: "string", StringVal: "https://git.test.com"}}, }, - Steps: []v1beta1.Step{{Name: "step1", Env: []v1.EnvVar{{Name: "HOME", Value: "$(params.userHome)"}, {Name: "PARAM_URL", Value: "$(params.url)"}}, Script: "git clone"}}, - Results: []v1beta1.TaskResult{ + Steps: []v1.Step{{Name: "step1", Env: []corev1.EnvVar{{Name: "HOME", Value: "$(params.userHome)"}, {Name: "PARAM_URL", Value: "$(params.url)"}}, Script: "git clone"}}, + Results: []v1.TaskResult{ {Name: "some-uri_DIGEST", Description: "Digest of a file to push."}, {Name: "some-uri", Description: "some calculated uri"}, }, }, - TaskRunResults: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "some-uri_DIGEST", - Value: v1beta1.ParamValue{Type: "string", StringVal: "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, + Value: v1.ParamValue{Type: "string", StringVal: "sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6"}, }, { Name: "some-uri", - Value: v1beta1.ParamValue{Type: "string", StringVal: "pkg:deb/debian/curl@7.50.3-1"}, + Value: v1.ParamValue{Type: "string", StringVal: "pkg:deb/debian/curl@7.50.3-1"}, }, }, }, }, } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -305,7 +302,7 @@ func TestMultipleSubjects(t *testing.T) { }, }, Predicate: slsa.ProvenancePredicate{ - BuildType: "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1beta1/TaskRun", + BuildType: "https://chains.tekton.dev/format/slsa/v2alpha1/type/tekton.dev/v1/TaskRun", Metadata: &slsa.ProvenanceMetadata{ Completeness: slsa.ProvenanceComplete{ Parameters: true, @@ -323,34 +320,33 @@ func TestMultipleSubjects(t *testing.T) { Invocation: slsa.ProvenanceInvocation{ Parameters: map[string]any{ "ComputeResources": (*corev1.ResourceRequirements)(nil), - "Debug": (*v1beta1.TaskRunDebug)(nil), - "Params": v1beta1.Params{}, + "Debug": (*v1.TaskRunDebug)(nil), + "Params": v1.Params{}, "PodTemplate": (*pod.Template)(nil), - "Resources": (*v1beta1.TaskRunResources)(nil), "Retries": 0, "ServiceAccountName": "default", - "SidecarOverrides": []v1beta1.TaskRunSidecarOverride(nil), - "Status": v1beta1.TaskRunSpecStatus(""), - "StatusMessage": v1beta1.TaskRunSpecStatusMessage(""), - "StepOverrides": []v1beta1.TaskRunStepOverride(nil), + "SidecarSpecs": []v1.TaskRunSidecarSpec(nil), + "Status": v1.TaskRunSpecStatus(""), + "StatusMessage": v1.TaskRunSpecStatusMessage(""), + "StepSpecs": []v1.TaskRunStepSpec(nil), "Timeout": (*metav1.Duration)(nil), - "Workspaces": []v1beta1.WorkspaceBinding(nil), + "Workspaces": []v1.WorkspaceBinding(nil), }, }, BuildConfig: taskrun.BuildConfig{ - TaskSpec: &v1beta1.TaskSpec{ - Params: []v1beta1.ParamSpec{}, - Results: []v1beta1.TaskResult{ + TaskSpec: &v1.TaskSpec{ + Params: []v1.ParamSpec{}, + Results: []v1.TaskResult{ {Name: "file1_DIGEST", Description: "Digest of a file to push."}, {Name: "file1", Description: "some assembled file"}, {Name: "file2_DIGEST", Description: "Digest of a file to push."}, {Name: "file2", Description: 
"some assembled file"}, }, }, - TaskRunResults: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "IMAGES", - Value: v1beta1.ParamValue{ + Value: v1.ParamValue{ Type: "string", StringVal: "gcr.io/myimage1@sha256:d4b63d3e24d6eef04a6dc0795cf8a73470688803d97c52cffa3c8d4efd3397b6,gcr.io/myimage2@sha256:daa1a56e13c85cf164e7d9e595006649e3a04c47fe4a8261320e18a0bf3b0367", }, @@ -361,7 +357,7 @@ func TestMultipleSubjects(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) } diff --git a/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun.go b/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun.go index f9c01dbc1a..47587fffbe 100644 --- a/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun.go +++ b/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun.go @@ -26,18 +26,18 @@ import ( slsav1 "github.com/tektoncd/chains/pkg/chains/formats/slsa/v1/taskrun" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "k8s.io/apimachinery/pkg/util/sets" ) // BuildConfig is the custom Chains format to fill out the // "buildConfig" section of the slsa-provenance predicate type BuildConfig struct { - TaskSpec *v1beta1.TaskSpec `json:"taskSpec"` - TaskRunResults []v1beta1.TaskRunResult `json:"taskRunResults"` + TaskSpec *v1.TaskSpec `json:"taskSpec"` + Results []v1.TaskRunResult `json:"taskRunResults"` } -func GenerateAttestation(ctx context.Context, builderID string, payloadType config.PayloadType, tro *objects.TaskRunObject) (interface{}, error) { +func GenerateAttestation(ctx context.Context, builderID string, payloadType config.PayloadType, tro *objects.TaskRunObjectV1) (interface{}, error) { subjects := extract.SubjectDigests(ctx, tro, nil) mat, err := material.TaskMaterials(ctx, tro) if err != nil { @@ -55,7 +55,7 @@ func GenerateAttestation(ctx context.Context, builderID string, payloadType conf }, BuildType: fmt.Sprintf("https://chains.tekton.dev/format/%v/type/%s", payloadType, tro.GetGVK()), Invocation: invocation(tro), - BuildConfig: BuildConfig{TaskSpec: tro.Status.TaskSpec, TaskRunResults: tro.Status.TaskRunResults}, + BuildConfig: BuildConfig{TaskSpec: tro.Status.TaskSpec, Results: tro.Status.Results}, Metadata: metadata(tro), Materials: mat, }, @@ -63,7 +63,7 @@ func GenerateAttestation(ctx context.Context, builderID string, payloadType conf return att, nil } -func metadata(tro *objects.TaskRunObject) *slsa.ProvenanceMetadata { +func metadata(tro *objects.TaskRunObjectV1) *slsa.ProvenanceMetadata { m := slsav1.Metadata(tro) m.Completeness = slsa.ProvenanceComplete{ Parameters: true, @@ -74,7 +74,7 @@ func metadata(tro *objects.TaskRunObject) *slsa.ProvenanceMetadata { // invocation describes the event that kicked off the build // we currently don't set ConfigSource because we don't know // which material the Task definition came from -func invocation(tro *objects.TaskRunObject) slsa.ProvenanceInvocation { +func invocation(tro *objects.TaskRunObjectV1) slsa.ProvenanceInvocation { i := slsa.ProvenanceInvocation{} if p := tro.Status.Provenance; p != nil && p.RefSource != nil { i.ConfigSource = slsa.ConfigSource{ @@ -94,7 +94,7 @@ func invocation(tro *objects.TaskRunObject) slsa.ProvenanceInvocation { // invocationEnv adds the tekton feature flags that were enabled // for the 
taskrun. In the future, we can populate versioning information // here as well. -func invocationEnv(tro *objects.TaskRunObject) map[string]any { +func invocationEnv(tro *objects.TaskRunObjectV1) map[string]any { var iEnv map[string]any = make(map[string]any) if tro.Status.Provenance != nil && tro.Status.Provenance.FeatureFlags != nil { iEnv["tekton-pipelines-feature-flags"] = tro.Status.Provenance.FeatureFlags @@ -104,7 +104,7 @@ func invocationEnv(tro *objects.TaskRunObject) map[string]any { // invocationParams adds all fields from the task run object except // TaskRef or TaskSpec since they are in the ConfigSource or buildConfig. -func invocationParams(tro *objects.TaskRunObject) map[string]any { +func invocationParams(tro *objects.TaskRunObjectV1) map[string]any { var iParams map[string]any = make(map[string]any) skipFields := sets.NewString("TaskRef", "TaskSpec") v := reflect.ValueOf(tro.Spec) diff --git a/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun_test.go b/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun_test.go index 3327ce0222..6139bc7e6c 100644 --- a/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun_test.go +++ b/pkg/chains/formats/slsa/v2alpha1/taskrun/taskrun_test.go @@ -17,6 +17,7 @@ limitations under the License. package taskrun import ( + "encoding/json" "reflect" "strings" "testing" @@ -35,11 +36,11 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/pipeline/pkg/apis/config" "github.com/tektoncd/pipeline/pkg/apis/pipeline/pod" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" "sigs.k8s.io/yaml" ) @@ -53,18 +54,18 @@ const ( ) func TestMetadata(t *testing.T) { - tr := &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr := &v1.TaskRun{ + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ "chains.tekton.dev/reproducible": "true", }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, } @@ -74,7 +75,7 @@ func TestMetadata(t *testing.T) { BuildStartedOn: &start, BuildFinishedOn: &end, } - got := slsav1.Metadata(objects.NewTaskRunObject(tr)) + got := slsav1.Metadata(objects.NewTaskRunObjectV1(tr)) if !reflect.DeepEqual(expected, got) { t.Fatalf("expected %v got %v", expected, got) } @@ -82,18 +83,18 @@ func TestMetadata(t *testing.T) { func TestMetadataInTimeZone(t *testing.T) { tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) - tr := &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr := &v1.TaskRun{ + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ "chains.tekton.dev/reproducible": "true", }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 
12, 12, 12, tz)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, }, }, } @@ -103,14 +104,14 @@ func TestMetadataInTimeZone(t *testing.T) { BuildStartedOn: &start, BuildFinishedOn: &end, } - got := slsav1.Metadata(objects.NewTaskRunObject(tr)) + got := slsav1.Metadata(objects.NewTaskRunObjectV1(tr)) if !reflect.DeepEqual(expected, got) { t.Fatalf("expected %v got %v", expected, got) } } func TestInvocation(t *testing.T) { - taskrun := `apiVersion: tekton.dev/v1beta1 + taskrun := `apiVersion: tekton.dev/v1 kind: TaskRun metadata: uid: my-uid @@ -167,40 +168,39 @@ status: RunningInEnvWithInjectedSidecars: true ` - var taskRun *v1beta1.TaskRun + var taskRun *v1.TaskRun if err := yaml.Unmarshal([]byte(taskrun), &taskRun); err != nil { t.Fatal(err) } expected := slsa.ProvenanceInvocation{ Parameters: map[string]any{ - "Params": v1beta1.Params{ + "Params": v1.Params{ { Name: "my-param", - Value: v1beta1.ParamValue{Type: "string", StringVal: "string-param"}, + Value: v1.ParamValue{Type: "string", StringVal: "string-param"}, }, { Name: "my-array-param", - Value: v1beta1.ParamValue{Type: "array", ArrayVal: []string{"my", "array"}}, + Value: v1.ParamValue{Type: "array", ArrayVal: []string{"my", "array"}}, }, - {Name: "my-empty-string-param", Value: v1beta1.ParamValue{Type: "string"}}, + {Name: "my-empty-string-param", Value: v1.ParamValue{Type: "string"}}, { Name: "my-empty-array-param", - Value: v1beta1.ParamValue{Type: "array", ArrayVal: []string{}}, + Value: v1.ParamValue{Type: "array", ArrayVal: []string{}}, }, }, "ComputeResources": (*corev1.ResourceRequirements)(nil), - "Debug": (*v1beta1.TaskRunDebug)(nil), + "Debug": (*v1.TaskRunDebug)(nil), "PodTemplate": (*pod.Template)(nil), - "Resources": (*v1beta1.TaskRunResources)(nil), "Retries": 0, "ServiceAccountName": "", - "SidecarOverrides": []v1beta1.TaskRunSidecarOverride(nil), - "Status": v1beta1.TaskRunSpecStatus(""), - "StatusMessage": v1beta1.TaskRunSpecStatusMessage(""), - "StepOverrides": []v1beta1.TaskRunStepOverride(nil), + "SidecarSpecs": []v1.TaskRunSidecarSpec(nil), + "Status": v1.TaskRunSpecStatus(""), + "StatusMessage": v1.TaskRunSpecStatusMessage(""), + "StepSpecs": []v1.TaskRunStepSpec(nil), "Timeout": (*metav1.Duration)(nil), - "Workspaces": []v1beta1.WorkspaceBinding(nil), + "Workspaces": []v1.WorkspaceBinding(nil), }, Environment: map[string]any{ "tekton-pipelines-feature-flags": &config.FeatureFlags{ @@ -214,7 +214,7 @@ status: }, }, } - got := invocation(objects.NewTaskRunObject(taskRun)) + got := invocation(objects.NewTaskRunObjectV1(taskRun)) if !reflect.DeepEqual(expected, got) { if d := cmp.Diff(expected, got); d != "" { t.Log(d) @@ -223,19 +223,19 @@ status: } } -func TestGetSubjectDigests(t *testing.T) { - tr := &v1beta1.TaskRun{ +func TestGetSubjectDigestsV1Beta1(t *testing.T) { + trV1Beta1 := &v1beta1.TaskRun{ //nolint:staticcheck Spec: v1beta1.TaskRunSpec{ - Resources: &v1beta1.TaskRunResources{ - Outputs: []v1beta1.TaskResourceBinding{ + Resources: &v1beta1.TaskRunResources{ //nolint:staticcheck + Outputs: []v1beta1.TaskResourceBinding{ //nolint:staticcheck { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck Name: "nil-check", }, }, { 
- PipelineResourceBinding: v1beta1.PipelineResourceBinding{ + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:staticcheck Name: "built-image", - ResourceSpec: &v1alpha1.PipelineResourceSpec{ + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:staticcheck Type: backport.PipelineResourceTypeImage, }, }, @@ -350,15 +350,136 @@ func TestGetSubjectDigests(t *testing.T) { Digest: common.DigestSet{ "sha256": strings.TrimPrefix(digest1, "sha256:"), }, - }, { + }, + { Name: "registry/resource-image", Digest: common.DigestSet{ "sha256": strings.TrimPrefix(digest2, "sha256:"), }, }, } - tro := objects.NewTaskRunObject(tr) ctx := logtesting.TestContextWithLogger(t) + trV1 := &v1.TaskRun{} + if err := trV1Beta1.ConvertTo(ctx, trV1); err == nil { + if trV1Beta1.Spec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Spec.Resources) //nolint:staticcheck + if err != nil { + t.Errorf("Error serializing to JSON: %v", err) + } + trV1.Annotations["tekton.dev/v1beta1-spec-resources"] = string(jsonData) + } + } + + tro := objects.NewTaskRunObjectV1(trV1) + got := extract.SubjectDigests(ctx, tro, nil) + + if d := cmp.Diff(want, got, compare.SubjectCompareOption()); d != "" { + t.Errorf("Wrong subjects extracted, diff=%s", d) + } +} + +func TestGetSubjectDigestsV1(t *testing.T) { + tr := &v1.TaskRun{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + { + Name: "IMAGE_URL", + Value: *v1.NewStructuredValues("registry/myimage"), + }, + { + Name: "IMAGE_DIGEST", + Value: *v1.NewStructuredValues(digest1), + }, + { + Name: "mvn1_ARTIFACT_URI", + Value: *v1.NewStructuredValues("maven-test-0.1.1.jar"), + }, + { + Name: "mvn1_ARTIFACT_DIGEST", + Value: *v1.NewStructuredValues(digest3), + }, + { + Name: "mvn1_pom_ARTIFACT_URI", + Value: *v1.NewStructuredValues("maven-test-0.1.1.pom"), + }, + { + Name: "mvn1_pom_ARTIFACT_DIGEST", + Value: *v1.NewStructuredValues(digest4), + }, + { + Name: "mvn1_src_ARTIFACT_URI", + Value: *v1.NewStructuredValues("maven-test-0.1.1-sources.jar"), + }, + { + Name: "mvn1_src_ARTIFACT_DIGEST", + Value: *v1.NewStructuredValues(digest5), + }, + { + Name: "invalid_ARTIFACT_DIGEST", + Value: *v1.NewStructuredValues(digest5), + }, + { + Name: "mvn1_pkg" + "-" + artifacts.ArtifactsOutputsResultName, + Value: *v1.NewObject(map[string]string{ + "uri": "projects/test-project-1/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre", + "digest": digest1, + }), + }, + { + Name: "mvn1_pom_sha512" + "-" + artifacts.ArtifactsOutputsResultName, + Value: *v1.NewObject(map[string]string{ + "uri": "com.google.guava:guava:1.0-jre.pom", + "digest": digest2, + }), + }, + { + Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, + Value: *v1.NewObject(map[string]string{ + "uri": "gcr.io/foo/bar", + "digest": digest3, + }), + }, + }, + }, + }, + } + + want := []in_toto.Subject{ + { + Name: "com.google.guava:guava:1.0-jre.pom", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest2, "sha256:"), + }, + }, { + Name: "index.docker.io/registry/myimage", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest1, "sha256:"), + }, + }, { + Name: "maven-test-0.1.1-sources.jar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest5, "sha256:"), + }, + }, { + Name: "maven-test-0.1.1.jar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest3, "sha256:"), + }, + }, { + Name: "maven-test-0.1.1.pom", + Digest: common.DigestSet{ + 
"sha256": strings.TrimPrefix(digest4, "sha256:"), + }, + }, { + Name: "projects/test-project-1/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest1, "sha256:"), + }, + }, + } + ctx := logtesting.TestContextWithLogger(t) + tro := objects.NewTaskRunObjectV1(tr) got := extract.SubjectDigests(ctx, tro, nil) if d := cmp.Diff(want, got, compare.SubjectCompareOption()); d != "" { diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters.go b/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters.go index 27185766a4..3ae576bcd4 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters.go @@ -20,25 +20,24 @@ import ( "fmt" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) -func buildConfigSource(provenance *v1beta1.Provenance) map[string]string { +func buildConfigSource(provenance objects.GenericProvenance) map[string]string { ref := "" - for alg, hex := range provenance.RefSource.Digest { + for alg, hex := range provenance.GetRefSourceDigest() { ref = fmt.Sprintf("%s:%s", alg, hex) break } buildConfigSource := map[string]string{ "ref": ref, - "repository": provenance.RefSource.URI, - "path": provenance.RefSource.EntryPoint, + "repository": provenance.GetRefSourceURI(), + "path": provenance.GetRefSourceEntrypoint(), } return buildConfigSource } // PipelineRun adds the pipeline run spec and provenance if available -func PipelineRun(pro *objects.PipelineRunObject) map[string]any { +func PipelineRun(pro *objects.PipelineRunObjectV1) map[string]any { externalParams := make(map[string]any) if provenance := pro.GetRemoteProvenance(); provenance != nil { @@ -49,7 +48,7 @@ func PipelineRun(pro *objects.PipelineRunObject) map[string]any { } // TaskRun adds the task run spec and provenance if available -func TaskRun(tro *objects.TaskRunObject) map[string]any { +func TaskRun(tro *objects.TaskRunObjectV1) map[string]any { externalParams := make(map[string]any) if provenance := tro.GetRemoteProvenance(); provenance != nil { diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters_test.go index a389224676..ce31575bec 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/external_parameters/external_parameters_test.go @@ -23,16 +23,18 @@ import ( "github.com/google/go-cmp/cmp" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/internal/objectloader" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" ) func TestBuildConfigSource(t *testing.T) { digest := map[string]string{"alg1": "hex1", "alg2": "hex2"} - provenance := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ - Digest: digest, - URI: "https://tekton.com", - EntryPoint: "/path/to/entry", + provenance := &objects.ProvenanceV1{ + Provenance: &v1.Provenance{ + RefSource: &v1.RefSource{ + Digest: digest, + URI: "https://tekton.com", + EntryPoint: "/path/to/entry", + }, }, } @@ -65,7 +67,7 @@ func TestBuildConfigSource(t *testing.T) { } } -func createPro(path string) *objects.PipelineRunObject 
{ +func createPro(path string) *objects.PipelineRunObjectV1 { pr, err := objectloader.PipelineRunFromFile(path) if err != nil { panic(err) @@ -78,7 +80,7 @@ func createPro(path string) *objects.PipelineRunObject { if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p @@ -90,15 +92,17 @@ func TestPipelineRun(t *testing.T) { got := PipelineRun(pro) want := map[string]any{ - "runSpec": v1beta1.PipelineRunSpec{ - PipelineRef: &v1beta1.PipelineRef{Name: "test-pipeline"}, - Params: v1beta1.Params{ + "runSpec": v1.PipelineRunSpec{ + PipelineRef: &v1.PipelineRef{Name: "test-pipeline"}, + Params: v1.Params{ { Name: "IMAGE", - Value: v1beta1.ParamValue{Type: "string", StringVal: "test.io/test/image"}, + Value: v1.ParamValue{Type: "string", StringVal: "test.io/test/image"}, }, }, - ServiceAccountName: "pipeline", + TaskRunTemplate: v1.PipelineTaskRunTemplate{ + ServiceAccountName: "pipeline", + }, }, } @@ -112,17 +116,17 @@ func TestTaskRun(t *testing.T) { if err != nil { t.Fatal(err) } - got := TaskRun(objects.NewTaskRunObject(tr)) + got := TaskRun(objects.NewTaskRunObjectV1(tr)) want := map[string]any{ - "runSpec": v1beta1.TaskRunSpec{ - Params: v1beta1.Params{ - {Name: "IMAGE", Value: v1beta1.ParamValue{Type: "string", StringVal: "test.io/test/image"}}, - {Name: "CHAINS-GIT_COMMIT", Value: v1beta1.ParamValue{Type: "string", StringVal: "taskrun"}}, - {Name: "CHAINS-GIT_URL", Value: v1beta1.ParamValue{Type: "string", StringVal: "https://git.test.com"}}, + "runSpec": v1.TaskRunSpec{ + Params: v1.Params{ + {Name: "IMAGE", Value: v1.ParamValue{Type: "string", StringVal: "test.io/test/image"}}, + {Name: "CHAINS-GIT_COMMIT", Value: v1.ParamValue{Type: "string", StringVal: "taskrun"}}, + {Name: "CHAINS-GIT_URL", Value: v1.ParamValue{Type: "string", StringVal: "https://git.test.com"}}, }, ServiceAccountName: "default", - TaskRef: &v1beta1.TaskRef{Name: "build", Kind: "Task"}, + TaskRef: &v1.TaskRef{Name: "build", Kind: "Task"}, }, } diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters.go b/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters.go index 844588fb54..cffe9626d5 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters.go @@ -18,14 +18,13 @@ package internalparameters import ( "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) // SLSAInternalParameters provides the chains config as internalparameters func SLSAInternalParameters(tko objects.TektonObject) map[string]any { internalParams := make(map[string]any) - if provenance := tko.GetProvenance(); provenance != (*v1beta1.Provenance)(nil) && provenance.FeatureFlags != nil { - internalParams["tekton-pipelines-feature-flags"] = *provenance.FeatureFlags + if provenance := tko.GetProvenance(); !provenance.IsNil() && !provenance.FeatureFlagsIsNil() { + internalParams["tekton-pipelines-feature-flags"] = *provenance.GetFeatureFlags() } return internalParams } @@ -33,8 +32,8 @@ func SLSAInternalParameters(tko objects.TektonObject) map[string]any { // TektonInternalParameters provides the chains config as well as annotations and labels func TektonInternalParameters(tko objects.TektonObject) map[string]any { internalParams := make(map[string]any) - if provenance := tko.GetProvenance(); provenance 
!= (*v1beta1.Provenance)(nil) && provenance.FeatureFlags != nil { - internalParams["tekton-pipelines-feature-flags"] = *provenance.FeatureFlags + if provenance := tko.GetProvenance(); !provenance.IsNil() && !provenance.FeatureFlagsIsNil() { + internalParams["tekton-pipelines-feature-flags"] = *provenance.GetFeatureFlags() } internalParams["labels"] = tko.GetLabels() internalParams["annotations"] = tko.GetAnnotations() diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters_test.go index 95cbee3cc0..122b01da3b 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/internal_parameters/internal_parameters_test.go @@ -30,7 +30,7 @@ func TestTektonInternalParameters(t *testing.T) { if err != nil { t.Fatal(err) } - tro := objects.NewTaskRunObject(tr) + tro := objects.NewTaskRunObjectV1(tr) got := TektonInternalParameters(tro) want := map[string]any{ "labels": tro.GetLabels(), @@ -48,7 +48,7 @@ func TestSLSAInternalParameters(t *testing.T) { if err != nil { t.Fatal(err) } - tro := objects.NewTaskRunObject(tr) + tro := objects.NewTaskRunObjectV1(tr) got := SLSAInternalParameters(tro) want := map[string]any{ "tekton-pipelines-feature-flags": config.FeatureFlags{EnableAPIFields: "beta", ResultExtractionMethod: "termination-message"}, diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun.go b/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun.go index cab493d5f6..89df412b83 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun.go @@ -36,7 +36,7 @@ const ( ) // GenerateAttestation generates a provenance statement with SLSA v1.0 predicate for a pipeline run. -func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig *slsaconfig.SlsaConfig) (interface{}, error) { +func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaconfig *slsaconfig.SlsaConfig) (interface{}, error) { bp, err := byproducts(pro) if err != nil { return nil, err @@ -67,7 +67,7 @@ func GenerateAttestation(ctx context.Context, pro *objects.PipelineRunObject, sl return att, nil } -func metadata(pro *objects.PipelineRunObject) slsa.BuildMetadata { +func metadata(pro *objects.PipelineRunObjectV1) slsa.BuildMetadata { m := slsa.BuildMetadata{ InvocationID: string(pro.ObjectMeta.UID), } @@ -83,9 +83,9 @@ func metadata(pro *objects.PipelineRunObject) slsa.BuildMetadata { } // byproducts contains the pipelineRunResults -func byproducts(pro *objects.PipelineRunObject) ([]slsa.ResourceDescriptor, error) { +func byproducts(pro *objects.PipelineRunObjectV1) ([]slsa.ResourceDescriptor, error) { byProd := []slsa.ResourceDescriptor{} - for _, key := range pro.Status.PipelineResults { + for _, key := range pro.Status.Results { content, err := json.Marshal(key.Value) if err != nil { return nil, err @@ -101,7 +101,7 @@ func byproducts(pro *objects.PipelineRunObject) ([]slsa.ResourceDescriptor, erro } // getBuildDefinition get the buildDefinition based on the configured buildType. 
This will default to the slsa buildType -func getBuildDefinition(ctx context.Context, slsaconfig *slsaconfig.SlsaConfig, pro *objects.PipelineRunObject) (slsa.ProvenanceBuildDefinition, error) { +func getBuildDefinition(ctx context.Context, slsaconfig *slsaconfig.SlsaConfig, pro *objects.PipelineRunObjectV1) (slsa.ProvenanceBuildDefinition, error) { // if buildType is not set in the chains-config, default to slsa build type buildDefinitionType := slsaconfig.BuildType if slsaconfig.BuildType == "" { diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun_test.go index 34a12edc48..6dba281c87 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/pipelinerun/pipelinerun_test.go @@ -34,14 +34,14 @@ import ( resolveddependencies "github.com/tektoncd/chains/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/internal/objectloader" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" ) func TestMetadata(t *testing.T) { - pr := &v1beta1.PipelineRun{ //nolint:staticcheck - ObjectMeta: v1.ObjectMeta{ + pr := &v1.PipelineRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ @@ -49,10 +49,10 @@ func TestMetadata(t *testing.T) { }, UID: "abhhf-12354-asjsdbjs23-3435353n", }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, } @@ -63,7 +63,7 @@ func TestMetadata(t *testing.T) { StartedOn: &start, FinishedOn: &end, } - got := metadata(objects.NewPipelineRunObject(pr)) + got := metadata(objects.NewPipelineRunObjectV1(pr)) if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -71,8 +71,8 @@ func TestMetadata(t *testing.T) { func TestMetadataInTimeZone(t *testing.T) { tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) - pr := &v1beta1.PipelineRun{ //nolint:staticcheck - ObjectMeta: v1.ObjectMeta{ + pr := &v1.PipelineRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ @@ -80,10 +80,10 @@ func TestMetadataInTimeZone(t *testing.T) { }, UID: "abhhf-12354-asjsdbjs23-3435353n", }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, + 
CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, }, }, } @@ -94,18 +94,18 @@ func TestMetadataInTimeZone(t *testing.T) { StartedOn: &start, FinishedOn: &end, } - got := metadata(objects.NewPipelineRunObject(pr)) + got := metadata(objects.NewPipelineRunObjectV1(pr)) if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } } func TestByProducts(t *testing.T) { - resultValue := v1beta1.ResultValue{Type: "string", StringVal: "result-value"} - pr := &v1beta1.PipelineRun{ //nolint:staticcheck - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: []v1beta1.PipelineRunResult{ + resultValue := v1.ResultValue{Type: "string", StringVal: "result-value"} + pr := &v1.PipelineRun{ //nolint:staticcheck + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: []v1.PipelineRunResult{ { Name: "result-name", Value: resultValue, @@ -126,7 +126,7 @@ func TestByProducts(t *testing.T) { MediaType: JsonMediaType, }, } - got, err := byproducts(objects.NewPipelineRunObject(pr)) + got, err := byproducts(objects.NewPipelineRunObjectV1(pr)) if err != nil { t.Fatalf("Could not extract byproducts: %s", err) } @@ -135,7 +135,7 @@ func TestByProducts(t *testing.T) { } } -func createPro(path string) *objects.PipelineRunObject { +func createPro(path string) *objects.PipelineRunObjectV1 { pr, err := objectloader.PipelineRunFromFile(path) if err != nil { panic(err) @@ -148,7 +148,7 @@ func createPro(path string) *objects.PipelineRunObject { if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p @@ -275,7 +275,7 @@ func TestGenerateAttestation(t *testing.T) { } } -func getResolvedDependencies(addTasks func(*objects.TaskRunObject) (*v1resourcedescriptor.ResourceDescriptor, error)) []v1resourcedescriptor.ResourceDescriptor { //nolint:staticcheck +func getResolvedDependencies(addTasks func(*objects.TaskRunObjectV1) (*v1resourcedescriptor.ResourceDescriptor, error)) []v1resourcedescriptor.ResourceDescriptor { //nolint:staticcheck pr := createPro("../../../testdata/v2alpha2/pipelinerun1.json") rd, err := resolveddependencies.PipelineRun(context.Background(), pr, &slsaconfig.SlsaConfig{DeepInspectionEnabled: false}, addTasks) if err != nil { @@ -294,7 +294,7 @@ func TestGetBuildDefinition(t *testing.T) { } tests := []struct { name string - taskContent func(*objects.TaskRunObject) (*v1resourcedescriptor.ResourceDescriptor, error) //nolint:staticcheck + taskContent func(*objects.TaskRunObjectV1) (*v1resourcedescriptor.ResourceDescriptor, error) //nolint:staticcheck config *slsaconfig.SlsaConfig want slsa.ProvenanceBuildDefinition }{ diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go index 54fb4e1454..c464a950c4 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies.go @@ -19,12 +19,15 @@ package resolveddependencies import ( "context" "encoding/json" + "fmt" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" v1 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/material" 
"github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" + v1pipeline "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "go.uber.org/zap" "knative.dev/pkg/logging" ) @@ -44,11 +47,11 @@ const ( // used to toggle the fields in resolvedDependencies. see AddTektonTaskDescriptor // and AddSLSATaskDescriptor -type addTaskDescriptorContent func(*objects.TaskRunObject) (*v1.ResourceDescriptor, error) //nolint:staticcheck +type addTaskDescriptorContent func(*objects.TaskRunObjectV1) (*v1.ResourceDescriptor, error) //nolint:staticcheck // the more verbose resolved dependency content. this adds the name, uri, digest // and content if possible. -func AddTektonTaskDescriptor(tr *objects.TaskRunObject) (*v1.ResourceDescriptor, error) { //nolint:staticcheck +func AddTektonTaskDescriptor(tr *objects.TaskRunObjectV1) (*v1.ResourceDescriptor, error) { //nolint:staticcheck rd := v1.ResourceDescriptor{} storedTr, err := json.Marshal(tr) if err != nil { @@ -67,7 +70,7 @@ func AddTektonTaskDescriptor(tr *objects.TaskRunObject) (*v1.ResourceDescriptor, // resolved dependency content for the more generic slsa verifiers. just logs // the name, uri and digest. -func AddSLSATaskDescriptor(tr *objects.TaskRunObject) (*v1.ResourceDescriptor, error) { //nolint:staticcheck +func AddSLSATaskDescriptor(tr *objects.TaskRunObjectV1) (*v1.ResourceDescriptor, error) { //nolint:staticcheck if tr.Status.Provenance != nil && tr.Status.Provenance.RefSource != nil { return &v1.ResourceDescriptor{ Name: pipelineTaskConfigName, @@ -131,7 +134,7 @@ func removeDuplicateResolvedDependencies(resolvedDependencies []v1.ResourceDescr // fromPipelineTask adds the resolved dependencies from pipeline tasks // such as pipeline task uri/digest for remote pipeline tasks and step and sidecar images. -func fromPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObject, addTasks addTaskDescriptorContent) ([]v1.ResourceDescriptor, error) { +func fromPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObjectV1, addTasks addTaskDescriptorContent) ([]v1.ResourceDescriptor, error) { pSpec := pro.Status.PipelineSpec resolvedDependencies := []v1.ResourceDescriptor{} if pSpec != nil { @@ -177,30 +180,48 @@ func fromPipelineTask(logger *zap.SugaredLogger, pro *objects.PipelineRunObject, } // taskDependencies gather all dependencies in a task and adds them to resolvedDependencies -func taskDependencies(ctx context.Context, tr *objects.TaskRunObject) ([]v1.ResourceDescriptor, error) { +func taskDependencies(ctx context.Context, tro *objects.TaskRunObjectV1) ([]v1.ResourceDescriptor, error) { var resolvedDependencies []v1.ResourceDescriptor var err error mats := []common.ProvenanceMaterial{} // add step and sidecar images - stepMaterials, err := material.FromStepImages(tr) + stepMaterials, err := material.FromStepImages(tro) mats = append(mats, stepMaterials...) if err != nil { return nil, err } - sidecarMaterials, err := material.FromSidecarImages(tr) + sidecarMaterials, err := material.FromSidecarImages(tro) if err != nil { return nil, err } mats = append(mats, sidecarMaterials...) resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, "")...) 
- mats = material.FromTaskParamsAndResults(ctx, tr) + mats = material.FromTaskParamsAndResults(ctx, tro) // convert materials to resolved dependencies resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, inputResultName)...) // add task resources - mats = material.FromTaskResources(ctx, tr) + // ===== + // convert to v1beta1 and add any task resources + serializedResources := tro.Annotations["tekton.dev/v1beta1-spec-resources"] + var resources v1beta1.TaskRunResources //nolint:staticcheck + shouldReplace := false + if err := json.Unmarshal([]byte(serializedResources), &resources); err == nil { + shouldReplace = true + + } + trV1Beta1 := &v1beta1.TaskRun{} //nolint:staticcheck + fmt.Printf("%v", tro.GetObject().(*v1pipeline.TaskRun)) + if err := trV1Beta1.ConvertFrom(ctx, tro.GetObject().(*v1pipeline.TaskRun)); err == nil { + if shouldReplace { + trV1Beta1.Spec.Resources = &resources //nolint:staticcheck + } + mats = material.FromTaskResources(ctx, trV1Beta1) + + } + // convert materials to resolved dependencies resolvedDependencies = append(resolvedDependencies, convertMaterialsToResolvedDependencies(mats, pipelineResourceName)...) @@ -214,7 +235,7 @@ func taskDependencies(ctx context.Context, tr *objects.TaskRunObject) ([]v1.Reso } // TaskRun constructs `predicate.resolvedDependencies` section by collecting all the artifacts that influence a taskrun such as source code repo and step&sidecar base images. -func TaskRun(ctx context.Context, tro *objects.TaskRunObject) ([]v1.ResourceDescriptor, error) { +func TaskRun(ctx context.Context, tro *objects.TaskRunObjectV1) ([]v1.ResourceDescriptor, error) { var resolvedDependencies []v1.ResourceDescriptor var err error @@ -238,7 +259,7 @@ func TaskRun(ctx context.Context, tro *objects.TaskRunObject) ([]v1.ResourceDesc } // PipelineRun constructs `predicate.resolvedDependencies` section by collecting all the artifacts that influence a pipeline run such as source code repo and step&sidecar base images. 
-func PipelineRun(ctx context.Context, pro *objects.PipelineRunObject, slsaconfig *slsaconfig.SlsaConfig, addTasks addTaskDescriptorContent) ([]v1.ResourceDescriptor, error) { +func PipelineRun(ctx context.Context, pro *objects.PipelineRunObjectV1, slsaconfig *slsaconfig.SlsaConfig, addTasks addTaskDescriptorContent) ([]v1.ResourceDescriptor, error) { var err error var resolvedDependencies []v1.ResourceDescriptor logger := logging.FromContext(ctx) diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go index d8013bdf07..1e39485fd7 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/resolved_dependencies/resolved_dependencies_test.go @@ -23,7 +23,6 @@ import ( "github.com/google/go-cmp/cmp" "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" - v1 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" v1slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" "github.com/tektoncd/chains/internal/backport" "github.com/tektoncd/chains/pkg/artifacts" @@ -31,6 +30,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats/slsa/internal/slsaconfig" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/internal/objectloader" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/apis/resource/v1alpha1" logtesting "knative.dev/pkg/logging/testing" @@ -38,15 +38,15 @@ import ( const digest = "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7" -var pro *objects.PipelineRunObject -var proStructuredResults *objects.PipelineRunObject +var pro *objects.PipelineRunObjectV1 +var proStructuredResults *objects.PipelineRunObjectV1 func init() { pro = createPro("../../../testdata/v2alpha2/pipelinerun1.json") proStructuredResults = createPro("../../../testdata/v2alpha2/pipelinerun_structured_results.json") } -func createPro(path string) *objects.PipelineRunObject { +func createPro(path string) *objects.PipelineRunObjectV1 { var err error pr, err := objectloader.PipelineRunFromFile(path) if err != nil { @@ -60,7 +60,7 @@ func createPro(path string) *objects.PipelineRunObject { if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p @@ -95,11 +95,11 @@ func tektonTaskRuns() map[string][]byte { func TestRemoveDuplicates(t *testing.T) { tests := []struct { name string - rds []v1.ResourceDescriptor - want []v1.ResourceDescriptor + rds []v1slsa.ResourceDescriptor + want []v1slsa.ResourceDescriptor }{{ name: "no duplicate resolvedDependencies", - rds: []v1.ResourceDescriptor{ + rds: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -117,7 +117,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, }, - want: []v1.ResourceDescriptor{ + want: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -137,7 +137,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, { name: "same uri and digest", - rds: []v1.ResourceDescriptor{ + rds: []v1slsa.ResourceDescriptor{ { URI: 
"oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -150,7 +150,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, }, - want: []v1.ResourceDescriptor{ + want: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -160,7 +160,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, { name: "same uri but different digest", - rds: []v1.ResourceDescriptor{ + rds: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -173,7 +173,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, }, - want: []v1.ResourceDescriptor{ + want: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -188,7 +188,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, { name: "same uri but different digest, swap order", - rds: []v1.ResourceDescriptor{ + rds: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -201,7 +201,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, }, - want: []v1.ResourceDescriptor{ + want: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -216,7 +216,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, { name: "task config must be present", - rds: []v1.ResourceDescriptor{ + rds: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -235,7 +235,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, }, - want: []v1.ResourceDescriptor{ + want: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -256,7 +256,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, { name: "pipeline config must be present", - rds: []v1.ResourceDescriptor{ + rds: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -275,7 +275,7 @@ func TestRemoveDuplicates(t *testing.T) { }, }, }, - want: []v1.ResourceDescriptor{ + want: []v1slsa.ResourceDescriptor{ { URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", Digest: common.DigestSet{ @@ -310,198 +310,223 @@ func TestRemoveDuplicates(t *testing.T) { func TestTaskRun(t *testing.T) { tests := []struct { - name string - taskRun *v1beta1.TaskRun //nolint:staticcheck - want []v1.ResourceDescriptor - }{{ - name: "resolvedDependencies from pipeline resources", - taskRun: &v1beta1.TaskRun{ //nolint:staticcheck - Spec: v1beta1.TaskRunSpec{ - Resources: &v1beta1.TaskRunResources{ //nolint:all //incompatible with pipelines v0.45 - Inputs: []v1beta1.TaskResourceBinding{ //nolint:all //incompatible with pipelines v0.45 - { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:all //incompatible with pipelines v0.45 - Name: "nil-resource-spec", - }, - }, { - PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:all //incompatible with pipelines v0.45 - Name: "repo", - ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:all //incompatible with pipelines v0.45 - Params: []v1alpha1.ResourceParam{ //nolint:all //incompatible with pipelines v0.45 - {Name: "url", Value: 
"https://github.com/GoogleContainerTools/distroless"}, + name string + obj objects.TektonObject //nolint:staticcheck + want []v1slsa.ResourceDescriptor + }{ + { + name: "resolvedDependencies from pipeline resources", + obj: objects.NewTaskRunObjectV1Beta1(&v1beta1.TaskRun{ //nolint:staticcheck + Spec: v1beta1.TaskRunSpec{ + Resources: &v1beta1.TaskRunResources{ //nolint:all //incompatible with pipelines v0.45 + Inputs: []v1beta1.TaskResourceBinding{ //nolint:all //incompatible with pipelines v0.45 + { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:all //incompatible with pipelines v0.45 + Name: "nil-resource-spec", + }, + }, { + PipelineResourceBinding: v1beta1.PipelineResourceBinding{ //nolint:all //incompatible with pipelines v0.45 + Name: "repo", + ResourceSpec: &v1alpha1.PipelineResourceSpec{ //nolint:all //incompatible with pipelines v0.45 + Params: []v1alpha1.ResourceParam{ //nolint:all //incompatible with pipelines v0.45 + {Name: "url", Value: "https://github.com/GoogleContainerTools/distroless"}, + }, + Type: backport.PipelineResourceTypeGit, }, - Type: backport.PipelineResourceTypeGit, }, }, }, }, }, - }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - { - Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, - Value: *v1beta1.NewObject(map[string]string{ - "uri": "gcr.io/foo/bar", - "digest": digest, - }), + Status: v1beta1.TaskRunStatus{ + TaskRunStatusFields: v1beta1.TaskRunStatusFields{ + TaskRunResults: []v1beta1.TaskRunResult{ + { + Name: "img1_input" + "-" + artifacts.ArtifactsInputsResultName, + Value: *v1beta1.NewObject(map[string]string{ + "uri": "gcr.io/foo/bar", + "digest": digest, + }), + }, }, - }, - ResourcesResult: []v1beta1.PipelineResourceResult{ - { - ResourceName: "repo", - Key: "commit", - Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", - }, { - ResourceName: "repo", - Key: "url", - Value: "https://github.com/GoogleContainerTools/distroless", + ResourcesResult: []v1beta1.PipelineResourceResult{ + { + ResourceName: "repo", + Key: "commit", + Value: "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, { + ResourceName: "repo", + Key: "url", + Value: "https://github.com/GoogleContainerTools/distroless", + }, }, }, }, - }, - }, - want: []v1.ResourceDescriptor{ - { - Name: "inputs/result", - URI: "gcr.io/foo/bar", - Digest: common.DigestSet{ - "sha256": strings.TrimPrefix(digest, "sha256:"), + }), + want: []v1slsa.ResourceDescriptor{ + { + Name: "inputs/result", + URI: "gcr.io/foo/bar", + Digest: common.DigestSet{ + "sha256": strings.TrimPrefix(digest, "sha256:"), + }, }, - }, - { - Name: "pipelineResource", - URI: "git+https://github.com/GoogleContainerTools/distroless.git", - Digest: common.DigestSet{ - "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + { + Name: "pipelineResource", + URI: "git+https://github.com/GoogleContainerTools/distroless.git", + Digest: common.DigestSet{ + "sha1": "50c56a48cfb3a5a80fa36ed91c739bdac8381cbe", + }, }, }, }, - }, { - name: "resolvedDependencies from remote task", - taskRun: &v1beta1.TaskRun{ //nolint:staticcheck - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Provenance: &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ - URI: "git+github.com/something.git", - Digest: map[string]string{ - "sha1": "abcd1234", + { + name: "resolvedDependencies from remote task", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ //nolint:staticcheck + Status: v1.TaskRunStatus{ + 
TaskRunStatusFields: v1.TaskRunStatusFields{ + Provenance: &v1.Provenance{ + RefSource: &v1.RefSource{ + URI: "git+github.com/something.git", + Digest: map[string]string{ + "sha1": "abcd1234", + }, }, }, }, }, - }, - }, - want: []v1.ResourceDescriptor{ - { - Name: "task", - URI: "git+github.com/something.git", - Digest: common.DigestSet{ - "sha1": "abcd1234", - }, - }, - }, - }, { - name: "git resolvedDependencies from taskrun params", - taskRun: &v1beta1.TaskRun{ //nolint:staticcheck - Spec: v1beta1.TaskRunSpec{ - Params: []v1beta1.Param{{ - Name: "CHAINS-GIT_COMMIT", - Value: *v1beta1.NewStructuredValues("my-commit"), - }, { - Name: "CHAINS-GIT_URL", - Value: *v1beta1.NewStructuredValues("github.com/something"), - }}, - }, - }, - want: []v1.ResourceDescriptor{ - { - Name: "inputs/result", - URI: "git+github.com/something.git", - Digest: common.DigestSet{ - "sha1": "my-commit", + }), + want: []v1slsa.ResourceDescriptor{ + { + Name: "task", + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "abcd1234", + }, }, }, }, - }, { - name: "resolvedDependencies from step images", - taskRun: &v1beta1.TaskRun{ //nolint:staticcheck - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Steps: []v1beta1.StepState{{ - Name: "git-source-repo-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "git-source-repo-repeat-again-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + { + name: "git resolvedDependencies from taskrun params", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ //nolint:staticcheck + Spec: v1.TaskRunSpec{ + Params: []v1.Param{{ + Name: "CHAINS-GIT_COMMIT", + Value: *v1.NewStructuredValues("my-commit"), }, { - Name: "build", - ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + Name: "CHAINS-GIT_URL", + Value: *v1.NewStructuredValues("github.com/something"), }}, }, + }), + want: []v1slsa.ResourceDescriptor{ + { + Name: "inputs/result", + URI: "git+github.com/something.git", + Digest: common.DigestSet{ + "sha1": "my-commit", + }, + }, }, }, - want: []v1.ResourceDescriptor{ - { - URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", - Digest: common.DigestSet{ - "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + { + name: "resolvedDependencies from step images", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ //nolint:staticcheck + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Steps: []v1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + }, }, - }, - { - URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", - Digest: common.DigestSet{ - "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }), + want: 
[]v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, }, - }, - }, - }, { - name: "resolvedDependencies from step and sidecar images", - taskRun: &v1beta1.TaskRun{ //nolint:staticcheck - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Steps: []v1beta1.StepState{{ - Name: "git-source-repo-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "git-source-repo-repeat-again-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, { - Name: "build", - ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", - }}, - Sidecars: []v1beta1.SidecarState{{ - Name: "sidecar-jwqcl", - ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", - }}, + { + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, }, }, }, - want: []v1.ResourceDescriptor{ - { - URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", - Digest: common.DigestSet{ - "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", - }, - }, { - URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", - Digest: common.DigestSet{ - "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + { + name: "resolvedDependencies from step and sidecar images", + obj: objects.NewTaskRunObjectV1(&v1.TaskRun{ //nolint:staticcheck + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Steps: []v1.StepState{{ + Name: "git-source-repo-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "git-source-repo-repeat-again-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init@sha256:b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, { + Name: "build", + ImageID: "gcr.io/cloud-marketplace-containers/google/bazel@sha256:010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }}, + Sidecars: []v1.SidecarState{{ + Name: "sidecar-jwqcl", + ImageID: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init@sha256:a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }}, + }, }, - }, { - URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", - Digest: common.DigestSet{ - "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }), + want: []v1slsa.ResourceDescriptor{ + { + URI: "oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init", + Digest: common.DigestSet{ + "sha256": "b963f6e7a69617db57b685893256f978436277094c21d43b153994acd8a01247", + }, + }, { + URI: "oci://gcr.io/cloud-marketplace-containers/google/bazel", + Digest: common.DigestSet{ + "sha256": "010a1ecd1a8c3610f12039a25b823e3a17bd3e8ae455a53e340dcfdd37a49964", + }, + }, { + URI: 
"oci://gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/sidecar-git-init", + Digest: common.DigestSet{ + "sha256": "a1234f6e7a69617db57b685893256f978436277094c21d43b153994acd8a09567", + }, }, }, - }, - }} + }} for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { ctx := logtesting.TestContextWithLogger(t) - rd, err := TaskRun(ctx, objects.NewTaskRunObject(tc.taskRun)) + var input *objects.TaskRunObjectV1 + var err error + if obj, ok := tc.obj.(*objects.TaskRunObjectV1); ok { + input = obj + } + + if trV1Beta1, ok := tc.obj.GetObject().(*v1beta1.TaskRun); ok { //nolint:staticcheck + trV1 := &v1.TaskRun{} + if err := trV1Beta1.ConvertTo(ctx, trV1); err == nil { + if trV1Beta1.Spec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Spec.Resources) //nolint:staticcheck + if err != nil { + t.Errorf("Error serializing to JSON: %v", err) + } + trV1.Annotations["tekton.dev/v1beta1-spec-resources"] = string(jsonData) + } + input = objects.NewTaskRunObjectV1(trV1) + } + } + + rd, err := TaskRun(ctx, input) if err != nil { t.Fatalf("Did not expect an error but got %v", err) } diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun.go b/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun.go index 9f53d253f0..0041adce46 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun.go @@ -32,7 +32,7 @@ import ( const taskRunResults = "taskRunResults/%s" // GenerateAttestation generates a provenance statement with SLSA v1.0 predicate for a task run. -func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObject, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { +func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObjectV1, slsaConfig *slsaconfig.SlsaConfig) (interface{}, error) { bp, err := byproducts(tro) if err != nil { return nil, err @@ -63,7 +63,7 @@ func GenerateAttestation(ctx context.Context, tro *objects.TaskRunObject, slsaCo return att, nil } -func metadata(tro *objects.TaskRunObject) slsa.BuildMetadata { +func metadata(tro *objects.TaskRunObjectV1) slsa.BuildMetadata { m := slsa.BuildMetadata{ InvocationID: string(tro.ObjectMeta.UID), } @@ -79,9 +79,9 @@ func metadata(tro *objects.TaskRunObject) slsa.BuildMetadata { } // byproducts contains the taskRunResults -func byproducts(tro *objects.TaskRunObject) ([]slsa.ResourceDescriptor, error) { +func byproducts(tro *objects.TaskRunObjectV1) ([]slsa.ResourceDescriptor, error) { byProd := []slsa.ResourceDescriptor{} - for _, key := range tro.Status.TaskRunResults { + for _, key := range tro.Status.Results { content, err := json.Marshal(key.Value) if err != nil { return nil, err @@ -97,7 +97,7 @@ func byproducts(tro *objects.TaskRunObject) ([]slsa.ResourceDescriptor, error) { } // getBuildDefinition get the buildDefinition based on the configured buildType. 
This will default to the slsa buildType -func getBuildDefinition(ctx context.Context, buildType string, tro *objects.TaskRunObject) (slsa.ProvenanceBuildDefinition, error) { +func getBuildDefinition(ctx context.Context, buildType string, tro *objects.TaskRunObjectV1) (slsa.ProvenanceBuildDefinition, error) { // if buildType is not set in the chains-config, default to slsa build type buildDefinitionType := buildType if buildType == "" { diff --git a/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun_test.go b/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun_test.go index 731d74a1cd..7b704f5bab 100644 --- a/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/internal/taskrun/taskrun_test.go @@ -36,14 +36,14 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/internal/objectloader" "github.com/tektoncd/pipeline/pkg/apis/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" ) func TestMetadata(t *testing.T) { - tr := &v1beta1.TaskRun{ //nolint:staticcheck - ObjectMeta: v1.ObjectMeta{ + tr := &v1.TaskRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ @@ -51,10 +51,10 @@ func TestMetadata(t *testing.T) { }, UID: "abhhf-12354-asjsdbjs23-3435353n", }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, time.UTC)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, } @@ -65,7 +65,7 @@ func TestMetadata(t *testing.T) { StartedOn: &start, FinishedOn: &end, } - got := metadata(objects.NewTaskRunObject(tr)) + got := metadata(objects.NewTaskRunObjectV1(tr)) if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -73,8 +73,8 @@ func TestMetadata(t *testing.T) { func TestMetadataInTimeZone(t *testing.T) { tz := time.FixedZone("Test Time", int((12 * time.Hour).Seconds())) - tr := &v1beta1.TaskRun{ //nolint:staticcheck - ObjectMeta: v1.ObjectMeta{ + tr := &v1.TaskRun{ //nolint:staticcheck + ObjectMeta: metav1.ObjectMeta{ Name: "my-taskrun", Namespace: "my-namespace", Annotations: map[string]string{ @@ -82,10 +82,10 @@ func TestMetadataInTimeZone(t *testing.T) { }, UID: "abhhf-12354-asjsdbjs23-3435353n", }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - StartTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + StartTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 12, tz)}, + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, tz)}, }, }, } @@ -96,18 +96,18 @@ func TestMetadataInTimeZone(t *testing.T) { StartedOn: &start, FinishedOn: &end, } - got := 
metadata(objects.NewTaskRunObject(tr)) + got := metadata(objects.NewTaskRunObjectV1(tr)) if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } } func TestByProducts(t *testing.T) { - resultValue := v1beta1.ResultValue{Type: "string", StringVal: "result-value"} - tr := &v1beta1.TaskRun{ //nolint:staticcheck - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ + resultValue := v1.ResultValue{Type: "string", StringVal: "result-value"} + tr := &v1.TaskRun{ //nolint:staticcheck + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ { Name: "result-name", Value: resultValue, @@ -128,7 +128,7 @@ func TestByProducts(t *testing.T) { MediaType: pipelinerun.JsonMediaType, }, } - got, err := byproducts(objects.NewTaskRunObject(tr)) + got, err := byproducts(objects.NewTaskRunObjectV1(tr)) if err != nil { t.Fatalf("Could not extract byproducts: %s", err) } @@ -146,12 +146,12 @@ func TestTaskRunGenerateAttestation(t *testing.T) { e1BuildStart := time.Unix(1617011400, 0) e1BuildFinished := time.Unix(1617011415, 0) - resultValue := v1beta1.ResultValue{Type: "string", StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"} + resultValue := v1.ResultValue{Type: "string", StringVal: "sha256:827521c857fdcd4374f4da5442fbae2edb01e7fbae285c3ec15673d4c1daecb7"} resultBytesDigest, err := json.Marshal(resultValue) if err != nil { t.Fatalf("Could not marshal results: %s", err) } - resultValue = v1beta1.ResultValue{Type: "string", StringVal: "gcr.io/my/image"} + resultValue = v1.ResultValue{Type: "string", StringVal: "gcr.io/my/image"} resultBytesUri, err := json.Marshal(resultValue) if err != nil { t.Fatalf("Could not marshal results: %s", err) @@ -225,7 +225,7 @@ func TestTaskRunGenerateAttestation(t *testing.T) { }, } - got, err := GenerateAttestation(ctx, objects.NewTaskRunObject(tr), &slsaconfig.SlsaConfig{ + got, err := GenerateAttestation(ctx, objects.NewTaskRunObjectV1(tr), &slsaconfig.SlsaConfig{ BuilderID: "test_builder-1", BuildType: "https://tekton.dev/chains/v2/slsa", }) @@ -238,7 +238,7 @@ func TestTaskRunGenerateAttestation(t *testing.T) { } } -func getResolvedDependencies(tro *objects.TaskRunObject) []v1resourcedescriptor.ResourceDescriptor { +func getResolvedDependencies(tro *objects.TaskRunObjectV1) []v1resourcedescriptor.ResourceDescriptor { rd, err := resolveddependencies.TaskRun(context.Background(), tro) if err != nil { return []v1resourcedescriptor.ResourceDescriptor{} @@ -259,7 +259,7 @@ func TestGetBuildDefinition(t *testing.T) { "label1": "label1", } - tro := objects.NewTaskRunObject(tr) + tro := objects.NewTaskRunObjectV1(tr) tests := []struct { name string buildType string @@ -322,7 +322,7 @@ func TestUnsupportedBuildType(t *testing.T) { t.Fatal(err) } - got, err := getBuildDefinition(context.Background(), "bad-buildType", objects.NewTaskRunObject(tr)) + got, err := getBuildDefinition(context.Background(), "bad-buildType", objects.NewTaskRunObjectV1(tr)) if err == nil { t.Error("getBuildDefinition(): expected error got nil") } diff --git a/pkg/chains/formats/slsa/v2alpha2/slsav2.go b/pkg/chains/formats/slsa/v2alpha2/slsav2.go index 2368b459c5..1f28585b51 100644 --- a/pkg/chains/formats/slsa/v2alpha2/slsav2.go +++ b/pkg/chains/formats/slsa/v2alpha2/slsav2.go @@ -56,9 +56,9 @@ func (s *Slsa) Wrap() bool { func (s *Slsa) CreatePayload(ctx context.Context, obj interface{}) (interface{}, error) { switch v := 
obj.(type) { - case *objects.TaskRunObject: + case *objects.TaskRunObjectV1: return taskrun.GenerateAttestation(ctx, v, s.slsaConfig) - case *objects.PipelineRunObject: + case *objects.PipelineRunObjectV1: return pipelinerun.GenerateAttestation(ctx, v, s.slsaConfig) default: return nil, fmt.Errorf("intoto does not support type: %s", v) diff --git a/pkg/chains/formats/slsa/v2alpha2/slsav2_test.go b/pkg/chains/formats/slsa/v2alpha2/slsav2_test.go index 38c0107ca9..9abcdbd11c 100644 --- a/pkg/chains/formats/slsa/v2alpha2/slsav2_test.go +++ b/pkg/chains/formats/slsa/v2alpha2/slsav2_test.go @@ -187,7 +187,7 @@ func TestTaskRunCreatePayload1(t *testing.T) { i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -272,7 +272,7 @@ func TestTaskRunCreatePayload2(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) @@ -352,7 +352,7 @@ func TestMultipleSubjects(t *testing.T) { } i, _ := NewFormatter(cfg) - got, err := i.CreatePayload(ctx, objects.NewTaskRunObject(tr)) + got, err := i.CreatePayload(ctx, objects.NewTaskRunObjectV1(tr)) if err != nil { t.Errorf("unexpected error: %s", err.Error()) } @@ -361,7 +361,7 @@ func TestMultipleSubjects(t *testing.T) { } } -func createPro(path string) *objects.PipelineRunObject { +func createPro(path string) *objects.PipelineRunObjectV1 { pr, err := objectloader.PipelineRunFromFile(path) if err != nil { panic(err) @@ -374,7 +374,7 @@ func createPro(path string) *objects.PipelineRunObject { if err != nil { panic(err) } - p := objects.NewPipelineRunObject(pr) + p := objects.NewPipelineRunObjectV1(pr) p.AppendTaskRun(tr1) p.AppendTaskRun(tr2) return p diff --git a/pkg/chains/objects/objects.go b/pkg/chains/objects/objects.go index d89204af28..13b43c869e 100644 --- a/pkg/chains/objects/objects.go +++ b/pkg/chains/objects/objects.go @@ -19,7 +19,10 @@ import ( "fmt" "strings" + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + "github.com/tektoncd/pipeline/pkg/apis/config" "github.com/tektoncd/pipeline/pkg/apis/pipeline/pod" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -40,14 +43,158 @@ type Object interface { runtime.Object } -// Result is a generic key value store containing the results +// GenericResult is a generic key value store containing the results // of Tekton operations. (eg. PipelineRun and TaskRun results) -type Result struct { +type GenericResult interface { + // GetName returns the name associated with the result. + GetName() string + + // GetStringValue returns the string value of the result. + GetStringValue() string + + // GetObjectValue returns the object value for the specified field. + GetObjectValue(field string) string + + // ObjectValueIsNil checks if the object value is nil. 
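// Illustrative sketch (hypothetical helper, assumes "fmt" is imported): with the
// GenericResult interface being introduced here, downstream code can read results
// without knowing whether a run used the v1 or the v1beta1 API.
func logResults(results []GenericResult) {
	for _, r := range results {
		if r.ObjectValueIsNil() {
			// string-typed result
			fmt.Printf("%s=%s\n", r.GetName(), r.GetStringValue())
			continue
		}
		// object-typed result, e.g. an *_ARTIFACT_INPUTS result with uri/digest fields
		fmt.Printf("%s: uri=%s digest=%s\n", r.GetName(), r.GetObjectValue("uri"), r.GetObjectValue("digest"))
	}
}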
+	ObjectValueIsNil() bool
+}
+
+type GenericProvenance interface {
+	IsNil() bool
+	RefSourceIsNil() bool
+
+	GetRefSourceURI() string
+	GetRefSourceDigest() common.DigestSet
+	GetRefSourceEntrypoint() string
+
+	FeatureFlagsIsNil() bool
+	GetFeatureFlags() *config.FeatureFlags
+}
+
+// ProvenanceV1 is a struct implementing the GenericProvenance interface.
+type ProvenanceV1 struct {
+	Provenance *v1.Provenance
+}
+
+// IsNil checks if the provenance is nil.
+func (p *ProvenanceV1) IsNil() bool {
+	return p.Provenance == nil
+}
+
+// RefSourceIsNil checks if the reference source is nil.
+func (p *ProvenanceV1) RefSourceIsNil() bool {
+	return p.Provenance.RefSource == nil
+}
+
+// GetRefSourceURI returns the URI of the reference source.
+func (p *ProvenanceV1) GetRefSourceURI() string {
+	return p.Provenance.RefSource.URI
+}
+
+// GetRefSourceDigest returns the digest set of the reference source.
+func (p *ProvenanceV1) GetRefSourceDigest() common.DigestSet {
+	return p.Provenance.RefSource.Digest
+}
+
+// GetRefSourceEntrypoint returns the entrypoint of the reference source.
+func (p *ProvenanceV1) GetRefSourceEntrypoint() string {
+	return p.Provenance.RefSource.EntryPoint
+}
+
+func (p *ProvenanceV1) FeatureFlagsIsNil() bool {
+	return p.Provenance.FeatureFlags == nil
+}
+
+func (p *ProvenanceV1) GetFeatureFlags() *config.FeatureFlags {
+	return p.Provenance.FeatureFlags
+}
+
+// ProvenanceV1Beta1 is a struct implementing the GenericProvenance interface.
+type ProvenanceV1Beta1 struct {
+	Provenance *v1beta1.Provenance
+}
+
+// IsNil checks if the provenance is nil.
+func (p *ProvenanceV1Beta1) IsNil() bool {
+	return p.Provenance == nil
+}
+
+// RefSourceIsNil checks if the reference source is nil.
+func (p *ProvenanceV1Beta1) RefSourceIsNil() bool {
+	return p.Provenance.RefSource == nil
+}
+
+// GetRefSourceURI returns the URI of the reference source.
+func (p *ProvenanceV1Beta1) GetRefSourceURI() string {
+	return p.Provenance.RefSource.URI
+}
+
+// GetRefSourceDigest returns the digest set of the reference source.
+func (p *ProvenanceV1Beta1) GetRefSourceDigest() common.DigestSet {
+	return p.Provenance.RefSource.Digest
+}
+
+// GetRefSourceEntrypoint returns the entrypoint of the reference source.
+func (p *ProvenanceV1Beta1) GetRefSourceEntrypoint() string {
+	return p.Provenance.RefSource.EntryPoint
+}
+
+func (p *ProvenanceV1Beta1) FeatureFlagsIsNil() bool {
+	return p.Provenance.FeatureFlags == nil
+}
+
+func (p *ProvenanceV1Beta1) GetFeatureFlags() *config.FeatureFlags {
+	return p.Provenance.FeatureFlags
+}
+
+// ResultV1 is a generic key value store containing the results
+// of Tekton operations. (eg. PipelineRun and TaskRun results)
+type ResultV1 struct {
+	Name  string
+	Type  v1.ResultsType
+	Value v1.ParamValue
+}
+
+func (res ResultV1) GetName() string {
+	return res.Name
+}
+
+func (res ResultV1) GetStringValue() string {
+	return res.Value.StringVal
+}
+
+func (res ResultV1) GetObjectValue(field string) string {
+	return res.Value.ObjectVal[field]
+}
+
+func (res ResultV1) ObjectValueIsNil() bool {
+	return res.Value.ObjectVal == nil
+}
+
+// ResultV1Beta1 is a generic key value store containing the results
+// of Tekton operations. (eg.
PipelineRun and TaskRun results) +type ResultV1Beta1 struct { Name string Type v1beta1.ResultsType Value v1beta1.ParamValue } +func (res ResultV1Beta1) GetName() string { + return res.Name +} + +func (res ResultV1Beta1) GetStringValue() string { + return res.Value.StringVal +} + +func (res ResultV1Beta1) GetObjectValue(field string) string { + return res.Value.ObjectVal[field] +} + +func (res ResultV1Beta1) ObjectValueIsNil() bool { + return res.Value.ObjectVal == nil +} + // Tekton object is an extended Kubernetes object with operations specific // to Tekton objects. type TektonObject interface { @@ -57,8 +204,8 @@ type TektonObject interface { GetObject() interface{} GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error - GetResults() []Result - GetProvenance() *v1beta1.Provenance + GetResults() []GenericResult + GetProvenance() GenericProvenance GetServiceAccountName() string GetPullSecrets() []string IsDone() bool @@ -66,70 +213,74 @@ type TektonObject interface { SupportsTaskRunArtifact() bool SupportsPipelineRunArtifact() bool SupportsOCIArtifact() bool - GetRemoteProvenance() *v1beta1.Provenance + GetRemoteProvenance() GenericProvenance IsRemote() bool } func NewTektonObject(i interface{}) (TektonObject, error) { switch o := i.(type) { - case *v1beta1.PipelineRun: - return NewPipelineRunObject(o), nil - case *v1beta1.TaskRun: - return NewTaskRunObject(o), nil + case *v1.PipelineRun: + return NewPipelineRunObjectV1(o), nil + case *v1.TaskRun: + return NewTaskRunObjectV1(o), nil + case *v1beta1.PipelineRun: //nolint:staticcheck + return NewPipelineRunObjectV1Beta1(o), nil + case *v1beta1.TaskRun: //nolint:staticcheck + return NewTaskRunObjectV1Beta1(o), nil default: return nil, errors.New("unrecognized type when attempting to create tekton object") } } -// TaskRunObject extends v1beta1.TaskRun with additional functions. -type TaskRunObject struct { - *v1beta1.TaskRun +// TaskRunObjectV1 extends v1.TaskRun with additional functions. 
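// Illustrative sketch (hypothetical helper): NewTektonObject now accepts both API
// versions, and GetResults returns the version-agnostic GenericResult, so a single
// code path can handle v1 and v1beta1 runs.
func exampleResultsForEitherVersion(trV1 *v1.TaskRun, trV1Beta1 *v1beta1.TaskRun) ([]string, error) { //nolint:staticcheck
	values := []string{}
	for _, run := range []interface{}{trV1, trV1Beta1} {
		obj, err := NewTektonObject(run)
		if err != nil {
			return nil, err
		}
		for _, r := range obj.GetResults() {
			// ResultV1 and ResultV1Beta1 both satisfy GenericResult
			values = append(values, r.GetStringValue())
		}
	}
	return values, nil
}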
+type TaskRunObjectV1 struct { + *v1.TaskRun } -var _ TektonObject = &TaskRunObject{} +var _ TektonObject = &TaskRunObjectV1{} -func NewTaskRunObject(tr *v1beta1.TaskRun) *TaskRunObject { - return &TaskRunObject{ +func NewTaskRunObjectV1(tr *v1.TaskRun) *TaskRunObjectV1 { + return &TaskRunObjectV1{ tr, } } // Get the TaskRun GroupVersionKind -func (tro *TaskRunObject) GetGVK() string { +func (tro *TaskRunObjectV1) GetGVK() string { return fmt.Sprintf("%s/%s", tro.GetGroupVersionKind().GroupVersion().String(), tro.GetGroupVersionKind().Kind) } -func (tro *TaskRunObject) GetKindName() string { +func (tro *TaskRunObjectV1) GetKindName() string { return strings.ToLower(tro.GetGroupVersionKind().Kind) } -func (tro *TaskRunObject) GetProvenance() *v1beta1.Provenance { - return tro.Status.Provenance +func (tro *TaskRunObjectV1) GetProvenance() GenericProvenance { + return &ProvenanceV1{tro.Status.Provenance} } // Get the latest annotations on the TaskRun -func (tro *TaskRunObject) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { - tr, err := clientSet.TektonV1beta1().TaskRuns(tro.Namespace).Get(ctx, tro.Name, metav1.GetOptions{}) +func (tro *TaskRunObjectV1) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { + tr, err := clientSet.TektonV1().TaskRuns(tro.Namespace).Get(ctx, tro.Name, metav1.GetOptions{}) return tr.Annotations, err } // Get the base TaskRun object -func (tro *TaskRunObject) GetObject() interface{} { +func (tro *TaskRunObjectV1) GetObject() interface{} { return tro.TaskRun } // Patch the original TaskRun object -func (tro *TaskRunObject) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { - _, err := clientSet.TektonV1beta1().TaskRuns(tro.Namespace).Patch( +func (tro *TaskRunObjectV1) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { + _, err := clientSet.TektonV1().TaskRuns(tro.Namespace).Patch( ctx, tro.Name, types.MergePatchType, patchBytes, metav1.PatchOptions{}) return err } // Get the TaskRun results -func (tro *TaskRunObject) GetResults() []Result { - res := []Result{} - for _, key := range tro.Status.TaskRunResults { - res = append(res, Result{ +func (tro *TaskRunObjectV1) GetResults() []GenericResult { + res := []GenericResult{} + for _, key := range tro.Status.Results { + res = append(res, ResultV1{ Name: key.Name, Value: key.Value, }) @@ -137,7 +288,7 @@ func (tro *TaskRunObject) GetResults() []Result { return res } -func (tro *TaskRunObject) GetStepImages() []string { +func (tro *TaskRunObjectV1) GetStepImages() []string { images := []string{} for _, stepState := range tro.Status.Steps { images = append(images, stepState.ImageID) @@ -145,7 +296,7 @@ func (tro *TaskRunObject) GetStepImages() []string { return images } -func (tro *TaskRunObject) GetSidecarImages() []string { +func (tro *TaskRunObjectV1) GetSidecarImages() []string { images := []string{} for _, sidecarState := range tro.Status.Sidecars { images = append(images, sidecarState.ImageID) @@ -154,35 +305,35 @@ func (tro *TaskRunObject) GetSidecarImages() []string { } // Get the ServiceAccount declared in the TaskRun -func (tro *TaskRunObject) GetServiceAccountName() string { +func (tro *TaskRunObjectV1) GetServiceAccountName() string { return tro.Spec.ServiceAccountName } // Get the imgPullSecrets from the pod template -func (tro *TaskRunObject) GetPullSecrets() []string { +func (tro *TaskRunObjectV1) GetPullSecrets() []string { return 
getPodPullSecrets(tro.Spec.PodTemplate) } -func (tro *TaskRunObject) SupportsTaskRunArtifact() bool { +func (tro *TaskRunObjectV1) SupportsTaskRunArtifact() bool { return true } -func (tro *TaskRunObject) SupportsPipelineRunArtifact() bool { +func (tro *TaskRunObjectV1) SupportsPipelineRunArtifact() bool { return false } -func (tro *TaskRunObject) SupportsOCIArtifact() bool { +func (tro *TaskRunObjectV1) SupportsOCIArtifact() bool { return true } -func (tro *TaskRunObject) GetRemoteProvenance() *v1beta1.Provenance { +func (tro *TaskRunObjectV1) GetRemoteProvenance() GenericProvenance { if t := tro.Status.Provenance; t != nil && t.RefSource != nil && tro.IsRemote() { - return tro.Status.Provenance + return &ProvenanceV1{tro.Status.Provenance} } return nil } -func (tro *TaskRunObject) IsRemote() bool { +func (tro *TaskRunObjectV1) IsRemote() bool { isRemoteTask := false if tro.Spec.TaskRef != nil { if tro.Spec.TaskRef.Resolver != "" && tro.Spec.TaskRef.Resolver != "Cluster" { @@ -192,58 +343,58 @@ func (tro *TaskRunObject) IsRemote() bool { return isRemoteTask } -// PipelineRunObject extends v1beta1.PipelineRun with additional functions. -type PipelineRunObject struct { +// PipelineRunObjectV1 extends v1.PipelineRun with additional functions. +type PipelineRunObjectV1 struct { // The base PipelineRun - *v1beta1.PipelineRun + *v1.PipelineRun // taskRuns that were apart of this PipelineRun - taskRuns []*v1beta1.TaskRun + taskRuns []*v1.TaskRun } -var _ TektonObject = &PipelineRunObject{} +var _ TektonObject = &PipelineRunObjectV1{} -func NewPipelineRunObject(pr *v1beta1.PipelineRun) *PipelineRunObject { - return &PipelineRunObject{ +func NewPipelineRunObjectV1(pr *v1.PipelineRun) *PipelineRunObjectV1 { + return &PipelineRunObjectV1{ PipelineRun: pr, } } // Get the PipelineRun GroupVersionKind -func (pro *PipelineRunObject) GetGVK() string { +func (pro *PipelineRunObjectV1) GetGVK() string { return fmt.Sprintf("%s/%s", pro.GetGroupVersionKind().GroupVersion().String(), pro.GetGroupVersionKind().Kind) } -func (pro *PipelineRunObject) GetKindName() string { +func (pro *PipelineRunObjectV1) GetKindName() string { return strings.ToLower(pro.GetGroupVersionKind().Kind) } // Request the current annotations on the PipelineRun object -func (pro *PipelineRunObject) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { - pr, err := clientSet.TektonV1beta1().PipelineRuns(pro.Namespace).Get(ctx, pro.Name, metav1.GetOptions{}) +func (pro *PipelineRunObjectV1) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { + pr, err := clientSet.TektonV1().PipelineRuns(pro.Namespace).Get(ctx, pro.Name, metav1.GetOptions{}) return pr.Annotations, err } // Get the base PipelineRun -func (pro *PipelineRunObject) GetObject() interface{} { +func (pro *PipelineRunObjectV1) GetObject() interface{} { return pro.PipelineRun } // Patch the original PipelineRun object -func (pro *PipelineRunObject) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { - _, err := clientSet.TektonV1beta1().PipelineRuns(pro.Namespace).Patch( +func (pro *PipelineRunObjectV1) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { + _, err := clientSet.TektonV1().PipelineRuns(pro.Namespace).Patch( ctx, pro.Name, types.MergePatchType, patchBytes, metav1.PatchOptions{}) return err } -func (pro *PipelineRunObject) GetProvenance() *v1beta1.Provenance { - return pro.Status.Provenance +func (pro 
*PipelineRunObjectV1) GetProvenance() GenericProvenance { + return &ProvenanceV1{pro.Status.Provenance} } // Get the resolved Pipelinerun results -func (pro *PipelineRunObject) GetResults() []Result { - res := []Result{} - for _, key := range pro.Status.PipelineResults { - res = append(res, Result{ +func (pro *PipelineRunObjectV1) GetResults() []GenericResult { + res := []GenericResult{} + for _, key := range pro.Status.Results { + res = append(res, ResultV1{ Name: key.Name, Value: key.Value, }) @@ -252,56 +403,56 @@ func (pro *PipelineRunObject) GetResults() []Result { } // Get the ServiceAccount declared in the PipelineRun -func (pro *PipelineRunObject) GetServiceAccountName() string { - return pro.Spec.ServiceAccountName +func (pro *PipelineRunObjectV1) GetServiceAccountName() string { + return pro.Spec.TaskRunTemplate.ServiceAccountName } // Get the ServiceAccount declared in the PipelineRun -func (pro *PipelineRunObject) IsSuccessful() bool { +func (pro *PipelineRunObjectV1) IsSuccessful() bool { return pro.Status.GetCondition(apis.ConditionSucceeded).IsTrue() } // Append TaskRuns to this PipelineRun -func (pro *PipelineRunObject) AppendTaskRun(tr *v1beta1.TaskRun) { +func (pro *PipelineRunObjectV1) AppendTaskRun(tr *v1.TaskRun) { pro.taskRuns = append(pro.taskRuns, tr) } // Get the associated TaskRun via the Task name -func (pro *PipelineRunObject) GetTaskRunFromTask(taskName string) *TaskRunObject { +func (pro *PipelineRunObjectV1) GetTaskRunFromTask(taskName string) *TaskRunObjectV1 { for _, tr := range pro.taskRuns { val, ok := tr.Labels[PipelineTaskLabel] if ok && val == taskName { - return NewTaskRunObject(tr) + return NewTaskRunObjectV1(tr) } } return nil } // Get the imgPullSecrets from the pod template -func (pro *PipelineRunObject) GetPullSecrets() []string { - return getPodPullSecrets(pro.Spec.PodTemplate) +func (pro *PipelineRunObjectV1) GetPullSecrets() []string { + return getPodPullSecrets(pro.Spec.TaskRunTemplate.PodTemplate) } -func (pro *PipelineRunObject) SupportsTaskRunArtifact() bool { +func (pro *PipelineRunObjectV1) SupportsTaskRunArtifact() bool { return false } -func (pro *PipelineRunObject) SupportsPipelineRunArtifact() bool { +func (pro *PipelineRunObjectV1) SupportsPipelineRunArtifact() bool { return true } -func (pro *PipelineRunObject) SupportsOCIArtifact() bool { +func (pro *PipelineRunObjectV1) SupportsOCIArtifact() bool { return false } -func (pro *PipelineRunObject) GetRemoteProvenance() *v1beta1.Provenance { +func (pro *PipelineRunObjectV1) GetRemoteProvenance() GenericProvenance { if p := pro.Status.Provenance; p != nil && p.RefSource != nil && pro.IsRemote() { - return pro.Status.Provenance + return &ProvenanceV1{pro.Status.Provenance} } return nil } -func (pro *PipelineRunObject) IsRemote() bool { +func (pro *PipelineRunObjectV1) IsRemote() bool { isRemotePipeline := false if pro.Spec.PipelineRef != nil { if pro.Spec.PipelineRef.Resolver != "" && pro.Spec.PipelineRef.Resolver != "Cluster" { @@ -321,3 +472,233 @@ func getPodPullSecrets(podTemplate *pod.Template) []string { } return imgPullSecrets } + +// PipelineRunObjectV1Beta1 extends v1.PipelineRun with additional functions. 
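// Illustrative sketch (hypothetical helper): because GetProvenance now returns the
// GenericProvenance interface, callers that previously handled *v1beta1.Provenance
// directly can stay version-neutral.
func refSourceURI(obj TektonObject) string {
	prov := obj.GetProvenance()
	if prov == nil || prov.IsNil() || prov.RefSourceIsNil() {
		return ""
	}
	return prov.GetRefSourceURI()
}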
+type PipelineRunObjectV1Beta1 struct { + // The base PipelineRun + *v1beta1.PipelineRun + // taskRuns that were apart of this PipelineRun + taskRuns []*v1beta1.TaskRun //nolint:staticcheck +} + +var _ TektonObject = &PipelineRunObjectV1Beta1{} + +func NewPipelineRunObjectV1Beta1(pr *v1beta1.PipelineRun) *PipelineRunObjectV1Beta1 { //nolint:staticcheck + return &PipelineRunObjectV1Beta1{ + PipelineRun: pr, + } +} + +// Get the PipelineRun GroupVersionKind +func (pro *PipelineRunObjectV1Beta1) GetGVK() string { + return fmt.Sprintf("%s/%s", pro.GetGroupVersionKind().GroupVersion().String(), pro.GetGroupVersionKind().Kind) +} + +func (pro *PipelineRunObjectV1Beta1) GetKindName() string { + return strings.ToLower(pro.GetGroupVersionKind().Kind) +} + +// Request the current annotations on the PipelineRun object +func (pro *PipelineRunObjectV1Beta1) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { + pr, err := clientSet.TektonV1beta1().PipelineRuns(pro.Namespace).Get(ctx, pro.Name, metav1.GetOptions{}) + return pr.Annotations, err +} + +// Get the base PipelineRun +func (pro *PipelineRunObjectV1Beta1) GetObject() interface{} { + return pro.PipelineRun +} + +// Patch the original PipelineRun object +func (pro *PipelineRunObjectV1Beta1) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { + _, err := clientSet.TektonV1beta1().PipelineRuns(pro.Namespace).Patch( + ctx, pro.Name, types.MergePatchType, patchBytes, metav1.PatchOptions{}) + return err +} + +func (pro *PipelineRunObjectV1Beta1) GetProvenance() GenericProvenance { + return &ProvenanceV1Beta1{pro.Status.Provenance} +} + +// Get the resolved Pipelinerun results +func (pro *PipelineRunObjectV1Beta1) GetResults() []GenericResult { + res := []GenericResult{} + for _, key := range pro.Status.PipelineResults { + res = append(res, ResultV1Beta1{ + Name: key.Name, + Value: key.Value, + }) + } + return res +} + +// Get the ServiceAccount declared in the PipelineRun +func (pro *PipelineRunObjectV1Beta1) GetServiceAccountName() string { + return pro.Spec.ServiceAccountName +} + +// Get the ServiceAccount declared in the PipelineRun +func (pro *PipelineRunObjectV1Beta1) IsSuccessful() bool { + return pro.Status.GetCondition(apis.ConditionSucceeded).IsTrue() +} + +// Append TaskRuns to this PipelineRun +func (pro *PipelineRunObjectV1Beta1) AppendTaskRun(tr *v1beta1.TaskRun) { //nolint:staticcheck + pro.taskRuns = append(pro.taskRuns, tr) +} + +// Get the associated TaskRun via the Task name +func (pro *PipelineRunObjectV1Beta1) GetTaskRunFromTask(taskName string) *TaskRunObjectV1Beta1 { + for _, tr := range pro.taskRuns { + val, ok := tr.Labels[PipelineTaskLabel] + if ok && val == taskName { + return NewTaskRunObjectV1Beta1(tr) + } + } + return nil +} + +// Get the imgPullSecrets from the pod template +func (pro *PipelineRunObjectV1Beta1) GetPullSecrets() []string { + return getPodPullSecrets(pro.Spec.PodTemplate) +} + +func (pro *PipelineRunObjectV1Beta1) SupportsTaskRunArtifact() bool { + return false +} + +func (pro *PipelineRunObjectV1Beta1) SupportsPipelineRunArtifact() bool { + return true +} + +func (pro *PipelineRunObjectV1Beta1) SupportsOCIArtifact() bool { + return false +} + +func (pro *PipelineRunObjectV1Beta1) GetRemoteProvenance() GenericProvenance { + if p := pro.Status.Provenance; p != nil && p.RefSource != nil && pro.IsRemote() { + return &ProvenanceV1Beta1{pro.Status.Provenance} + } + return nil +} + +func (pro *PipelineRunObjectV1Beta1) IsRemote() 
bool { + isRemotePipeline := false + if pro.Spec.PipelineRef != nil { + if pro.Spec.PipelineRef.Resolver != "" && pro.Spec.PipelineRef.Resolver != "Cluster" { + isRemotePipeline = true + } + } + return isRemotePipeline +} + +// TaskRunObjectV1Beta1 extends v1beta1.TaskRun with additional functions. +type TaskRunObjectV1Beta1 struct { + *v1beta1.TaskRun +} + +var _ TektonObject = &TaskRunObjectV1Beta1{} + +func NewTaskRunObjectV1Beta1(tr *v1beta1.TaskRun) *TaskRunObjectV1Beta1 { //nolint:staticcheck + return &TaskRunObjectV1Beta1{ + tr, + } +} + +// Get the TaskRun GroupVersionKind +func (tro *TaskRunObjectV1Beta1) GetGVK() string { + return fmt.Sprintf("%s/%s", tro.GetGroupVersionKind().GroupVersion().String(), tro.GetGroupVersionKind().Kind) +} + +func (tro *TaskRunObjectV1Beta1) GetKindName() string { + return strings.ToLower(tro.GetGroupVersionKind().Kind) +} + +func (tro *TaskRunObjectV1Beta1) GetProvenance() GenericProvenance { + return &ProvenanceV1Beta1{tro.Status.Provenance} +} + +// Get the latest annotations on the TaskRun +func (tro *TaskRunObjectV1Beta1) GetLatestAnnotations(ctx context.Context, clientSet versioned.Interface) (map[string]string, error) { + tr, err := clientSet.TektonV1beta1().TaskRuns(tro.Namespace).Get(ctx, tro.Name, metav1.GetOptions{}) + return tr.Annotations, err +} + +// Get the base TaskRun object +func (tro *TaskRunObjectV1Beta1) GetObject() interface{} { + return tro.TaskRun +} + +// Patch the original TaskRun object +func (tro *TaskRunObjectV1Beta1) Patch(ctx context.Context, clientSet versioned.Interface, patchBytes []byte) error { + _, err := clientSet.TektonV1beta1().TaskRuns(tro.Namespace).Patch( + ctx, tro.Name, types.MergePatchType, patchBytes, metav1.PatchOptions{}) + return err +} + +// Get the TaskRun results +func (tro *TaskRunObjectV1Beta1) GetResults() []GenericResult { + res := []GenericResult{} + for _, key := range tro.Status.TaskRunResults { + res = append(res, ResultV1Beta1{ + Name: key.Name, + Value: key.Value, + }) + } + return res +} + +func (tro *TaskRunObjectV1Beta1) GetStepImages() []string { + images := []string{} + for _, stepState := range tro.Status.Steps { + images = append(images, stepState.ImageID) + } + return images +} + +func (tro *TaskRunObjectV1Beta1) GetSidecarImages() []string { + images := []string{} + for _, sidecarState := range tro.Status.Sidecars { + images = append(images, sidecarState.ImageID) + } + return images +} + +// Get the ServiceAccount declared in the TaskRun +func (tro *TaskRunObjectV1Beta1) GetServiceAccountName() string { + return tro.Spec.ServiceAccountName +} + +// Get the imgPullSecrets from the pod template +func (tro *TaskRunObjectV1Beta1) GetPullSecrets() []string { + return getPodPullSecrets(tro.Spec.PodTemplate) +} + +func (tro *TaskRunObjectV1Beta1) SupportsTaskRunArtifact() bool { + return true +} + +func (tro *TaskRunObjectV1Beta1) SupportsPipelineRunArtifact() bool { + return false +} + +func (tro *TaskRunObjectV1Beta1) SupportsOCIArtifact() bool { + return true +} + +func (tro *TaskRunObjectV1Beta1) GetRemoteProvenance() GenericProvenance { + if t := tro.Status.Provenance; t != nil && t.RefSource != nil && tro.IsRemote() { + return &ProvenanceV1Beta1{tro.Status.Provenance} + } + return nil +} + +func (tro *TaskRunObjectV1Beta1) IsRemote() bool { + isRemoteTask := false + if tro.Spec.TaskRef != nil { + if tro.Spec.TaskRef.Resolver != "" && tro.Spec.TaskRef.Resolver != "Cluster" { + isRemoteTask = true + } + } + return isRemoteTask +} diff --git a/pkg/chains/objects/objects_test.go 
b/pkg/chains/objects/objects_test.go index 61e9817f3d..cb1d86229b 100644 --- a/pkg/chains/objects/objects_test.go +++ b/pkg/chains/objects/objects_test.go @@ -19,7 +19,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "github.com/tektoncd/pipeline/pkg/apis/pipeline/pod" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) @@ -38,8 +38,8 @@ func getEmptyTemplate() *pod.PodTemplate { return &pod.PodTemplate{} } -func getTaskRun() *v1beta1.TaskRun { - return &v1beta1.TaskRun{ +func getTaskRun() *v1.TaskRun { + return &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "objects-test", @@ -47,47 +47,47 @@ func getTaskRun() *v1beta1.TaskRun { PipelineTaskLabel: "foo-task", }, }, - Spec: v1beta1.TaskRunSpec{ + Spec: v1.TaskRunSpec{ ServiceAccountName: "taskrun-sa", - Params: []v1beta1.Param{ + Params: []v1.Param{ { Name: "runtime-param", - Value: *v1beta1.NewStructuredValues("runtime-value"), + Value: *v1.NewStructuredValues("runtime-value"), }, }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - Provenance: &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Provenance: &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "https://github.com/tektoncd/chains", Digest: map[string]string{"sha1": "abcdef"}, EntryPoint: "pkg/chains/objects.go", }, }, - TaskSpec: &v1beta1.TaskSpec{ - Params: []v1beta1.ParamSpec{ + TaskSpec: &v1.TaskSpec{ + Params: []v1.ParamSpec{ { Name: "param1", - Default: v1beta1.NewStructuredValues("default-value"), + Default: v1.NewStructuredValues("default-value"), }, }, }, - TaskRunResults: []v1beta1.TaskRunResult{ + Results: []v1.TaskRunResult{ { Name: "img1_input_ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, + {Name: "mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, }, - Steps: []v1beta1.StepState{{ + Steps: []v1.StepState{{ ImageID: "step-image", }}, - Sidecars: []v1beta1.SidecarState{{ + Sidecars: []v1.SidecarState{{ ImageID: "sidecar-image", }}, }, @@ -95,48 +95,50 @@ func getTaskRun() *v1beta1.TaskRun { } } -func getPipelineRun() *v1beta1.PipelineRun { - return &v1beta1.PipelineRun{ +func getPipelineRun() *v1.PipelineRun { + return &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "objects-test", }, - Spec: v1beta1.PipelineRunSpec{ - ServiceAccountName: "pipelinerun-sa", - Params: []v1beta1.Param{ + Spec: v1.PipelineRunSpec{ + TaskRunTemplate: v1.PipelineTaskRunTemplate{ + ServiceAccountName: "pipelinerun-sa", + }, + Params: []v1.Param{ { Name: "runtime-param", - Value: 
*v1beta1.NewStructuredValues("runtime-value"), + Value: *v1.NewStructuredValues("runtime-value"), }, }, }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - Provenance: &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Provenance: &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "https://github.com/tektoncd/chains", Digest: map[string]string{"sha1": "abcdef"}, EntryPoint: "pkg/chains/objects.go", }, }, - PipelineSpec: &v1beta1.PipelineSpec{ - Params: []v1beta1.ParamSpec{ + PipelineSpec: &v1.PipelineSpec{ + Params: []v1.ParamSpec{ { Name: "param1", - Default: v1beta1.NewStructuredValues("default-value"), + Default: v1.NewStructuredValues("default-value"), }, }, }, - PipelineResults: []v1beta1.PipelineRunResult{ + Results: []v1.PipelineRunResult{ { Name: "img1_input_ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, + {Name: "mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, }, }, }, @@ -170,7 +172,7 @@ func TestTaskRun_ImagePullSecrets(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tr := NewTaskRunObject(getTaskRun()) + tr := NewTaskRunObjectV1(getTaskRun()) tr.Spec.PodTemplate = tt.template secret := tr.GetPullSecrets() assert.ElementsMatch(t, secret, tt.want) @@ -206,8 +208,8 @@ func TestPipelineRun_ImagePullSecrets(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - pr := NewPipelineRunObject(getPipelineRun()) - pr.Spec.PodTemplate = tt.template + pr := NewPipelineRunObjectV1(getPipelineRun()) + pr.Spec.TaskRunTemplate.PodTemplate = tt.template secret := pr.GetPullSecrets() assert.ElementsMatch(t, secret, tt.want) }) @@ -217,15 +219,15 @@ func TestPipelineRun_ImagePullSecrets(t *testing.T) { func TestPipelineRun_GetProvenance(t *testing.T) { t.Run("TestPipelineRun_GetProvenance", func(t *testing.T) { - pr := NewPipelineRunObject(getPipelineRun()) + pr := NewPipelineRunObjectV1(getPipelineRun()) got := pr.GetProvenance() - want := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + want := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "https://github.com/tektoncd/chains", Digest: map[string]string{"sha1": "abcdef"}, EntryPoint: "pkg/chains/objects.go", }, - } + }} if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -236,15 +238,15 @@ func TestPipelineRun_GetProvenance(t *testing.T) { func TestTaskRun_GetProvenance(t *testing.T) { t.Run("TestTaskRun_GetProvenance", func(t *testing.T) { - tr := NewTaskRunObject(getTaskRun()) + tr := NewTaskRunObjectV1(getTaskRun()) got := tr.GetProvenance() - want := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + want := 
&ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "https://github.com/tektoncd/chains", Digest: map[string]string{"sha1": "abcdef"}, EntryPoint: "pkg/chains/objects.go", }, - } + }} if d := cmp.Diff(want, got); d != "" { t.Fatalf("metadata (-want, +got):\n%s", d) } @@ -255,18 +257,18 @@ func TestTaskRun_GetProvenance(t *testing.T) { func TestPipelineRun_GetResults(t *testing.T) { t.Run("TestPipelineRun_GetResults", func(t *testing.T) { - pr := NewPipelineRunObject(getPipelineRun()) + pr := NewPipelineRunObjectV1(getPipelineRun()) got := pr.GetResults() - assert.ElementsMatch(t, got, []Result{ - { + assert.ElementsMatch(t, got, []GenericResult{ + ResultV1{ Name: "img1_input_ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, + ResultV1{Name: "mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + ResultV1{Name: "mvn1_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, }) }) @@ -275,7 +277,7 @@ func TestPipelineRun_GetResults(t *testing.T) { func TestTaskRun_GetStepImages(t *testing.T) { t.Run("TestTaskRun_GetStepImages", func(t *testing.T) { - tr := NewTaskRunObject(getTaskRun()) + tr := NewTaskRunObjectV1(getTaskRun()) got := tr.GetStepImages() want := []string{"step-image"} if d := cmp.Diff(want, got); d != "" { @@ -288,7 +290,7 @@ func TestTaskRun_GetStepImages(t *testing.T) { func TestTaskRun_GetSidecarImages(t *testing.T) { t.Run("TestTaskRun_GetSidecarImages", func(t *testing.T) { - tr := NewTaskRunObject(getTaskRun()) + tr := NewTaskRunObjectV1(getTaskRun()) got := tr.GetSidecarImages() want := []string{"sidecar-image"} if d := cmp.Diff(want, got); d != "" { @@ -301,55 +303,55 @@ func TestTaskRun_GetSidecarImages(t *testing.T) { func TestTaskRun_GetResults(t *testing.T) { t.Run("TestTaskRun_GetResults", func(t *testing.T) { - pr := NewTaskRunObject(getTaskRun()) + pr := NewTaskRunObjectV1(getTaskRun()) got := pr.GetResults() - assert.ElementsMatch(t, got, []Result{ - { + assert.ElementsMatch(t, got, []GenericResult{ + ResultV1{ Name: "img1_input_ARTIFACT_INPUTS", - Value: *v1beta1.NewObject(map[string]string{ + Value: *v1.NewObject(map[string]string{ "uri": "gcr.io/foo/bar", "digest": "sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b7", }), }, - {Name: "mvn1_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, - {Name: "mvn1_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, + ResultV1{Name: "mvn1_ARTIFACT_URI", Value: *v1.NewStructuredValues("projects/test-project/locations/us-west4/repositories/test-repo/mavenArtifacts/com.google.guava:guava:31.0-jre")}, + ResultV1{Name: "mvn1_ARTIFACT_DIGEST", Value: 
*v1.NewStructuredValues("sha256:05f95b26ed10668b7183c1e2da98610e91372fa9f510046d4ce5812addad86b5")}, }) }) } func TestPipelineRun_GetGVK(t *testing.T) { - assert.Equal(t, "tekton.dev/v1beta1/PipelineRun", NewPipelineRunObject(getPipelineRun()).GetGVK()) + assert.Equal(t, "tekton.dev/v1/PipelineRun", NewPipelineRunObjectV1(getPipelineRun()).GetGVK()) } func TestTaskRun_GetGVK(t *testing.T) { - assert.Equal(t, "tekton.dev/v1beta1/TaskRun", NewTaskRunObject(getTaskRun()).GetGVK()) + assert.Equal(t, "tekton.dev/v1/TaskRun", NewTaskRunObjectV1(getTaskRun()).GetGVK()) } func TestPipelineRun_GetKindName(t *testing.T) { - assert.Equal(t, "pipelinerun", NewPipelineRunObject(getPipelineRun()).GetKindName()) + assert.Equal(t, "pipelinerun", NewPipelineRunObjectV1(getPipelineRun()).GetKindName()) } func TestTaskRun_GetKindName(t *testing.T) { - assert.Equal(t, "taskrun", NewTaskRunObject(getTaskRun()).GetKindName()) + assert.Equal(t, "taskrun", NewTaskRunObjectV1(getTaskRun()).GetKindName()) } func TestPipelineRun_GetServiceAccountName(t *testing.T) { - assert.Equal(t, "pipelinerun-sa", NewPipelineRunObject(getPipelineRun()).GetServiceAccountName()) + assert.Equal(t, "pipelinerun-sa", NewPipelineRunObjectV1(getPipelineRun()).GetServiceAccountName()) } func TestTaskRun_GetServiceAccountName(t *testing.T) { - assert.Equal(t, "taskrun-sa", NewTaskRunObject(getTaskRun()).GetServiceAccountName()) + assert.Equal(t, "taskrun-sa", NewTaskRunObjectV1(getTaskRun()).GetServiceAccountName()) } func TestNewTektonObject(t *testing.T) { tro, err := NewTektonObject(getTaskRun()) assert.NoError(t, err) - assert.IsType(t, &TaskRunObject{}, tro) + assert.IsType(t, &TaskRunObjectV1{}, tro) pro, err := NewTektonObject(getPipelineRun()) assert.NoError(t, err) - assert.IsType(t, &PipelineRunObject{}, pro) + assert.IsType(t, &PipelineRunObjectV1{}, pro) unknown, err := NewTektonObject("someting-else") assert.Nil(t, unknown) @@ -357,7 +359,7 @@ func TestNewTektonObject(t *testing.T) { } func TestPipelineRun_GetTaskRunFromTask(t *testing.T) { - pro := NewPipelineRunObject(getPipelineRun()) + pro := NewPipelineRunObjectV1(getPipelineRun()) assert.Nil(t, pro.GetTaskRunFromTask("missing")) assert.Nil(t, pro.GetTaskRunFromTask("foo-task")) @@ -369,14 +371,14 @@ func TestPipelineRun_GetTaskRunFromTask(t *testing.T) { } func TestProvenanceExists(t *testing.T) { - pro := NewPipelineRunObject(getPipelineRun()) - provenance := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + pro := NewPipelineRunObjectV1(getPipelineRun()) + provenance := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, - } - pro.Status.Provenance = &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + }} + pro.Status.Provenance = &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, } @@ -384,14 +386,14 @@ func TestProvenanceExists(t *testing.T) { } func TestPipelineRunRemoteProvenance(t *testing.T) { - pro := NewPipelineRunObject(getPipelineRun()) - provenance := &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + pro := NewPipelineRunObjectV1(getPipelineRun()) + provenance := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, - } - pro.Status.Provenance = &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + }} + pro.Status.Provenance = &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, } @@ -399,14 +401,14 @@ func TestPipelineRunRemoteProvenance(t *testing.T) { } func TestTaskRunRemoteProvenance(t *testing.T) { - tro := NewTaskRunObject(getTaskRun()) - provenance := 
&v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + tro := NewTaskRunObjectV1(getTaskRun()) + provenance := &ProvenanceV1{&v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, - } - tro.Status.Provenance = &v1beta1.Provenance{ - RefSource: &v1beta1.RefSource{ + }} + tro.Status.Provenance = &v1.Provenance{ + RefSource: &v1.RefSource{ URI: "tekton.com", }, } @@ -414,9 +416,9 @@ func TestTaskRunRemoteProvenance(t *testing.T) { } func TestPipelineRunIsRemote(t *testing.T) { - pro := NewPipelineRunObject(getPipelineRun()) - pro.Spec.PipelineRef = &v1beta1.PipelineRef{ - ResolverRef: v1beta1.ResolverRef{ + pro := NewPipelineRunObjectV1(getPipelineRun()) + pro.Spec.PipelineRef = &v1.PipelineRef{ + ResolverRef: v1.ResolverRef{ Resolver: "Bundle", }, } @@ -424,9 +426,9 @@ func TestPipelineRunIsRemote(t *testing.T) { } func TestTaskRunIsRemote(t *testing.T) { - tro := NewTaskRunObject(getTaskRun()) - tro.Spec.TaskRef = &v1beta1.TaskRef{ - ResolverRef: v1beta1.ResolverRef{ + tro := NewTaskRunObjectV1(getTaskRun()) + tro.Spec.TaskRef = &v1.TaskRef{ + ResolverRef: v1.ResolverRef{ Resolver: "Bundle", }, } diff --git a/pkg/chains/rekor_test.go b/pkg/chains/rekor_test.go index cbc8075c93..ce53fdc1bf 100644 --- a/pkg/chains/rekor_test.go +++ b/pkg/chains/rekor_test.go @@ -18,8 +18,8 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) func TestShouldUploadTlog(t *testing.T) { @@ -77,13 +77,13 @@ func TestShouldUploadTlog(t *testing.T) { for _, test := range tests { t.Run(test.description, func(t *testing.T) { - tr := &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr := &v1.TaskRun{ + ObjectMeta: metav1.ObjectMeta{ Annotations: test.annotations, }, } cfg := config.Config{Transparency: test.cfg} - trObj := objects.NewTaskRunObject(tr) + trObj := objects.NewTaskRunObjectV1(tr) got := shouldUploadTlog(cfg, trObj) if got != test.expected { t.Fatalf("got (%v) doesn't match expected (%v)", got, test.expected) diff --git a/pkg/chains/signing.go b/pkg/chains/signing.go index 6a28b5e349..7b527138b8 100644 --- a/pkg/chains/signing.go +++ b/pkg/chains/signing.go @@ -34,7 +34,7 @@ import ( ) type Signer interface { - Sign(ctx context.Context, obj objects.TektonObject) error + Sign(ctx context.Context, obj ...objects.TektonObject) error } type ObjectSigner struct { @@ -107,7 +107,9 @@ func getSignableTypes(ctx context.Context, obj objects.TektonObject) ([]artifact // Signs TaskRun and PipelineRun objects, as well as generates attesations for each // Follows process of extract payload, sign payload, store payload and signature -func (o *ObjectSigner) Sign(ctx context.Context, tektonObj objects.TektonObject) error { +func (o *ObjectSigner) Sign(ctx context.Context, tektonObjs ...objects.TektonObject) error { + tektonObj := tektonObjs[0] + cfg := *config.FromContext(ctx) logger := logging.FromContext(ctx) @@ -219,9 +221,25 @@ func (o *ObjectSigner) Sign(ctx context.Context, tektonObj objects.TektonObject) } } - // Now mark the TektonObject as signed - if err := MarkSigned(ctx, tektonObj, o.Pipelineclientset, extraAnnotations); err != nil { - return err + if len(tektonObjs) == 1 { + // Now mark the TektonObject as signed + if err := MarkSigned(ctx, tektonObj, o.Pipelineclientset, extraAnnotations); err != nil { + return err + } + } else if 
len(tektonObjs) == 2 { + // Now mark the TektonObject as signed + if obj, ok := tektonObjs[1].(*objects.TaskRunObjectV1Beta1); ok { + if err := MarkSigned(ctx, obj, o.Pipelineclientset, extraAnnotations); err != nil { + return err + } + } + if obj, ok := tektonObjs[1].(*objects.PipelineRunObjectV1Beta1); ok { + if err := MarkSigned(ctx, obj, o.Pipelineclientset, extraAnnotations); err != nil { + return err + } + } + } else { + return fmt.Errorf("method Sign only supports 1-2 tektonObjs as arguments, received: %d", len(tektonObjs)) } return nil diff --git a/pkg/chains/signing_test.go b/pkg/chains/signing_test.go index 4660a04c57..0a8cba2f09 100644 --- a/pkg/chains/signing_test.go +++ b/pkg/chains/signing_test.go @@ -26,7 +26,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/storage" "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/util/sets" @@ -40,13 +40,13 @@ func TestSigner_Sign(t *testing.T) { // - generates payloads // - stores them in the configured systems // - marks the object as signed - tro := objects.NewTaskRunObject(&v1beta1.TaskRun{ + tro := objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", }, }) - pro := objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + pro := objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", }, @@ -186,14 +186,14 @@ func TestSigner_Sign(t *testing.T) { func TestSigner_Transparency(t *testing.T) { newTaskRun := func(name string) objects.TektonObject { - return objects.NewTaskRunObject(&v1beta1.TaskRun{ + return objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: name, }, }) } newPipelineRun := func(name string) objects.TektonObject { - return objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + return objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: name, }, @@ -202,12 +202,12 @@ func TestSigner_Transparency(t *testing.T) { setAnnotation := func(obj objects.TektonObject, key, value string) { // TODO: opportunity to add code reuse switch o := obj.GetObject().(type) { - case *v1beta1.PipelineRun: + case *v1.PipelineRun: if o.Annotations == nil { o.Annotations = make(map[string]string) } o.Annotations[key] = value - case *v1beta1.TaskRun: + case *v1.TaskRun: if o.Annotations == nil { o.Annotations = make(map[string]string) } diff --git a/pkg/chains/storage/docdb/docdb_test.go b/pkg/chains/storage/docdb/docdb_test.go index 95e0386590..9de09f119d 100644 --- a/pkg/chains/storage/docdb/docdb_test.go +++ b/pkg/chains/storage/docdb/docdb_test.go @@ -19,7 +19,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "gocloud.dev/docstore" _ "gocloud.dev/docstore/memdocstore" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -43,7 +43,7 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no error", args: args{ - rawPayload: &v1beta1.TaskRun{ObjectMeta: metav1.ObjectMeta{UID: "foo"}}, + rawPayload: &v1.TaskRun{ObjectMeta: metav1.ObjectMeta{UID: "foo"}}, signature: "signature", key: "foo", }, @@ -51,7 +51,7 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no 
error - PipelineRun", args: args{ - rawPayload: &v1beta1.PipelineRun{ObjectMeta: metav1.ObjectMeta{UID: "foo"}}, + rawPayload: &v1.PipelineRun{ObjectMeta: metav1.ObjectMeta{UID: "foo"}}, signature: "signature", key: "moo", }, diff --git a/pkg/chains/storage/gcs/gcs.go b/pkg/chains/storage/gcs/gcs.go index 1c42406dc6..27404d3986 100644 --- a/pkg/chains/storage/gcs/gcs.go +++ b/pkg/chains/storage/gcs/gcs.go @@ -26,6 +26,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/signing" "github.com/tektoncd/chains/pkg/chains/storage/api" "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" ) @@ -68,13 +69,29 @@ func NewStorageBackend(ctx context.Context, cfg config.Config) (*Backend, error) func (b *Backend) StorePayload(ctx context.Context, obj objects.TektonObject, rawPayload []byte, signature string, opts config.StorageOpts) error { logger := logging.FromContext(ctx) - if tr, isTaskRun := obj.GetObject().(*v1beta1.TaskRun); isTaskRun { + if trV1Beta1, isTaskRunV1Beta1 := obj.GetObject().(*v1beta1.TaskRun); isTaskRunV1Beta1 { + trV1 := &v1.TaskRun{} + if err := trV1Beta1.ConvertTo(ctx, trV1); err != nil { + return err + } + obj = objects.NewTaskRunObjectV1(trV1) + } + + if prV1Beta1, isPipelineRunV1Beta1 := obj.GetObject().(*v1beta1.PipelineRun); isPipelineRunV1Beta1 { + prV1 := &v1.PipelineRun{} + if err := prV1Beta1.ConvertTo(ctx, prV1); err != nil { + return err + } + obj = objects.NewPipelineRunObjectV1(prV1) + } + + if tr, isTaskRun := obj.GetObject().(*v1.TaskRun); isTaskRun { store := &TaskRunStorer{ writer: b.writer, key: opts.ShortKey, } - // TODO(https://github.com/tektoncd/chains/issues/665) currently using deprecated v1beta1 APIs until we add full v1 support - if _, err := store.Store(ctx, &api.StoreRequest[*v1beta1.TaskRun, *in_toto.Statement]{ + // TODO(https://github.com/tektoncd/chains/issues/665) currently using deprecated v1 APIs until we add full v1 support + if _, err := store.Store(ctx, &api.StoreRequest[*v1.TaskRun, *in_toto.Statement]{ Object: obj, Artifact: tr, // We don't actually use payload - we store the raw bundle values directly. @@ -89,13 +106,13 @@ func (b *Backend) StorePayload(ctx context.Context, obj objects.TektonObject, ra logger.Errorf("error writing to GCS: %w", err) return err } - } else if pr, isPipelineRun := obj.GetObject().(*v1beta1.PipelineRun); isPipelineRun { + } else if pr, isPipelineRun := obj.GetObject().(*v1.PipelineRun); isPipelineRun { store := &PipelineRunStorer{ writer: b.writer, key: opts.ShortKey, } - // TODO(https://github.com/tektoncd/chains/issues/665) currently using deprecated v1beta1 APIs until we add full v1 support - if _, err := store.Store(ctx, &api.StoreRequest[*v1beta1.PipelineRun, *in_toto.Statement]{ + // TODO(https://github.com/tektoncd/chains/issues/665) currently using deprecated v1 APIs until we add full v1 support + if _, err := store.Store(ctx, &api.StoreRequest[*v1.PipelineRun, *in_toto.Statement]{ Object: obj, Artifact: pr, // We don't actually use payload - we store the raw bundle values directly. 
@@ -111,7 +128,7 @@ func (b *Backend) StorePayload(ctx context.Context, obj objects.TektonObject, ra return err } } else { - return fmt.Errorf("type %T not supported - supported types: [*v1beta1.TaskRun, *v1beta1.PipelineRun]", obj.GetObject()) + return fmt.Errorf("type %T not supported - supported types: [*v1.TaskRun, *v1.PipelineRun]", obj.GetObject()) } return nil } @@ -151,9 +168,9 @@ func (b *Backend) RetrieveSignatures(ctx context.Context, obj objects.TektonObje var object string switch t := obj.GetObject().(type) { - case *v1beta1.TaskRun: + case *v1.TaskRun: object = taskRunSigName(t, opts) - case *v1beta1.PipelineRun: + case *v1.PipelineRun: object = pipelineRunSigname(t, opts) default: return nil, fmt.Errorf("unsupported TektonObject type: %T", t) @@ -174,9 +191,9 @@ func (b *Backend) RetrievePayloads(ctx context.Context, obj objects.TektonObject var object string switch t := obj.GetObject().(type) { - case *v1beta1.TaskRun: + case *v1.TaskRun: object = taskRunPayloadName(t, opts) - case *v1beta1.PipelineRun: + case *v1.PipelineRun: object = pipelineRunPayloadName(t, opts) default: return nil, fmt.Errorf("unsupported TektonObject type: %T", t) @@ -207,29 +224,29 @@ func (b *Backend) retrieveObject(ctx context.Context, object string) (string, er } //nolint:staticcheck -func taskRunSigName(tr *v1beta1.TaskRun, opts config.StorageOpts) string { +func taskRunSigName(tr *v1.TaskRun, opts config.StorageOpts) string { return fmt.Sprintf(SignatureNameFormatTaskRun, tr.Namespace, tr.Name, opts.ShortKey) } //nolint:staticcheck -func taskRunPayloadName(tr *v1beta1.TaskRun, opts config.StorageOpts) string { +func taskRunPayloadName(tr *v1.TaskRun, opts config.StorageOpts) string { return fmt.Sprintf(PayloadNameFormatTaskRun, tr.Namespace, tr.Name, opts.ShortKey) } //nolint:staticcheck -func pipelineRunSigname(pr *v1beta1.PipelineRun, opts config.StorageOpts) string { +func pipelineRunSigname(pr *v1.PipelineRun, opts config.StorageOpts) string { return fmt.Sprintf(SignatureNameFormatPipelineRun, pr.Namespace, pr.Name, opts.ShortKey) } //nolint:staticcheck -func pipelineRunPayloadName(pr *v1beta1.PipelineRun, opts config.StorageOpts) string { +func pipelineRunPayloadName(pr *v1.PipelineRun, opts config.StorageOpts) string { return fmt.Sprintf(PayloadNameFormatPipelineRun, pr.Namespace, pr.Name, opts.ShortKey) } //nolint:staticcheck var ( - _ api.Storer[*v1beta1.TaskRun, *in_toto.Statement] = &TaskRunStorer{} - _ api.Storer[*v1beta1.PipelineRun, *in_toto.Statement] = &PipelineRunStorer{} + _ api.Storer[*v1.TaskRun, *in_toto.Statement] = &TaskRunStorer{} + _ api.Storer[*v1.PipelineRun, *in_toto.Statement] = &PipelineRunStorer{} ) // TaskRunStorer stores TaskRuns in GCS. 
@@ -244,7 +261,7 @@ type TaskRunStorer struct { // Store stores the TaskRun chains information in GCS // //nolint:staticcheck -func (s *TaskRunStorer) Store(ctx context.Context, req *api.StoreRequest[*v1beta1.TaskRun, *in_toto.Statement]) (*api.StoreResponse, error) { +func (s *TaskRunStorer) Store(ctx context.Context, req *api.StoreRequest[*v1.TaskRun, *in_toto.Statement]) (*api.StoreResponse, error) { tr := req.Artifact key := s.key if key == "" { @@ -268,7 +285,7 @@ type PipelineRunStorer struct { // Store stores the PipelineRun chains information in GCS // //nolint:staticcheck -func (s *PipelineRunStorer) Store(ctx context.Context, req *api.StoreRequest[*v1beta1.PipelineRun, *in_toto.Statement]) (*api.StoreResponse, error) { +func (s *PipelineRunStorer) Store(ctx context.Context, req *api.StoreRequest[*v1.PipelineRun, *in_toto.Statement]) (*api.StoreResponse, error) { pr := req.Artifact key := s.key if key == "" { diff --git a/pkg/chains/storage/gcs/gcs_test.go b/pkg/chains/storage/gcs/gcs_test.go index 1ff15f53af..5ceb110efb 100644 --- a/pkg/chains/storage/gcs/gcs_test.go +++ b/pkg/chains/storage/gcs/gcs_test.go @@ -23,7 +23,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" rtesting "knative.dev/pkg/reconciler/testing" @@ -32,8 +32,8 @@ import ( //nolint:staticcheck func TestBackend_StorePayload(t *testing.T) { type args struct { - tr *v1beta1.TaskRun - pr *v1beta1.PipelineRun + tr *v1.TaskRun + pr *v1.PipelineRun signed []byte signature string opts config.StorageOpts @@ -46,14 +46,14 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no error, intoto", args: args{ - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "foo", Name: "bar", UID: types.UID("uid"), }, }, - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "foo", Name: "bar", @@ -68,14 +68,14 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no error, tekton", args: args{ - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "foo", Name: "bar", UID: types.UID("uid"), }, }, - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "foo", Name: "bar", @@ -98,7 +98,7 @@ func TestBackend_StorePayload(t *testing.T) { reader: mockGcsRead, cfg: config.Config{Storage: config.StorageConfigs{GCS: config.GCSStorageConfig{Bucket: "foo"}}}, } - trObj := objects.NewTaskRunObject(tt.args.tr) + trObj := objects.NewTaskRunObjectV1(tt.args.tr) if err := b.StorePayload(ctx, trObj, tt.args.signed, tt.args.signature, tt.args.opts); (err != nil) != tt.wantErr { t.Errorf("Backend.StorePayload() error = %v, wantErr %v", err, tt.wantErr) } @@ -121,7 +121,7 @@ func TestBackend_StorePayload(t *testing.T) { t.Errorf("wrong signature, expected %s, got %s", tt.args.signed, gotPayload[objectPayload]) } - prObj := objects.NewPipelineRunObject(tt.args.pr) + prObj := objects.NewPipelineRunObjectV1(tt.args.pr) if err := b.StorePayload(ctx, prObj, tt.args.signed, tt.args.signature, tt.args.opts); (err != nil) != tt.wantErr { t.Errorf("Backend.StorePayload() error = %v, wantErr %v", err, tt.wantErr) } diff --git a/pkg/chains/storage/grafeas/grafeas_test.go b/pkg/chains/storage/grafeas/grafeas_test.go index 295ceacc50..c68867e9df 100644 --- 
a/pkg/chains/storage/grafeas/grafeas_test.go +++ b/pkg/chains/storage/grafeas/grafeas_test.go @@ -29,7 +29,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats" "github.com/tektoncd/chains/pkg/chains/formats/slsa/extract" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/credentials/insecure" @@ -66,17 +66,17 @@ const ( var ( // clone taskrun // -------------- - cloneTaskRun = &v1beta1.TaskRun{ + cloneTaskRun = &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "default", Name: "git-clone", UID: types.UID("uid-task1"), }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "CHAINS-GIT_COMMIT", Value: *v1beta1.NewStructuredValues(commitSHA)}, - {Name: "CHAINS-GIT_URL", Value: *v1beta1.NewStructuredValues(repoURL)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "CHAINS-GIT_COMMIT", Value: *v1.NewStructuredValues(commitSHA)}, + {Name: "CHAINS-GIT_URL", Value: *v1.NewStructuredValues(repoURL)}, }, }, }, @@ -100,19 +100,19 @@ var ( artifactIdentifier2 = fmt.Sprintf("%s@sha256:%s", artifactURL2, artifactDigest2) // artifact build taskrun - buildTaskRun = &v1beta1.TaskRun{ + buildTaskRun = &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "default", Name: "artifact-build", UID: types.UID("uid-task2"), }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:" + artifactDigest1)}, - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues(artifactURL1)}, - {Name: "x_ARTIFACT_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:" + artifactDigest2)}, - {Name: "x_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues(artifactURL2)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "IMAGE_DIGEST", Value: *v1.NewStructuredValues("sha256:" + artifactDigest1)}, + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues(artifactURL1)}, + {Name: "x_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:" + artifactDigest2)}, + {Name: "x_ARTIFACT_URI", Value: *v1.NewStructuredValues(artifactURL2)}, }, }, }, @@ -139,23 +139,23 @@ var ( } // ci pipelinerun - ciPipeline = &v1beta1.PipelineRun{ + ciPipeline = &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: "default", Name: "ci-pipeline", UID: types.UID("uid-pipeline"), }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: []v1beta1.PipelineRunResult{ + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: []v1.PipelineRunResult{ // the results from task 1 - clone - {Name: "CHAINS-GIT_COMMIT", Value: *v1beta1.NewStructuredValues(commitSHA)}, - {Name: "CHAINS-GIT_URL", Value: *v1beta1.NewStructuredValues(repoURL)}, + {Name: "CHAINS-GIT_COMMIT", Value: *v1.NewStructuredValues(commitSHA)}, + {Name: "CHAINS-GIT_URL", Value: *v1.NewStructuredValues(repoURL)}, // the results from task 2 - build - {Name: "IMAGE_DIGEST", Value: *v1beta1.NewStructuredValues("sha256:" + artifactDigest1)}, - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues(artifactURL1)}, - {Name: "x_ARTIFACT_DIGEST", Value: 
*v1beta1.NewStructuredValues("sha256:" + artifactDigest2)}, - {Name: "x_ARTIFACT_URI", Value: *v1beta1.NewStructuredValues(artifactURL2)}, + {Name: "IMAGE_DIGEST", Value: *v1.NewStructuredValues("sha256:" + artifactDigest1)}, + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues(artifactURL1)}, + {Name: "x_ARTIFACT_DIGEST", Value: *v1.NewStructuredValues("sha256:" + artifactDigest2)}, + {Name: "x_ARTIFACT_URI", Value: *v1.NewStructuredValues(artifactURL2)}, }, }, }, @@ -261,7 +261,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "intoto for clone taskrun, no error, no occurrences created because no artifacts were built.", args: args{ - runObject: &objects.TaskRunObject{ + runObject: &objects.TaskRunObjectV1{ TaskRun: cloneTaskRun, }, payload: getRawPayload(t, cloneTaskRunProvenance), @@ -274,7 +274,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "intoto for build taskrun, no error, 2 BUILD occurrences should be created for the 2 artifacts generated.", args: args{ - runObject: &objects.TaskRunObject{ + runObject: &objects.TaskRunObjectV1{ TaskRun: buildTaskRun, }, payload: getRawPayload(t, buildTaskRunProvenance), @@ -287,7 +287,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "simplesigning for the build taskrun, no error, 1 ATTESTATION occurrence should be created for the artifact specified in storageopts.key", args: args{ - runObject: &objects.TaskRunObject{ + runObject: &objects.TaskRunObjectV1{ TaskRun: buildTaskRun, }, payload: []byte("attestation payload"), @@ -300,7 +300,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "intoto for the ci pipeline, no error, 2 occurences should be created for the pipelinerun for the 2 artifact generated.", args: args{ - runObject: &objects.PipelineRunObject{ + runObject: &objects.PipelineRunObjectV1{ PipelineRun: ciPipeline, }, payload: getRawPayload(t, ciPipelineRunProvenance), @@ -313,7 +313,7 @@ func TestGrafeasBackend_StoreAndRetrieve(t *testing.T) { { name: "tekton format for a taskrun, error, only simplesigning and intoto are supported", args: args{ - runObject: &objects.TaskRunObject{ + runObject: &objects.TaskRunObjectV1{ TaskRun: buildTaskRun, }, payload: []byte("foo"), @@ -584,7 +584,7 @@ func setupConnection() (*grpc.ClientConn, pb.GrafeasClient, error) { return conn, client, nil } -// --------------------- Mocked GrafeasV1Beta1Server interface ----------------- +// --------------------- Mocked Grafeasv1Server interface ----------------- type mockGrafeasServer struct { // Embed for forward compatibility. // Tests will keep working if more methods are added in the future. 
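Note on the test-fixture changes above: they follow the same migration pattern used throughout this diff. v1beta1 structs are swapped for their pipeline v1 equivalents, the v1beta1 TaskRunResults/PipelineResults fields become the unified Results field, and runs are wrapped with the new NewTaskRunObjectV1/NewPipelineRunObjectV1 constructors instead of NewTaskRunObject/NewPipelineRunObject. A minimal, self-contained sketch of a migrated fixture (the repository URL below is a placeholder, not taken from these tests):

package main

import (
	"fmt"

	"github.com/tektoncd/chains/pkg/chains/objects"
	v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// In the v1 API, results live under TaskRunStatusFields.Results;
	// the v1beta1 field was named TaskRunResults.
	tr := &v1.TaskRun{
		ObjectMeta: metav1.ObjectMeta{Namespace: "default", Name: "git-clone"},
		Status: v1.TaskRunStatus{
			TaskRunStatusFields: v1.TaskRunStatusFields{
				Results: []v1.TaskRunResult{
					// Placeholder URL for illustration only.
					{Name: "CHAINS-GIT_URL", Value: *v1.NewStructuredValues("https://example.com/repo.git")},
				},
			},
		},
	}

	// Wrap the run with the V1 constructor rather than the old NewTaskRunObject.
	obj := objects.NewTaskRunObjectV1(tr)
	if _, ok := obj.GetObject().(*v1.TaskRun); ok {
		fmt.Println("fixture wraps a pipeline v1 TaskRun")
	}
}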
diff --git a/pkg/chains/storage/oci/oci_test.go b/pkg/chains/storage/oci/oci_test.go index 36d5a37a70..eedd13188b 100644 --- a/pkg/chains/storage/oci/oci_test.go +++ b/pkg/chains/storage/oci/oci_test.go @@ -33,7 +33,7 @@ import ( "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/sigstore/sigstore/pkg/signature/payload" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" remotetest "github.com/tektoncd/pipeline/test" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/client-go/kubernetes" @@ -43,13 +43,13 @@ import ( const namespace = "oci-test" var ( - tr = &v1beta1.TaskRun{ + tr = &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: namespace, }, } - pr = &v1beta1.PipelineRun{ + pr = &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: namespace, @@ -116,7 +116,7 @@ func TestBackend_StorePayload(t *testing.T) { }{{ name: "simplesigning payload", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: simple, @@ -129,7 +129,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "into-to payload", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: intotoStatement, @@ -142,7 +142,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "no subject", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: in_toto.Statement{}, @@ -155,7 +155,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "simplesigning payload", fields: fields{ - object: objects.NewPipelineRunObject(pr), + object: objects.NewPipelineRunObjectV1(pr), }, args: args{ payload: simple, @@ -168,7 +168,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "into-to payload", fields: fields{ - object: objects.NewPipelineRunObject(pr), + object: objects.NewPipelineRunObjectV1(pr), }, args: args{ payload: intotoStatement, @@ -181,7 +181,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "in-toto-and-simple-payload", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: simple, @@ -194,7 +194,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "tekton-and-simple-payload", fields: fields{ - object: objects.NewTaskRunObject(tr), + object: objects.NewTaskRunObjectV1(tr), }, args: args{ payload: simple, @@ -207,7 +207,7 @@ func TestBackend_StorePayload(t *testing.T) { }, { name: "no subject", fields: fields{ - object: objects.NewPipelineRunObject(pr), + object: objects.NewPipelineRunObjectV1(pr), }, args: args{ payload: in_toto.Statement{}, diff --git a/pkg/chains/storage/pubsub/pubsub_test.go b/pkg/chains/storage/pubsub/pubsub_test.go index 7e76202c3b..17e86cfae5 100644 --- a/pkg/chains/storage/pubsub/pubsub_test.go +++ b/pkg/chains/storage/pubsub/pubsub_test.go @@ -23,9 +23,9 @@ import ( "github.com/tektoncd/chains/pkg/chains/formats" "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "gocloud.dev/pubsub" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" logtesting "knative.dev/pkg/logging/testing" rtesting 
"knative.dev/pkg/reconciler/testing" ) @@ -36,7 +36,7 @@ func TestBackend_StorePayload(t *testing.T) { logger := logtesting.TestLogger(t) type fields struct { - tr *v1beta1.TaskRun + tr *v1.TaskRun cfg config.Config } type args struct { @@ -53,8 +53,8 @@ func TestBackend_StorePayload(t *testing.T) { { name: "no subject", fields: fields{ - tr: &v1beta1.TaskRun{ - ObjectMeta: v1.ObjectMeta{ + tr: &v1.TaskRun{ + ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", }, @@ -109,7 +109,7 @@ func TestBackend_StorePayload(t *testing.T) { } }() - trObj := objects.NewTaskRunObject(tt.fields.tr) + trObj := objects.NewTaskRunObjectV1(tt.fields.tr) // Store the payload. if err := b.StorePayload(ctx, trObj, tt.args.rawPayload, tt.args.signature, tt.args.storageOpts); (err != nil) != tt.wantErr { t.Errorf("Backend.StorePayload() error = %v, wantErr %v", err, tt.wantErr) diff --git a/pkg/chains/storage/tekton/tekton_test.go b/pkg/chains/storage/tekton/tekton_test.go index 679d55439b..0a04023297 100644 --- a/pkg/chains/storage/tekton/tekton_test.go +++ b/pkg/chains/storage/tekton/tekton_test.go @@ -21,7 +21,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" rtesting "knative.dev/pkg/reconciler/testing" @@ -42,15 +42,15 @@ func TestBackend_StorePayload(t *testing.T) { A: "foo", B: 3, }, - object: objects.NewTaskRunObject(&v1beta1.TaskRun{ + object: objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - TaskRunResults: []v1beta1.TaskRunResult{ - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues("mockImage")}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + Results: []v1.TaskRunResult{ + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues("mockImage")}, }, }, }, @@ -62,15 +62,15 @@ func TestBackend_StorePayload(t *testing.T) { A: "foo", B: 3, }, - object: objects.NewPipelineRunObject(&v1beta1.PipelineRun{ + object: objects.NewPipelineRunObjectV1(&v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", }, - Status: v1beta1.PipelineRunStatus{ - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - PipelineResults: []v1beta1.PipelineRunResult{ - {Name: "IMAGE_URL", Value: *v1beta1.NewStructuredValues("mockImage")}, + Status: v1.PipelineRunStatus{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + Results: []v1.PipelineRunResult{ + {Name: "IMAGE_URL", Value: *v1.NewStructuredValues("mockImage")}, }, }, }, diff --git a/pkg/chains/verifier.go b/pkg/chains/verifier.go index bd0964567a..e21125a557 100644 --- a/pkg/chains/verifier.go +++ b/pkg/chains/verifier.go @@ -21,7 +21,7 @@ import ( "github.com/tektoncd/chains/pkg/chains/objects" "github.com/tektoncd/chains/pkg/chains/storage" "github.com/tektoncd/chains/pkg/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/client-go/kubernetes" @@ -29,7 +29,7 @@ import ( ) type Verifier interface { - VerifyTaskRun(ctx context.Context, tr 
*v1beta1.TaskRun) error + VerifyTaskRun(ctx context.Context, tr *v1.TaskRun) error } type TaskRunVerifier struct { @@ -38,7 +38,7 @@ type TaskRunVerifier struct { SecretPath string } -func (tv *TaskRunVerifier) VerifyTaskRun(ctx context.Context, tr *v1beta1.TaskRun) error { +func (tv *TaskRunVerifier) VerifyTaskRun(ctx context.Context, tr *v1.TaskRun) error { // Get all the things we might need (storage backends, signers and formatters) cfg := *config.FromContext(ctx) logger := logging.FromContext(ctx) @@ -50,7 +50,7 @@ func (tv *TaskRunVerifier) VerifyTaskRun(ctx context.Context, tr *v1beta1.TaskRu &artifacts.OCIArtifact{}, } - trObj := objects.NewTaskRunObject(tr) + trObj := objects.NewTaskRunObjectV1(tr) // Storage allBackends, err := storage.InitializeBackends(ctx, tv.Pipelineclientset, tv.KubeClient, cfg) diff --git a/pkg/config/config.go b/pkg/config/config.go index 15c49e878e..f1f567de35 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -34,6 +34,7 @@ type Config struct { Builder BuilderConfig Transparency TransparencyConfig BuildDefinition BuildDefinitionConfig + TektonAPI TektonAPIConfig } // ArtifactConfigs contains the configuration for how to sign/store/format the signatures for each artifact type @@ -89,6 +90,11 @@ type KMSSigner struct { Auth KMSAuth } +// TODO(aaron-prindle) configure this value to be used correctly +type TektonAPIConfig struct { + WatchForTektonV1Beta1APIInstead bool +} + // KMSAuth configures authentication to the KMS server type KMSAuth struct { Address string @@ -191,6 +197,9 @@ const ( kmsAuthSpireSock = "signers.kms.auth.spire.sock" kmsAuthSpireAudience = "signers.kms.auth.spire.audience" + // Tekton API + tektonAPI = "tektonAPI.watchForTektonV1Beta1APIInstead" + // Fulcio x509SignerFulcioEnabled = "signers.x509.fulcio.enabled" x509SignerFulcioAddr = "signers.x509.fulcio.address" @@ -309,6 +318,9 @@ func NewConfigFromMap(data map[string]string) (*Config, error) { asString(kmsAuthSpireSock, &cfg.Signers.KMS.Auth.Spire.Sock), asString(kmsAuthSpireAudience, &cfg.Signers.KMS.Auth.Spire.Audience), + // Tekton API + asBool(tektonAPI, &cfg.TektonAPI.WatchForTektonV1Beta1APIInstead), + // Fulcio asBool(x509SignerFulcioEnabled, &cfg.Signers.X509.FulcioEnabled), asString(x509SignerFulcioAddr, &cfg.Signers.X509.FulcioAddr), diff --git a/pkg/internal/mocksigner/mocksigner.go b/pkg/internal/mocksigner/mocksigner.go index 69acbdd080..e6168089a8 100644 --- a/pkg/internal/mocksigner/mocksigner.go +++ b/pkg/internal/mocksigner/mocksigner.go @@ -26,7 +26,7 @@ type Signer struct { Signed bool } -func (m *Signer) Sign(ctx context.Context, obj objects.TektonObject) error { +func (m *Signer) Sign(ctx context.Context, obj ...objects.TektonObject) error { m.Signed = true return nil } diff --git a/pkg/internal/objectloader/objectloader.go b/pkg/internal/objectloader/objectloader.go index b2cb90f9a7..5c4786e8eb 100644 --- a/pkg/internal/objectloader/objectloader.go +++ b/pkg/internal/objectloader/objectloader.go @@ -20,27 +20,27 @@ import ( "encoding/json" "os" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" ) -func TaskRunFromFile(f string) (*v1beta1.TaskRun, error) { +func TaskRunFromFile(f string) (*v1.TaskRun, error) { contents, err := os.ReadFile(f) if err != nil { return nil, err } - var tr v1beta1.TaskRun + var tr v1.TaskRun if err := json.Unmarshal(contents, &tr); err != nil { return nil, err } return &tr, nil } -func PipelineRunFromFile(f string) (*v1beta1.PipelineRun, error) { +func 
PipelineRunFromFile(f string) (*v1.PipelineRun, error) { contents, err := os.ReadFile(f) if err != nil { return nil, err } - var pr v1beta1.PipelineRun + var pr v1.PipelineRun if err := json.Unmarshal(contents, &pr); err != nil { return nil, err } diff --git a/pkg/reconciler/pipelinerun/controller.go b/pkg/reconciler/pipelinerun/controller.go index bc3d7ad47e..56cfa4f853 100644 --- a/pkg/reconciler/pipelinerun/controller.go +++ b/pkg/reconciler/pipelinerun/controller.go @@ -19,11 +19,15 @@ import ( "github.com/tektoncd/chains/pkg/chains" "github.com/tektoncd/chains/pkg/chains/storage" "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" pipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client" - pipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun" - taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun" - pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun" + pipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun" + taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun" + v1beta1pipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun" + v1beta1taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun" + pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun" + v1beta1pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun" "k8s.io/client-go/tools/cache" kubeclient "knative.dev/pkg/client/injection/kube/client" "knative.dev/pkg/configmap" @@ -33,7 +37,7 @@ import ( _ "github.com/tektoncd/chains/pkg/chains/formats/all" ) -func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl { +func NewControllerV1(ctx context.Context, cmw configmap.Watcher) *controller.Impl { logger := logging.FromContext(ctx) pipelineRunInformer := pipelineruninformer.Get(ctx) taskRunInformer := taskruninformer.Get(ctx) @@ -77,10 +81,64 @@ func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl c.Tracker = impl.Tracker - pipelineRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) + pipelineRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) //nolint:errcheck - taskRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ - FilterFunc: controller.FilterController(&v1beta1.PipelineRun{}), + taskRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ //nolint:errcheck + FilterFunc: controller.FilterController(&v1.PipelineRun{}), + Handler: controller.HandleAll(impl.EnqueueControllerOf), + }) + + return impl +} + +func NewControllerV1Beta1(ctx context.Context, cmw configmap.Watcher) *controller.Impl { + logger := logging.FromContext(ctx) + pipelineRunInformer := v1beta1pipelineruninformer.Get(ctx) + taskRunInformer := v1beta1taskruninformer.Get(ctx) + + kubeClient := kubeclient.Get(ctx) + pipelineClient := pipelineclient.Get(ctx) + + psSigner := &chains.ObjectSigner{ + SecretPath: SecretPath, + Pipelineclientset: pipelineClient, + } + + c := &ReconcilerV1Beta1{ + PipelineRunSigner: psSigner, + Pipelineclientset: 
pipelineClient, + TaskRunLister: taskRunInformer.Lister(), + } + impl := v1beta1pipelinerunreconciler.NewImpl(ctx, c, func(impl *controller.Impl) controller.Options { + cfgStore := config.NewConfigStore(logger, func(name string, value interface{}) { + // get updated config + cfg := *value.(*config.Config) + + // get all backends for storing provenance + backends, err := storage.InitializeBackends(ctx, pipelineClient, kubeClient, cfg) + if err != nil { + logger.Error(err) + } + psSigner.Backends = backends + }) + + // setup watches for the config names provided by client + cfgStore.WatchConfigs(cmw) + + return controller.Options{ + // The chains reconciler shouldn't mutate the pipelinerun's status. + SkipStatusUpdates: true, + ConfigStore: cfgStore, + FinalizerName: "chains.tekton.dev/pipelinerun", // TODO: unique name required? + } + }) + + c.Tracker = impl.Tracker + + pipelineRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) //nolint:errcheck + + taskRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ //nolint:errcheck + FilterFunc: controller.FilterController(&v1beta1.PipelineRun{}), //nolint:staticcheck Handler: controller.HandleAll(impl.EnqueueControllerOf), }) diff --git a/pkg/reconciler/pipelinerun/pipelinerun.go b/pkg/reconciler/pipelinerun/pipelinerun.go index 069aa80d36..fd0a94aa26 100644 --- a/pkg/reconciler/pipelinerun/pipelinerun.go +++ b/pkg/reconciler/pipelinerun/pipelinerun.go @@ -15,14 +15,19 @@ package pipelinerun import ( "context" + "encoding/json" "fmt" signing "github.com/tektoncd/chains/pkg/chains" "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" - pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun" - listers "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1beta1" + pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun" + v1beta1pipelinerunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun" + listers "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1" + v1beta1listers "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1beta1" "k8s.io/apimachinery/pkg/api/errors" "knative.dev/pkg/logging" pkgreconciler "knative.dev/pkg/reconciler" @@ -47,7 +52,7 @@ var _ pipelinerunreconciler.Finalizer = (*Reconciler)(nil) // ReconcileKind handles a changed or created PipelineRun. // This is the main entrypoint for chains business logic. -func (r *Reconciler) ReconcileKind(ctx context.Context, pr *v1beta1.PipelineRun) pkgreconciler.Event { +func (r *Reconciler) ReconcileKind(ctx context.Context, pr *v1.PipelineRun) pkgreconciler.Event { log := logging.FromContext(ctx).With("pipelinerun", fmt.Sprintf("%s/%s", pr.Namespace, pr.Name)) return r.FinalizeKind(logging.WithLogger(ctx, log), pr) } @@ -56,13 +61,20 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, pr *v1beta1.PipelineRun) // We utilize finalizers to ensure that we get a crack at signing every pipelinerun // that we see flowing through the system. If we don't add a finalizer, it could // get cleaned up before we see the final state and sign it. 
-func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) pkgreconciler.Event { +func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1.PipelineRun) pkgreconciler.Event { + cfg := *config.FromContext(ctx) + + // Check to see if chains is configured to watch v1beta1 Tekton API objects + if cfg.TektonAPI.WatchForTektonV1Beta1APIInstead { + return nil + } + // Check to make sure the PipelineRun is finished. if !pr.IsDone() { logging.FromContext(ctx).Infof("pipelinerun is still running") return nil } - pro := objects.NewPipelineRunObject(pr) + pro := objects.NewPipelineRunObjectV1(pr) // Check to see if it has already been signed. if signing.Reconciled(ctx, r.Pipelineclientset, pro) { @@ -72,8 +84,110 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) // Get TaskRun names depending on whether embeddedstatus feature is set or not var trs []string - if len(pr.Status.ChildReferences) == 0 || len(pr.Status.TaskRuns) > 0 || len(pr.Status.Runs) > 0 { //nolint:all //incompatible with pipelines v0.45 - for trName, ptrs := range pr.Status.TaskRuns { //nolint:all //incompatible with pipelines v0.45 + for _, cr := range pr.Status.ChildReferences { + trs = append(trs, cr.Name) + } + + // Signing both taskruns and pipelineruns causes a race condition when using oci storage + // during the push to the registry. This checks the taskruns to ensure they've been reconciled + // before attempting to sign the pippelinerun. + for _, name := range trs { + tr, err := r.TaskRunLister.TaskRuns(pr.Namespace).Get(name) + if err != nil { + logging.FromContext(ctx).Errorf("Unable to get reconciled status of taskrun %s within pipelinerun", name) + if errors.IsNotFound(err) { + // Since this is an unrecoverable scenario, returning the error would prevent the + // finalizer from being removed, thus preventing the PipelineRun from being deleted. + return nil + } + return err + } + if tr == nil { + logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not found", name) + return nil + } + if tr.Status.CompletionTime == nil { + logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not yet finalized: status is not complete", name) + return r.trackTaskRun(tr, pr) + } + reconciled := signing.Reconciled(ctx, r.Pipelineclientset, objects.NewTaskRunObjectV1(tr)) + if !reconciled { + logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not yet reconciled", name) + return r.trackTaskRun(tr, pr) + } + pro.AppendTaskRun(tr) + } + + if err := r.PipelineRunSigner.Sign(ctx, pro); err != nil { + return err + } + return nil +} + +func (r *Reconciler) trackTaskRun(tr *v1.TaskRun, pr *v1.PipelineRun) error { + ref := tracker.Reference{ + APIVersion: "tekton.dev/v1", + Kind: "TaskRun", + Namespace: tr.Namespace, + Name: tr.Name, + } + return r.Tracker.TrackReference(ref, pr) +} + +type ReconcilerV1Beta1 struct { + PipelineRunSigner signing.Signer + Pipelineclientset versioned.Interface + TaskRunLister v1beta1listers.TaskRunLister + Tracker tracker.Interface +} + +// Check that our Reconciler implements pipelinerunreconciler.Interface and pipelinerunreconciler.Finalizer +var _ v1beta1pipelinerunreconciler.Interface = (*ReconcilerV1Beta1)(nil) +var _ v1beta1pipelinerunreconciler.Finalizer = (*ReconcilerV1Beta1)(nil) + +// ReconcileKind handles a changed or created PipelineRun. +// This is the main entrypoint for chains business logic. 
+func (r *ReconcilerV1Beta1) ReconcileKind(ctx context.Context, pr *v1beta1.PipelineRun) pkgreconciler.Event { //nolint:staticcheck + log := logging.FromContext(ctx).With("pipelinerun", fmt.Sprintf("%s/%s", pr.Namespace, pr.Name)) + return r.FinalizeKind(logging.WithLogger(ctx, log), pr) +} + +// FinalizeKind implements pipelinerunreconciler.Finalizer +// We utilize finalizers to ensure that we get a crack at signing every pipelinerun +// that we see flowing through the system. If we don't add a finalizer, it could +// get cleaned up before we see the final state and sign it. +func (r *ReconcilerV1Beta1) FinalizeKind(ctx context.Context, prV1Beta1 *v1beta1.PipelineRun) pkgreconciler.Event { //nolint:staticcheck + cfg := *config.FromContext(ctx) + + // Check to see if chains is configured to watch v1beta1 Tekton API objects + if !cfg.TektonAPI.WatchForTektonV1Beta1APIInstead { + return nil + } + + // Check to make sure the PipelineRun is finished. + if !prV1Beta1.IsDone() { + logging.FromContext(ctx).Infof("pipelinerun is still running") + return nil + } + + prV1 := &v1.PipelineRun{} + err := prV1Beta1.ConvertTo(ctx, prV1) + if err != nil { + return err + } + obj := objects.NewPipelineRunObjectV1(prV1) + objv1beta1 := objects.NewPipelineRunObjectV1Beta1(prV1Beta1) + + // Check to see if it has already been signed. + if signing.Reconciled(ctx, r.Pipelineclientset, objv1beta1) { + logging.FromContext(ctx).Infof("pipelinerun has been reconciled") + return nil + } + + // Get TaskRun names depending on whether embeddedstatus feature is set or not + var trs []string + if len(prV1Beta1.Status.ChildReferences) == 0 || len(prV1Beta1.Status.TaskRuns) > 0 || len(prV1Beta1.Status.Runs) > 0 { //nolint:all //incompatible with pipelines v0.45 + for trName, ptrs := range prV1Beta1.Status.TaskRuns { //nolint:all //incompatible with pipelines v0.45 // TaskRuns within a PipelineRun may not have been finalized yet if the PipelineRun timeout // has exceeded. Wait to process the PipelineRun on the next update, see // https://github.com/tektoncd/pipeline/issues/4916 @@ -84,7 +198,7 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) trs = append(trs, trName) } } else { - for _, cr := range pr.Status.ChildReferences { + for _, cr := range prV1Beta1.Status.ChildReferences { trs = append(trs, cr.Name) } } @@ -93,7 +207,7 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) // during the push to the registry. This checks the taskruns to ensure they've been reconciled // before attempting to sign the pippelinerun. 
for _, name := range trs { - tr, err := r.TaskRunLister.TaskRuns(pr.Namespace).Get(name) + trV1Beta1, err := r.TaskRunLister.TaskRuns(prV1Beta1.Namespace).Get(name) if err != nil { logging.FromContext(ctx).Errorf("Unable to get reconciled status of taskrun %s within pipelinerun", name) if errors.IsNotFound(err) { @@ -103,29 +217,54 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, pr *v1beta1.PipelineRun) } return err } - if tr == nil { + + if trV1Beta1 == nil { logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not found", name) return nil } - if tr.Status.CompletionTime == nil { + + trV1 := &v1.TaskRun{} + err = trV1Beta1.ConvertTo(ctx, trV1) + if err != nil { + return err + } + + if trV1.Status.CompletionTime == nil { logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not yet finalized: status is not complete", name) - return r.trackTaskRun(tr, pr) + return r.trackTaskRun(trV1Beta1, prV1Beta1) } - reconciled := signing.Reconciled(ctx, r.Pipelineclientset, objects.NewTaskRunObject(tr)) + reconciled := signing.Reconciled(ctx, r.Pipelineclientset, objects.NewTaskRunObjectV1(trV1)) if !reconciled { logging.FromContext(ctx).Infof("taskrun %s within pipelinerun is not yet reconciled", name) - return r.trackTaskRun(tr, pr) + return r.trackTaskRun(trV1Beta1, prV1Beta1) } - pro.AppendTaskRun(tr) + + if trV1Beta1.Spec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Spec.Resources) //nolint:staticcheck + if err != nil { + return err + } + trV1.Annotations["tekton.dev/v1beta1-spec-resources"] = string(jsonData) + } + + if trV1Beta1.Status.TaskRunStatusFields.TaskSpec != nil && trV1Beta1.Status.TaskRunStatusFields.TaskSpec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Status.TaskRunStatusFields.TaskSpec.Resources) //nolint:staticcheck + if err != nil { + return err + } + trV1.Annotations["tekton.dev/v1beta1-status-taskrunstatusfields-taskspec-resources"] = string(jsonData) + } + + obj.AppendTaskRun(trV1) } - if err := r.PipelineRunSigner.Sign(ctx, pro); err != nil { + if err := r.PipelineRunSigner.Sign(ctx, obj, objv1beta1); err != nil { return err } return nil } -func (r *Reconciler) trackTaskRun(tr *v1beta1.TaskRun, pr *v1beta1.PipelineRun) error { +func (r *ReconcilerV1Beta1) trackTaskRun(tr *v1beta1.TaskRun, pr *v1beta1.PipelineRun) error { //nolint:staticcheck ref := tracker.Reference{ APIVersion: "tekton.dev/v1beta1", Kind: "TaskRun", diff --git a/pkg/reconciler/pipelinerun/pipelinerun_test.go b/pkg/reconciler/pipelinerun/pipelinerun_test.go index be6bb0aa7d..50d86c6779 100644 --- a/pkg/reconciler/pipelinerun/pipelinerun_test.go +++ b/pkg/reconciler/pipelinerun/pipelinerun_test.go @@ -23,14 +23,13 @@ import ( "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/internal/mocksigner" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" - fakepipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake" - faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake" + fakepipelineruninformer 
"github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun/fake" + faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun/fake" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "knative.dev/pkg/apis" duckv1 "knative.dev/pkg/apis/duck/v1" @@ -46,17 +45,17 @@ func TestReconciler_Reconcile(t *testing.T) { tests := []struct { name string key string - pipelineRuns []*v1beta1.PipelineRun + pipelineRuns []*v1.PipelineRun }{ { name: "no pipelineRuns", key: "foo/bar", - pipelineRuns: []*v1beta1.PipelineRun{}, + pipelineRuns: []*v1.PipelineRun{}, }, { name: "found PipelineRun", key: "foo/bar", - pipelineRuns: []*v1beta1.PipelineRun{ + pipelineRuns: []*v1.PipelineRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "bar", @@ -78,7 +77,7 @@ func TestReconciler_Reconcile(t *testing.T) { Name: config.ChainsConfig, }, }) - ctl := NewController(ctx, configMapWatcher) + ctl := NewControllerV1(ctx, configMapWatcher) if la, ok := ctl.Reconciler.(pkgreconciler.LeaderAware); ok { if err := la.Promote(pkgreconciler.UniversalBucket(), func(pkgreconciler.Bucket, types.NamespacedName) {}); err != nil { @@ -93,13 +92,13 @@ func TestReconciler_Reconcile(t *testing.T) { } } -func setupData(ctx context.Context, t *testing.T, prs []*v1beta1.PipelineRun) informers.PipelineRunInformer { +func setupData(ctx context.Context, t *testing.T, prs []*v1.PipelineRun) informers.PipelineRunInformer { pri := fakepipelineruninformer.Get(ctx) c := fakepipelineclient.Get(ctx) for _, pa := range prs { pa := pa.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.TektonV1beta1().PipelineRuns(pa.Namespace).Create(ctx, pa, metav1.CreateOptions{}); err != nil { + if _, err := c.TektonV1().PipelineRuns(pa.Namespace).Create(ctx, pa, metav1.CreateOptions{}); err != nil { t.Fatal(err) } } @@ -111,20 +110,20 @@ func TestReconciler_handlePipelineRun(t *testing.T) { tests := []struct { name string - pr *v1beta1.PipelineRun - taskruns []*v1beta1.TaskRun + pr *v1.PipelineRun + taskruns []*v1.TaskRun shouldSign bool wantErr bool }{ { name: "complete, already signed", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{signing.ChainsAnnotation: "true"}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }}, @@ -133,13 +132,13 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, { name: "complete, not already signed", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }}, @@ -148,13 +147,13 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, { name: "not complete, not already signed", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{}, }}, @@ -163,31 +162,19 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, { name: "taskruns completed 
with full taskrun status", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - TaskRuns: map[string]*v1beta1.PipelineRunTaskRunStatus{ - "taskrun1": { - PipelineTaskName: "task1", - Status: &v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &metav1.Time{}, - }, - }, - }, - }, - }, }, }, - taskruns: []*v1beta1.TaskRun{ + taskruns: []*v1.TaskRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "taskrun1", @@ -196,9 +183,9 @@ func TestReconciler_handlePipelineRun(t *testing.T) { "chains.tekton.dev/signed": "true", }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, }, @@ -208,19 +195,19 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, { name: "taskruns completed with child references", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - ChildReferences: []v1beta1.ChildStatusReference{ - v1beta1.ChildStatusReference{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + ChildReferences: []v1.ChildStatusReference{ + { Name: "taskrun1", PipelineTaskName: "task1", }, @@ -228,7 +215,7 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, }, }, - taskruns: []*v1beta1.TaskRun{ + taskruns: []*v1.TaskRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "taskrun1", @@ -237,9 +224,9 @@ func TestReconciler_handlePipelineRun(t *testing.T) { "chains.tekton.dev/signed": "true", }, }, - Status: v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &v1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, + Status: v1.TaskRunStatus{ + TaskRunStatusFields: v1.TaskRunStatusFields{ + CompletionTime: &metav1.Time{Time: time.Date(1995, time.December, 24, 6, 12, 12, 24, time.UTC)}, }, }, }, @@ -247,58 +234,21 @@ func TestReconciler_handlePipelineRun(t *testing.T) { shouldSign: true, wantErr: false, }, - { - name: "taskruns not yet completed", - pr: &v1beta1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "pipelinerun", - Namespace: "default", - Annotations: map[string]string{}, - }, - Status: v1beta1.PipelineRunStatus{ - Status: duckv1.Status{ - Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, - }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - TaskRuns: map[string]*v1beta1.PipelineRunTaskRunStatus{ - "taskrun1": { - PipelineTaskName: "task1", - Status: &v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &metav1.Time{}, - }, - }, - }, - }, - }, - }, - }, - taskruns: []*v1beta1.TaskRun{ - { - ObjectMeta: metav1.ObjectMeta{ - Name: "taskrun1", - Namespace: "default", - }, - }, - }, - 
shouldSign: false, - wantErr: true, - }, { name: "taskruns not yet completed with child references", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - ChildReferences: []v1beta1.ChildStatusReference{ - v1beta1.ChildStatusReference{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + ChildReferences: []v1.ChildStatusReference{ + { Name: "taskrun1", PipelineTaskName: "task1", }, @@ -306,7 +256,7 @@ func TestReconciler_handlePipelineRun(t *testing.T) { }, }, }, - taskruns: []*v1beta1.TaskRun{ + taskruns: []*v1.TaskRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "taskrun1", @@ -317,50 +267,21 @@ func TestReconciler_handlePipelineRun(t *testing.T) { shouldSign: false, wantErr: true, }, - { - name: "missing taskrun", - pr: &v1beta1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "pipelinerun", - Namespace: "default", - Annotations: map[string]string{}, - }, - Status: v1beta1.PipelineRunStatus{ - Status: duckv1.Status{ - Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, - }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - TaskRuns: map[string]*v1beta1.PipelineRunTaskRunStatus{ - "taskrun1": { - PipelineTaskName: "task1", - Status: &v1beta1.TaskRunStatus{ - TaskRunStatusFields: v1beta1.TaskRunStatusFields{ - CompletionTime: &metav1.Time{}, - }, - }, - }, - }, - }, - }, - }, - shouldSign: false, - wantErr: false, - }, { name: "missing taskrun with child references", - pr: &v1beta1.PipelineRun{ + pr: &v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ Name: "pipelinerun", Namespace: "default", Annotations: map[string]string{}, }, - Status: v1beta1.PipelineRunStatus{ + Status: v1.PipelineRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, - PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ - ChildReferences: []v1beta1.ChildStatusReference{ - v1beta1.ChildStatusReference{ + PipelineRunStatusFields: v1.PipelineRunStatusFields{ + ChildReferences: []v1.ChildStatusReference{ + { Name: "taskrun1", PipelineTaskName: "task1", }, @@ -377,7 +298,7 @@ func TestReconciler_handlePipelineRun(t *testing.T) { signer := &mocksigner.Signer{} ctx, _ := rtesting.SetupFakeContext(t) c := fakepipelineclient.Get(ctx) - tekton.CreateObject(t, ctx, c, objects.NewPipelineRunObject(tt.pr)) + tekton.CreateObject(t, ctx, c, objects.NewPipelineRunObjectV1(tt.pr)) tri := faketaskruninformer.Get(ctx) r := &Reconciler{ diff --git a/pkg/reconciler/taskrun/controller.go b/pkg/reconciler/taskrun/controller.go index dbbb1cdab9..af495d0380 100644 --- a/pkg/reconciler/taskrun/controller.go +++ b/pkg/reconciler/taskrun/controller.go @@ -20,8 +20,10 @@ import ( "github.com/tektoncd/chains/pkg/chains/storage" "github.com/tektoncd/chains/pkg/config" pipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client" - taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun" - taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/taskrun" + taskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun" + v1beta1taskruninformer 
"github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun" + taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun" + v1beta1taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/taskrun" kubeclient "knative.dev/pkg/client/injection/kube/client" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" @@ -30,7 +32,7 @@ import ( _ "github.com/tektoncd/chains/pkg/chains/formats/all" ) -func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl { +func NewControllerV1(ctx context.Context, cmw configmap.Watcher) *controller.Impl { logger := logging.FromContext(ctx) taskRunInformer := taskruninformer.Get(ctx) @@ -42,7 +44,7 @@ func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl Pipelineclientset: pipelineClient, } - c := &Reconciler{ + c := &ReconcilerV1{ TaskRunSigner: tsSigner, Pipelineclientset: pipelineClient, } @@ -70,7 +72,52 @@ func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl } }) - taskRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) + taskRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) //nolint:errcheck + + return impl +} + +func NewControllerV1Beta1(ctx context.Context, cmw configmap.Watcher) *controller.Impl { + logger := logging.FromContext(ctx) + taskRunInformer := v1beta1taskruninformer.Get(ctx) + + kubeClient := kubeclient.Get(ctx) + pipelineClient := pipelineclient.Get(ctx) + + tsSigner := &chains.ObjectSigner{ + SecretPath: SecretPath, + Pipelineclientset: pipelineClient, + } + + c := &ReconcilerV1Beta1{ + TaskRunSigner: tsSigner, + Pipelineclientset: pipelineClient, + } + impl := v1beta1taskrunreconciler.NewImpl(ctx, c, func(impl *controller.Impl) controller.Options { + cfgStore := config.NewConfigStore(logger, func(name string, value interface{}) { + // get updated config + cfg := *value.(*config.Config) + + // get all backends for storing provenance + backends, err := storage.InitializeBackends(ctx, pipelineClient, kubeClient, cfg) + if err != nil { + logger.Error(err) + } + tsSigner.Backends = backends + }) + + // setup watches for the config names provided by client + cfgStore.WatchConfigs(cmw) + + return controller.Options{ + // The chains reconciler shouldn't mutate the taskrun's status. 
+ SkipStatusUpdates: true, + ConfigStore: cfgStore, + FinalizerName: "chains.tekton.dev", + } + }) + + taskRunInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) //nolint:errcheck return impl } diff --git a/pkg/reconciler/taskrun/taskrun.go b/pkg/reconciler/taskrun/taskrun.go index 18fa87d15b..13e017293a 100644 --- a/pkg/reconciler/taskrun/taskrun.go +++ b/pkg/reconciler/taskrun/taskrun.go @@ -15,12 +15,16 @@ package taskrun import ( "context" + "encoding/json" signing "github.com/tektoncd/chains/pkg/chains" "github.com/tektoncd/chains/pkg/chains/objects" + "github.com/tektoncd/chains/pkg/config" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" - taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/taskrun" + taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun" + v1beta1taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/taskrun" "knative.dev/pkg/logging" pkgreconciler "knative.dev/pkg/reconciler" ) @@ -30,18 +34,18 @@ const ( SecretPath = "/etc/signing-secrets" ) -type Reconciler struct { +type ReconcilerV1 struct { TaskRunSigner signing.Signer Pipelineclientset versioned.Interface } // Check that our Reconciler implements taskrunreconciler.Interface and taskrunreconciler.Finalizer -var _ taskrunreconciler.Interface = (*Reconciler)(nil) -var _ taskrunreconciler.Finalizer = (*Reconciler)(nil) +var _ taskrunreconciler.Interface = (*ReconcilerV1)(nil) +var _ taskrunreconciler.Finalizer = (*ReconcilerV1)(nil) // ReconcileKind handles a changed or created TaskRun. // This is the main entrypoint for chains business logic. -func (r *Reconciler) ReconcileKind(ctx context.Context, tr *v1beta1.TaskRun) pkgreconciler.Event { +func (r *ReconcilerV1) ReconcileKind(ctx context.Context, tr *v1.TaskRun) pkgreconciler.Event { return r.FinalizeKind(ctx, tr) } @@ -49,14 +53,21 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, tr *v1beta1.TaskRun) pkg // We utilize finalizers to ensure that we get a crack at signing every taskrun // that we see flowing through the system. If we don't add a finalizer, it could // get cleaned up before we see the final state and sign it. -func (r *Reconciler) FinalizeKind(ctx context.Context, tr *v1beta1.TaskRun) pkgreconciler.Event { +func (r *ReconcilerV1) FinalizeKind(ctx context.Context, tr *v1.TaskRun) pkgreconciler.Event { + cfg := *config.FromContext(ctx) + + // Check to see if chains is configured to watch v1beta1 Tekton API objects + if cfg.TektonAPI.WatchForTektonV1Beta1APIInstead { + return nil + } + // Check to make sure the TaskRun is finished. if !tr.IsDone() { logging.FromContext(ctx).Infof("taskrun %s/%s is still running", tr.Namespace, tr.Name) return nil } - obj := objects.NewTaskRunObject(tr) + obj := objects.NewTaskRunObjectV1(tr) // Check to see if it has already been signed. 
if signing.Reconciled(ctx, r.Pipelineclientset, obj) { @@ -69,3 +80,74 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, tr *v1beta1.TaskRun) pkgr } return nil } + +type ReconcilerV1Beta1 struct { + TaskRunSigner signing.Signer + Pipelineclientset versioned.Interface +} + +// Check that our Reconciler implements taskrunreconciler.Interface and taskrunreconciler.Finalizer +var _ v1beta1taskrunreconciler.Interface = (*ReconcilerV1Beta1)(nil) +var _ v1beta1taskrunreconciler.Finalizer = (*ReconcilerV1Beta1)(nil) + +// ReconcileKind handles a changed or created TaskRun. +// This is the main entrypoint for chains business logic. +func (r *ReconcilerV1Beta1) ReconcileKind(ctx context.Context, tr *v1beta1.TaskRun) pkgreconciler.Event { //nolint:staticcheck + return r.FinalizeKind(ctx, tr) +} + +// FinalizeKind implements taskrunreconciler.Finalizer +// We utilize finalizers to ensure that we get a crack at signing every taskrun +// that we see flowing through the system. If we don't add a finalizer, it could +// get cleaned up before we see the final state and sign it. +func (r *ReconcilerV1Beta1) FinalizeKind(ctx context.Context, trV1Beta1 *v1beta1.TaskRun) pkgreconciler.Event { //nolint:staticcheck + cfg := *config.FromContext(ctx) + + // Check to see if chains is configured to watch v1beta1 Tekton API objects + if !cfg.TektonAPI.WatchForTektonV1Beta1APIInstead { + return nil + } + + // Check to make sure the TaskRun is finished. + if !trV1Beta1.IsDone() { + logging.FromContext(ctx).Infof("taskrun %s/%s is still running", trV1Beta1.Namespace, trV1Beta1.Name) + return nil + } + + trV1 := &v1.TaskRun{} + err := trV1Beta1.ConvertTo(ctx, trV1) + if err != nil { + return err + } + + if trV1Beta1.Spec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Spec.Resources) //nolint:staticcheck + if err != nil { + return err + } + trV1.Annotations["tekton.dev/v1beta1-spec-resources"] = string(jsonData) + } + + if trV1Beta1.Status.TaskRunStatusFields.TaskSpec != nil && trV1Beta1.Status.TaskRunStatusFields.TaskSpec.Resources != nil { //nolint:staticcheck + jsonData, err := json.Marshal(trV1Beta1.Status.TaskRunStatusFields.TaskSpec.Resources) //nolint:staticcheck + if err != nil { + return err + } + trV1.Annotations["tekton.dev/v1beta1-status-taskrunstatusfields-taskspec-resources"] = string(jsonData) + } + + obj := objects.NewTaskRunObjectV1(trV1) + objv1beta1 := objects.NewTaskRunObjectV1Beta1(trV1Beta1) + + // Check to see if it has already been signed. 
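+ // Aside (illustrative sketch, not part of this change): a consumer that needs the deprecated
+ // resources back can decode the annotation written above, e.g.
+ //
+ //	var res v1beta1.TaskRunResources
+ //	if raw, ok := trV1.Annotations["tekton.dev/v1beta1-spec-resources"]; ok {
+ //		if err := json.Unmarshal([]byte(raw), &res); err != nil {
+ //			// malformed annotation; handle or log as appropriate
+ //		}
+ //	}
+ //
+ // As with the v1 reconciler above, the next check skips runs that chains has already signed.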
+ if signing.Reconciled(ctx, r.Pipelineclientset, objv1beta1) { + logging.FromContext(ctx).Infof("taskrun %s/%s has been reconciled", trV1Beta1.Namespace, trV1Beta1.Name) + return nil + } + + if err := r.TaskRunSigner.Sign(ctx, obj, objv1beta1); err != nil { + return err + } + + return nil +} diff --git a/pkg/reconciler/taskrun/taskrun_test.go b/pkg/reconciler/taskrun/taskrun_test.go index d0e8a2b36c..5083663c46 100644 --- a/pkg/reconciler/taskrun/taskrun_test.go +++ b/pkg/reconciler/taskrun/taskrun_test.go @@ -22,10 +22,10 @@ import ( "github.com/tektoncd/chains/pkg/config" "github.com/tektoncd/chains/pkg/internal/mocksigner" "github.com/tektoncd/chains/pkg/test/tekton" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1" fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" - faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake" + faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun/fake" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" @@ -42,17 +42,17 @@ func TestReconciler_Reconcile(t *testing.T) { tests := []struct { name string key string - taskRuns []*v1beta1.TaskRun + taskRuns []*v1.TaskRun }{ { name: "no taskruns", key: "foo/bar", - taskRuns: []*v1beta1.TaskRun{}, + taskRuns: []*v1.TaskRun{}, }, { name: "found taskrun", key: "foo/bar", - taskRuns: []*v1beta1.TaskRun{ + taskRuns: []*v1.TaskRun{ { ObjectMeta: metav1.ObjectMeta{ Name: "bar", @@ -74,7 +74,7 @@ func TestReconciler_Reconcile(t *testing.T) { Name: config.ChainsConfig, }, }) - ctl := NewController(ctx, configMapWatcher) + ctl := NewControllerV1(ctx, configMapWatcher) if la, ok := ctl.Reconciler.(pkgreconciler.LeaderAware); ok { if err := la.Promote(pkgreconciler.UniversalBucket(), func(pkgreconciler.Bucket, types.NamespacedName) {}); err != nil { @@ -89,13 +89,13 @@ func TestReconciler_Reconcile(t *testing.T) { } } -func setupData(ctx context.Context, t *testing.T, trs []*v1beta1.TaskRun) informers.TaskRunInformer { +func setupData(ctx context.Context, t *testing.T, trs []*v1.TaskRun) informers.TaskRunInformer { tri := faketaskruninformer.Get(ctx) c := fakepipelineclient.Get(ctx) for _, ta := range trs { ta := ta.DeepCopy() // Avoid assumptions that the informer's copy is modified. 
- if _, err := c.TektonV1beta1().TaskRuns(ta.Namespace).Create(ctx, ta, metav1.CreateOptions{}); err != nil { + if _, err := c.TektonV1().TaskRuns(ta.Namespace).Create(ctx, ta, metav1.CreateOptions{}); err != nil { t.Fatal(err) } } @@ -107,16 +107,16 @@ func TestReconciler_handleTaskRun(t *testing.T) { tests := []struct { name string - tr *v1beta1.TaskRun + tr *v1.TaskRun shouldSign bool }{ { name: "complete, already signed", - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{signing.ChainsAnnotation: "true"}, }, - Status: v1beta1.TaskRunStatus{ + Status: v1.TaskRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }}, @@ -125,11 +125,11 @@ func TestReconciler_handleTaskRun(t *testing.T) { }, { name: "complete, not already signed", - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{}, }, - Status: v1beta1.TaskRunStatus{ + Status: v1.TaskRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }}, @@ -138,11 +138,11 @@ func TestReconciler_handleTaskRun(t *testing.T) { }, { name: "not complete, not already signed", - tr: &v1beta1.TaskRun{ + tr: &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{}, }, - Status: v1beta1.TaskRunStatus{ + Status: v1.TaskRunStatus{ Status: duckv1.Status{ Conditions: []apis.Condition{}, }}, @@ -155,9 +155,9 @@ func TestReconciler_handleTaskRun(t *testing.T) { signer := &mocksigner.Signer{} ctx, _ := rtesting.SetupFakeContext(t) c := fakepipelineclient.Get(ctx) - tekton.CreateObject(t, ctx, c, objects.NewTaskRunObject(tt.tr)) + tekton.CreateObject(t, ctx, c, objects.NewTaskRunObjectV1(tt.tr)) - r := &Reconciler{ + r := &ReconcilerV1{ TaskRunSigner: signer, Pipelineclientset: c, } diff --git a/pkg/test/tekton/tekton.go b/pkg/test/tekton/tekton.go index d74bf1461c..3de9073b97 100644 --- a/pkg/test/tekton/tekton.go +++ b/pkg/test/tekton/tekton.go @@ -19,7 +19,7 @@ import ( "testing" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" pipelineclientset "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/watch" @@ -27,18 +27,18 @@ import ( func CreateObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, obj objects.TektonObject) objects.TektonObject { switch o := obj.GetObject().(type) { - case *v1beta1.PipelineRun: - pr, err := ps.TektonV1beta1().PipelineRuns(obj.GetNamespace()).Create(ctx, o, metav1.CreateOptions{}) + case *v1.PipelineRun: + pr, err := ps.TektonV1().PipelineRuns(obj.GetNamespace()).Create(ctx, o, metav1.CreateOptions{}) if err != nil { t.Fatalf("error creating pipelinerun: %v", err) } - return objects.NewPipelineRunObject(pr) - case *v1beta1.TaskRun: - tr, err := ps.TektonV1beta1().TaskRuns(obj.GetNamespace()).Create(ctx, o, metav1.CreateOptions{}) + return objects.NewPipelineRunObjectV1(pr) + case *v1.TaskRun: + tr, err := ps.TektonV1().TaskRuns(obj.GetNamespace()).Create(ctx, o, metav1.CreateOptions{}) if err != nil { t.Fatalf("error creating taskrun: %v", err) } - return objects.NewTaskRunObject(tr) + return objects.NewTaskRunObjectV1(tr) } return nil } @@ -46,9 +46,9 @@ func CreateObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interf // Passing in TektonObject since it encapsulates namespace, name, and type. 
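// A typical round trip in a test (client and tr are illustrative names) looks like:
//
//	created := tekton.CreateObject(t, ctx, client, objects.NewTaskRunObjectV1(tr))
//	got, err := tekton.GetObject(t, ctx, client, created)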
func GetObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, obj objects.TektonObject) (objects.TektonObject, error) { switch obj.GetObject().(type) { - case *v1beta1.PipelineRun: + case *v1.PipelineRun: return GetPipelineRun(t, ctx, ps, obj.GetNamespace(), obj.GetName()) - case *v1beta1.TaskRun: + case *v1.TaskRun: return GetTaskRun(t, ctx, ps, obj.GetNamespace(), obj.GetName()) } t.Fatalf("unknown object type %T", obj.GetObject()) @@ -56,30 +56,30 @@ func GetObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interface } func GetPipelineRun(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, namespace, name string) (objects.TektonObject, error) { - pr, err := ps.TektonV1beta1().PipelineRuns(namespace).Get(ctx, name, metav1.GetOptions{}) + pr, err := ps.TektonV1().PipelineRuns(namespace).Get(ctx, name, metav1.GetOptions{}) if err != nil { t.Fatalf("error getting pipelinerun: %v", err) } - return objects.NewPipelineRunObject(pr), nil + return objects.NewPipelineRunObjectV1(pr), nil } func GetTaskRun(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, namespace, name string) (objects.TektonObject, error) { - tr, err := ps.TektonV1beta1().TaskRuns(namespace).Get(ctx, name, metav1.GetOptions{}) + tr, err := ps.TektonV1().TaskRuns(namespace).Get(ctx, name, metav1.GetOptions{}) if err != nil { t.Fatalf("error getting taskrun: %v", err) } - return objects.NewTaskRunObject(tr), nil + return objects.NewTaskRunObjectV1(tr), nil } func WatchObject(t *testing.T, ctx context.Context, ps pipelineclientset.Interface, obj objects.TektonObject) (watch.Interface, error) { switch o := obj.GetObject().(type) { - case *v1beta1.PipelineRun: - return ps.TektonV1beta1().PipelineRuns(obj.GetNamespace()).Watch(ctx, metav1.SingleObject(metav1.ObjectMeta{ + case *v1.PipelineRun: + return ps.TektonV1().PipelineRuns(obj.GetNamespace()).Watch(ctx, metav1.SingleObject(metav1.ObjectMeta{ Name: o.GetName(), Namespace: o.GetNamespace(), })) - case *v1beta1.TaskRun: - return ps.TektonV1beta1().TaskRuns(obj.GetNamespace()).Watch(ctx, metav1.SingleObject(metav1.ObjectMeta{ + case *v1.TaskRun: + return ps.TektonV1().TaskRuns(obj.GetNamespace()).Watch(ctx, metav1.SingleObject(metav1.ObjectMeta{ Name: o.GetName(), Namespace: o.GetNamespace(), })) diff --git a/test/e2e_test.go b/test/e2e_test.go index 1f8c135ed4..2b8d048ee8 100644 --- a/test/e2e_test.go +++ b/test/e2e_test.go @@ -635,13 +635,13 @@ var imageTaskRun = v1beta1.TaskRun{ } func getTaskRunObject(ns string) objects.TektonObject { - o := objects.NewTaskRunObject(&imageTaskRun) + o := objects.NewTaskRunObjectV1(&imageTaskRun) o.Namespace = ns return o } func getTaskRunObjectWithParams(ns string, params []v1beta1.Param) objects.TektonObject { - o := objects.NewTaskRunObject(&imageTaskRun) + o := objects.NewTaskRunObjectV1(&imageTaskRun) o.Namespace = ns o.Spec.Params = params return o @@ -672,13 +672,13 @@ var imagePipelineRun = v1beta1.PipelineRun{ } func getPipelineRunObject(ns string) objects.TektonObject { - o := objects.NewPipelineRunObject(&imagePipelineRun) + o := objects.NewPipelineRunObjectV1(&imagePipelineRun) o.Namespace = ns return o } func getPipelineRunObjectWithParams(ns string, params []v1beta1.Param) objects.TektonObject { - o := objects.NewPipelineRunObject(&imagePipelineRun) + o := objects.NewPipelineRunObjectV1(&imagePipelineRun) o.Namespace = ns o.Spec.Params = params return o diff --git a/test/examples_test.go b/test/examples_test.go index dcce933ac2..bc06653fff 100644 --- 
a/test/examples_test.go +++ b/test/examples_test.go @@ -470,7 +470,7 @@ func taskRunFromExample(t *testing.T, ns, example string) objects.TektonObject { t.Fatal(err) } tr.Namespace = ns - return objects.NewTaskRunObject(tr) + return objects.NewTaskRunObjectV1(tr) } func pipelineRunFromExample(t *testing.T, ns, example string) objects.TektonObject { @@ -483,7 +483,7 @@ func pipelineRunFromExample(t *testing.T, ns, example string) objects.TektonObje t.Fatal(err) } pr.Namespace = ns - return objects.NewPipelineRunObject(pr) + return objects.NewPipelineRunObjectV1(pr) } func ignoreEnvironmentAnnotationsAndLabels(key string, value any) bool { diff --git a/test/kaniko.go b/test/kaniko.go index 401b1cdde0..206309a836 100644 --- a/test/kaniko.go +++ b/test/kaniko.go @@ -23,83 +23,83 @@ import ( "github.com/google/go-containerregistry/pkg/name" "github.com/tektoncd/chains/pkg/chains" "github.com/tektoncd/chains/pkg/chains/objects" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - v1 "k8s.io/api/core/v1" + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) const taskName = "kaniko-task" func kanikoPipelineRun(ns string) objects.TektonObject { - imagePipelineRun := v1beta1.PipelineRun{ + imagePipelineRun := v1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "image-pipelinerun", Namespace: ns, Annotations: map[string]string{chains.RekorAnnotation: "true"}, }, - Spec: v1beta1.PipelineRunSpec{ - PipelineSpec: &v1beta1.PipelineSpec{ - Tasks: []v1beta1.PipelineTask{{ + Spec: v1.PipelineRunSpec{ + PipelineSpec: &v1.PipelineSpec{ + Tasks: []v1.PipelineTask{{ Name: "kaniko", - TaskRef: &v1beta1.TaskRef{ + TaskRef: &v1.TaskRef{ Name: "kaniko-task", - Kind: v1beta1.NamespacedTaskKind, + Kind: v1.NamespacedTaskKind, }, }}, - Results: []v1beta1.PipelineResult{{ + Results: []v1.PipelineResult{{ Name: "IMAGE_URL", - Value: *v1beta1.NewStructuredValues("$(tasks.kaniko.results.IMAGE_URL)"), + Value: *v1.NewStructuredValues("$(tasks.kaniko.results.IMAGE_URL)"), }, { Name: "IMAGE_DIGEST", - Value: *v1beta1.NewStructuredValues("$(tasks.kaniko.results.IMAGE_DIGEST)"), + Value: *v1.NewStructuredValues("$(tasks.kaniko.results.IMAGE_DIGEST)"), }}, }, }, } - return objects.NewPipelineRunObject(&imagePipelineRun) + return objects.NewPipelineRunObjectV1(&imagePipelineRun) } func kanikoTaskRun(namespace string) objects.TektonObject { - tr := &v1beta1.TaskRun{ + tr := &v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "kaniko-taskrun", Namespace: namespace, }, - Spec: v1beta1.TaskRunSpec{ - TaskRef: &v1beta1.TaskRef{ + Spec: v1.TaskRunSpec{ + TaskRef: &v1.TaskRef{ Name: taskName, }, }, } - return objects.NewTaskRunObject(tr) + return objects.NewTaskRunObjectV1(tr) } -func kanikoTask(t *testing.T, namespace, destinationImage string) *v1beta1.Task { +func kanikoTask(t *testing.T, namespace, destinationImage string) *v1.Task { ref, err := name.ParseReference(destinationImage) if err != nil { t.Fatalf("unable to parse image name: %v", err) } - return &v1beta1.Task{ + return &v1.Task{ ObjectMeta: metav1.ObjectMeta{ Name: taskName, Namespace: namespace, }, - Spec: v1beta1.TaskSpec{ - Results: []v1beta1.TaskResult{ + Spec: v1.TaskSpec{ + Results: []v1.TaskResult{ {Name: "IMAGE_URL"}, {Name: "IMAGE_DIGEST"}, }, - Steps: []v1beta1.Step{{ + Steps: []v1.Step{{ Name: "create-dockerfile", Image: "bash:latest", - VolumeMounts: []v1.VolumeMount{{ + VolumeMounts: []corev1.VolumeMount{{ Name: "dockerfile", MountPath: "/dockerfile", }}, 
Script: "#!/usr/bin/env bash\necho \"FROM gcr.io/distroless/base@sha256:6ec6da1888b18dd971802c2a58a76a7702902b4c9c1be28f38e75e871cedc2df\" > /dockerfile/Dockerfile", }, { Name: "build-and-push", - Image: "gcr.io/kaniko-project/executor:v1.6.0", + Image: "gcr.io/kaniko-project/executor:corev1.6.0", Command: []string{"/kaniko/executor"}, Args: []string{ "--dockerfile=/dockerfile/Dockerfile", @@ -109,23 +109,23 @@ func kanikoTask(t *testing.T, namespace, destinationImage string) *v1beta1.Task // Need this to push the image to the insecure registry "--insecure", }, - VolumeMounts: []v1.VolumeMount{{ + VolumeMounts: []corev1.VolumeMount{{ Name: "dockerfile", MountPath: "/dockerfile", }}, }, { Name: "save-image-url", Image: "bash:latest", - VolumeMounts: []v1.VolumeMount{{ + VolumeMounts: []corev1.VolumeMount{{ Name: "dockerfile", MountPath: "/dockerfile", }}, Script: fmt.Sprintf("#!/usr/bin/env bash\necho %s | tee $(results.IMAGE_URL.path)", ref.String()), }, }, - Volumes: []v1.Volume{{ + Volumes: []corev1.Volume{{ Name: "dockerfile", - VolumeSource: v1.VolumeSource{EmptyDir: &v1.EmptyDirVolumeSource{}}, + VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}, }}, }, } @@ -144,14 +144,14 @@ cosign verify --allow-insecure-registry --key cosign.pub %s cosign verify-attestation --allow-insecure-registry --key cosign.pub %s` script = fmt.Sprintf(script, publicKey, destinationImage, destinationImage) - return objects.NewTaskRunObject(&v1beta1.TaskRun{ + return objects.NewTaskRunObjectV1(&v1.TaskRun{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "verify-kaniko-taskrun", Namespace: namespace, }, - Spec: v1beta1.TaskRunSpec{ - TaskSpec: &v1beta1.TaskSpec{ - Steps: []v1beta1.Step{{ + Spec: v1.TaskRunSpec{ + TaskSpec: &v1.TaskSpec{ + Steps: []v1.Step{{ Name: "verify-image", Image: "gcr.io/projectsigstore/cosign/ci/cosign:d764e8b89934dc1043bd1b13112a66641c63a038@sha256:228c37f9f37415efbd6a4ff16aae81197206ce1410a227bcab8ac8b039b36237", Script: script, diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake/fake.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake/fake.go deleted file mode 100644 index 2b97a8ec5d..0000000000 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake/fake.go +++ /dev/null @@ -1,40 +0,0 @@ -/* -Copyright 2020 The Tekton Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. 
- -package fake - -import ( - context "context" - - fake "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory/fake" - pipelinerun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" -) - -var Get = pipelinerun.Get - -func init() { - injection.Fake.RegisterInformer(withInformer) -} - -func withInformer(ctx context.Context) (context.Context, controller.Informer) { - f := fake.Get(ctx) - inf := f.Tekton().V1beta1().PipelineRuns() - return context.WithValue(ctx, pipelinerun.Key{}, inf), inf.Informer() -} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake/fake.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake/fake.go deleted file mode 100644 index 79919b4612..0000000000 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake/fake.go +++ /dev/null @@ -1,40 +0,0 @@ -/* -Copyright 2020 The Tekton Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. - -package fake - -import ( - context "context" - - fake "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory/fake" - taskrun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" -) - -var Get = taskrun.Get - -func init() { - injection.Fake.RegisterInformer(withInformer) -} - -func withInformer(ctx context.Context) (context.Context, controller.Informer) { - f := fake.Get(ctx) - inf := f.Tekton().V1beta1().TaskRuns() - return context.WithValue(ctx, taskrun.Key{}, inf), inf.Informer() -} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/controller.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/controller.go new file mode 100644 index 0000000000..b1efcea654 --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/controller.go @@ -0,0 +1,170 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. 
+ +package pipelinerun + +import ( + context "context" + fmt "fmt" + reflect "reflect" + strings "strings" + + versionedscheme "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/scheme" + client "github.com/tektoncd/pipeline/pkg/client/injection/client" + pipelinerun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun" + zap "go.uber.org/zap" + corev1 "k8s.io/api/core/v1" + labels "k8s.io/apimachinery/pkg/labels" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + scheme "k8s.io/client-go/kubernetes/scheme" + v1 "k8s.io/client-go/kubernetes/typed/core/v1" + record "k8s.io/client-go/tools/record" + kubeclient "knative.dev/pkg/client/injection/kube/client" + controller "knative.dev/pkg/controller" + logging "knative.dev/pkg/logging" + logkey "knative.dev/pkg/logging/logkey" + reconciler "knative.dev/pkg/reconciler" +) + +const ( + defaultControllerAgentName = "pipelinerun-controller" + defaultFinalizerName = "pipelineruns.tekton.dev" +) + +// NewImpl returns a controller.Impl that handles queuing and feeding work from +// the queue through an implementation of controller.Reconciler, delegating to +// the provided Interface and optional Finalizer methods. OptionsFn is used to return +// controller.ControllerOptions to be used by the internal reconciler. +func NewImpl(ctx context.Context, r Interface, optionsFns ...controller.OptionsFn) *controller.Impl { + logger := logging.FromContext(ctx) + + // Check the options function input. It should be 0 or 1. + if len(optionsFns) > 1 { + logger.Fatal("Up to one options function is supported, found: ", len(optionsFns)) + } + + pipelinerunInformer := pipelinerun.Get(ctx) + + lister := pipelinerunInformer.Lister() + + var promoteFilterFunc func(obj interface{}) bool + var promoteFunc = func(bkt reconciler.Bucket) {} + + rec := &reconcilerImpl{ + LeaderAwareFuncs: reconciler.LeaderAwareFuncs{ + PromoteFunc: func(bkt reconciler.Bucket, enq func(reconciler.Bucket, types.NamespacedName)) error { + + // Signal promotion event + promoteFunc(bkt) + + all, err := lister.List(labels.Everything()) + if err != nil { + return err + } + for _, elt := range all { + if promoteFilterFunc != nil { + if ok := promoteFilterFunc(elt); !ok { + continue + } + } + enq(bkt, types.NamespacedName{ + Namespace: elt.GetNamespace(), + Name: elt.GetName(), + }) + } + return nil + }, + }, + Client: client.Get(ctx), + Lister: lister, + reconciler: r, + finalizerName: defaultFinalizerName, + } + + ctrType := reflect.TypeOf(r).Elem() + ctrTypeName := fmt.Sprintf("%s.%s", ctrType.PkgPath(), ctrType.Name()) + ctrTypeName = strings.ReplaceAll(ctrTypeName, "/", ".") + + logger = logger.With( + zap.String(logkey.ControllerType, ctrTypeName), + zap.String(logkey.Kind, "tekton.dev.PipelineRun"), + ) + + impl := controller.NewContext(ctx, rec, controller.ControllerOptions{WorkQueueName: ctrTypeName, Logger: logger}) + agentName := defaultControllerAgentName + + // Pass impl to the options. Save any optional results. 
+ for _, fn := range optionsFns { + opts := fn(impl) + if opts.ConfigStore != nil { + rec.configStore = opts.ConfigStore + } + if opts.FinalizerName != "" { + rec.finalizerName = opts.FinalizerName + } + if opts.AgentName != "" { + agentName = opts.AgentName + } + if opts.SkipStatusUpdates { + rec.skipStatusUpdates = true + } + if opts.DemoteFunc != nil { + rec.DemoteFunc = opts.DemoteFunc + } + if opts.PromoteFilterFunc != nil { + promoteFilterFunc = opts.PromoteFilterFunc + } + if opts.PromoteFunc != nil { + promoteFunc = opts.PromoteFunc + } + } + + rec.Recorder = createRecorder(ctx, agentName) + + return impl +} + +func createRecorder(ctx context.Context, agentName string) record.EventRecorder { + logger := logging.FromContext(ctx) + + recorder := controller.GetEventRecorder(ctx) + if recorder == nil { + // Create event broadcaster + logger.Debug("Creating event broadcaster") + eventBroadcaster := record.NewBroadcaster() + watches := []watch.Interface{ + eventBroadcaster.StartLogging(logger.Named("event-broadcaster").Infof), + eventBroadcaster.StartRecordingToSink( + &v1.EventSinkImpl{Interface: kubeclient.Get(ctx).CoreV1().Events("")}), + } + recorder = eventBroadcaster.NewRecorder(scheme.Scheme, corev1.EventSource{Component: agentName}) + go func() { + <-ctx.Done() + for _, w := range watches { + w.Stop() + } + }() + } + + return recorder +} + +func init() { + versionedscheme.AddToScheme(scheme.Scheme) +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/reconciler.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/reconciler.go new file mode 100644 index 0000000000..f49825001b --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/reconciler.go @@ -0,0 +1,432 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package pipelinerun + +import ( + context "context" + json "encoding/json" + fmt "fmt" + + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" + pipelinev1 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1" + zap "go.uber.org/zap" + "go.uber.org/zap/zapcore" + corev1 "k8s.io/api/core/v1" + equality "k8s.io/apimachinery/pkg/api/equality" + errors "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + labels "k8s.io/apimachinery/pkg/labels" + types "k8s.io/apimachinery/pkg/types" + sets "k8s.io/apimachinery/pkg/util/sets" + record "k8s.io/client-go/tools/record" + controller "knative.dev/pkg/controller" + kmp "knative.dev/pkg/kmp" + logging "knative.dev/pkg/logging" + reconciler "knative.dev/pkg/reconciler" +) + +// Interface defines the strongly typed interfaces to be implemented by a +// controller reconciling v1.PipelineRun. +type Interface interface { + // ReconcileKind implements custom logic to reconcile v1.PipelineRun. 
Any changes + // to the objects .Status or .Finalizers will be propagated to the stored + // object. It is recommended that implementors do not call any update calls + // for the Kind inside of ReconcileKind, it is the responsibility of the calling + // controller to propagate those properties. The resource passed to ReconcileKind + // will always have an empty deletion timestamp. + ReconcileKind(ctx context.Context, o *v1.PipelineRun) reconciler.Event +} + +// Finalizer defines the strongly typed interfaces to be implemented by a +// controller finalizing v1.PipelineRun. +type Finalizer interface { + // FinalizeKind implements custom logic to finalize v1.PipelineRun. Any changes + // to the objects .Status or .Finalizers will be ignored. Returning a nil or + // Normal type reconciler.Event will allow the finalizer to be deleted on + // the resource. The resource passed to FinalizeKind will always have a set + // deletion timestamp. + FinalizeKind(ctx context.Context, o *v1.PipelineRun) reconciler.Event +} + +// ReadOnlyInterface defines the strongly typed interfaces to be implemented by a +// controller reconciling v1.PipelineRun if they want to process resources for which +// they are not the leader. +type ReadOnlyInterface interface { + // ObserveKind implements logic to observe v1.PipelineRun. + // This method should not write to the API. + ObserveKind(ctx context.Context, o *v1.PipelineRun) reconciler.Event +} + +type doReconcile func(ctx context.Context, o *v1.PipelineRun) reconciler.Event + +// reconcilerImpl implements controller.Reconciler for v1.PipelineRun resources. +type reconcilerImpl struct { + // LeaderAwareFuncs is inlined to help us implement reconciler.LeaderAware. + reconciler.LeaderAwareFuncs + + // Client is used to write back status updates. + Client versioned.Interface + + // Listers index properties about resources. + Lister pipelinev1.PipelineRunLister + + // Recorder is an event recorder for recording Event resources to the + // Kubernetes API. + Recorder record.EventRecorder + + // configStore allows for decorating a context with config maps. + // +optional + configStore reconciler.ConfigStore + + // reconciler is the implementation of the business logic of the resource. + reconciler Interface + + // finalizerName is the name of the finalizer to reconcile. + finalizerName string + + // skipStatusUpdates configures whether or not this reconciler automatically updates + // the status of the reconciled resource. + skipStatusUpdates bool +} + +// Check that our Reconciler implements controller.Reconciler. +var _ controller.Reconciler = (*reconcilerImpl)(nil) + +// Check that our generated Reconciler is always LeaderAware. +var _ reconciler.LeaderAware = (*reconcilerImpl)(nil) + +func NewReconciler(ctx context.Context, logger *zap.SugaredLogger, client versioned.Interface, lister pipelinev1.PipelineRunLister, recorder record.EventRecorder, r Interface, options ...controller.Options) controller.Reconciler { + // Check the options function input. It should be 0 or 1. + if len(options) > 1 { + logger.Fatal("Up to one options struct is supported, found: ", len(options)) + } + + // Fail fast when users inadvertently implement the other LeaderAware interface. + // For the typed reconcilers, Promote shouldn't take any arguments. + if _, ok := r.(reconciler.LeaderAware); ok { + logger.Fatalf("%T implements the incorrect LeaderAware interface. 
Promote() should not take an argument as genreconciler handles the enqueuing automatically.", r) + } + + rec := &reconcilerImpl{ + LeaderAwareFuncs: reconciler.LeaderAwareFuncs{ + PromoteFunc: func(bkt reconciler.Bucket, enq func(reconciler.Bucket, types.NamespacedName)) error { + all, err := lister.List(labels.Everything()) + if err != nil { + return err + } + for _, elt := range all { + // TODO: Consider letting users specify a filter in options. + enq(bkt, types.NamespacedName{ + Namespace: elt.GetNamespace(), + Name: elt.GetName(), + }) + } + return nil + }, + }, + Client: client, + Lister: lister, + Recorder: recorder, + reconciler: r, + finalizerName: defaultFinalizerName, + } + + for _, opts := range options { + if opts.ConfigStore != nil { + rec.configStore = opts.ConfigStore + } + if opts.FinalizerName != "" { + rec.finalizerName = opts.FinalizerName + } + if opts.SkipStatusUpdates { + rec.skipStatusUpdates = true + } + if opts.DemoteFunc != nil { + rec.DemoteFunc = opts.DemoteFunc + } + } + + return rec +} + +// Reconcile implements controller.Reconciler +func (r *reconcilerImpl) Reconcile(ctx context.Context, key string) error { + logger := logging.FromContext(ctx) + + // Initialize the reconciler state. This will convert the namespace/name + // string into a distinct namespace and name, determine if this instance of + // the reconciler is the leader, and any additional interfaces implemented + // by the reconciler. Returns an error is the resource key is invalid. + s, err := newState(key, r) + if err != nil { + logger.Error("Invalid resource key: ", key) + return nil + } + + // If we are not the leader, and we don't implement either ReadOnly + // observer interfaces, then take a fast-path out. + if s.isNotLeaderNorObserver() { + return controller.NewSkipKey(key) + } + + // If configStore is set, attach the frozen configuration to the context. + if r.configStore != nil { + ctx = r.configStore.ToContext(ctx) + } + + // Add the recorder to context. + ctx = controller.WithEventRecorder(ctx, r.Recorder) + + // Get the resource with this namespace/name. + + getter := r.Lister.PipelineRuns(s.namespace) + + original, err := getter.Get(s.name) + + if errors.IsNotFound(err) { + // The resource may no longer exist, in which case we stop processing and call + // the ObserveDeletion handler if appropriate. + logger.Debugf("Resource %q no longer exists", key) + if del, ok := r.reconciler.(reconciler.OnDeletionInterface); ok { + return del.ObserveDeletion(ctx, types.NamespacedName{ + Namespace: s.namespace, + Name: s.name, + }) + } + return nil + } else if err != nil { + return err + } + + // Don't modify the informers copy. + resource := original.DeepCopy() + + var reconcileEvent reconciler.Event + + name, do := s.reconcileMethodFor(resource) + // Append the target method to the logger. + logger = logger.With(zap.String("targetMethod", name)) + switch name { + case reconciler.DoReconcileKind: + // Set and update the finalizer on resource if r.reconciler + // implements Finalizer. + if resource, err = r.setFinalizerIfFinalizer(ctx, resource); err != nil { + return fmt.Errorf("failed to set finalizers: %w", err) + } + + // Reconcile this copy of the resource and then write back any status + // updates regardless of whether the reconciliation errored out. + reconcileEvent = do(ctx, resource) + + case reconciler.DoFinalizeKind: + // For finalizing reconcilers, if this resource being marked for deletion + // and reconciled cleanly (nil or normal event), remove the finalizer. 
+ reconcileEvent = do(ctx, resource) + + if resource, err = r.clearFinalizer(ctx, resource, reconcileEvent); err != nil { + return fmt.Errorf("failed to clear finalizers: %w", err) + } + + case reconciler.DoObserveKind: + // Observe any changes to this resource, since we are not the leader. + reconcileEvent = do(ctx, resource) + + } + + // Synchronize the status. + switch { + case r.skipStatusUpdates: + // This reconciler implementation is configured to skip resource updates. + // This may mean this reconciler does not observe spec, but reconciles external changes. + case equality.Semantic.DeepEqual(original.Status, resource.Status): + // If we didn't change anything then don't call updateStatus. + // This is important because the copy we loaded from the injectionInformer's + // cache may be stale and we don't want to overwrite a prior update + // to status with this stale state. + case !s.isLeader: + // High-availability reconcilers may have many replicas watching the resource, but only + // the elected leader is expected to write modifications. + logger.Warn("Saw status changes when we aren't the leader!") + default: + if err = r.updateStatus(ctx, logger, original, resource); err != nil { + logger.Warnw("Failed to update resource status", zap.Error(err)) + r.Recorder.Eventf(resource, corev1.EventTypeWarning, "UpdateFailed", + "Failed to update status for %q: %v", resource.Name, err) + return err + } + } + + // Report the reconciler event, if any. + if reconcileEvent != nil { + var event *reconciler.ReconcilerEvent + if reconciler.EventAs(reconcileEvent, &event) { + logger.Infow("Returned an event", zap.Any("event", reconcileEvent)) + r.Recorder.Event(resource, event.EventType, event.Reason, event.Error()) + + // the event was wrapped inside an error, consider the reconciliation as failed + if _, isEvent := reconcileEvent.(*reconciler.ReconcilerEvent); !isEvent { + return reconcileEvent + } + return nil + } + + if controller.IsSkipKey(reconcileEvent) { + // This is a wrapped error, don't emit an event. + } else if ok, _ := controller.IsRequeueKey(reconcileEvent); ok { + // This is a wrapped error, don't emit an event. + } else { + logger.Errorw("Returned an error", zap.Error(reconcileEvent)) + r.Recorder.Event(resource, corev1.EventTypeWarning, "InternalError", reconcileEvent.Error()) + } + return reconcileEvent + } + + return nil +} + +func (r *reconcilerImpl) updateStatus(ctx context.Context, logger *zap.SugaredLogger, existing *v1.PipelineRun, desired *v1.PipelineRun) error { + existing = existing.DeepCopy() + return reconciler.RetryUpdateConflicts(func(attempts int) (err error) { + // The first iteration tries to use the injectionInformer's state, subsequent attempts fetch the latest state via API. + if attempts > 0 { + + getter := r.Client.TektonV1().PipelineRuns(desired.Namespace) + + existing, err = getter.Get(ctx, desired.Name, metav1.GetOptions{}) + if err != nil { + return err + } + } + + // If there's nothing to update, just return. 
+ if equality.Semantic.DeepEqual(existing.Status, desired.Status) { + return nil + } + + if logger.Desugar().Core().Enabled(zapcore.DebugLevel) { + if diff, err := kmp.SafeDiff(existing.Status, desired.Status); err == nil && diff != "" { + logger.Debug("Updating status with: ", diff) + } + } + + existing.Status = desired.Status + + updater := r.Client.TektonV1().PipelineRuns(existing.Namespace) + + _, err = updater.UpdateStatus(ctx, existing, metav1.UpdateOptions{}) + return err + }) +} + +// updateFinalizersFiltered will update the Finalizers of the resource. +// TODO: this method could be generic and sync all finalizers. For now it only +// updates defaultFinalizerName or its override. +func (r *reconcilerImpl) updateFinalizersFiltered(ctx context.Context, resource *v1.PipelineRun, desiredFinalizers sets.String) (*v1.PipelineRun, error) { + // Don't modify the informers copy. + existing := resource.DeepCopy() + + var finalizers []string + + // If there's nothing to update, just return. + existingFinalizers := sets.NewString(existing.Finalizers...) + + if desiredFinalizers.Has(r.finalizerName) { + if existingFinalizers.Has(r.finalizerName) { + // Nothing to do. + return resource, nil + } + // Add the finalizer. + finalizers = append(existing.Finalizers, r.finalizerName) + } else { + if !existingFinalizers.Has(r.finalizerName) { + // Nothing to do. + return resource, nil + } + // Remove the finalizer. + existingFinalizers.Delete(r.finalizerName) + finalizers = existingFinalizers.List() + } + + mergePatch := map[string]interface{}{ + "metadata": map[string]interface{}{ + "finalizers": finalizers, + "resourceVersion": existing.ResourceVersion, + }, + } + + patch, err := json.Marshal(mergePatch) + if err != nil { + return resource, err + } + + patcher := r.Client.TektonV1().PipelineRuns(resource.Namespace) + + resourceName := resource.Name + updated, err := patcher.Patch(ctx, resourceName, types.MergePatchType, patch, metav1.PatchOptions{}) + if err != nil { + r.Recorder.Eventf(existing, corev1.EventTypeWarning, "FinalizerUpdateFailed", + "Failed to update finalizers for %q: %v", resourceName, err) + } else { + r.Recorder.Eventf(updated, corev1.EventTypeNormal, "FinalizerUpdate", + "Updated %q finalizers", resource.GetName()) + } + return updated, err +} + +func (r *reconcilerImpl) setFinalizerIfFinalizer(ctx context.Context, resource *v1.PipelineRun) (*v1.PipelineRun, error) { + if _, ok := r.reconciler.(Finalizer); !ok { + return resource, nil + } + + finalizers := sets.NewString(resource.Finalizers...) + + // If this resource is not being deleted, mark the finalizer. + if resource.GetDeletionTimestamp().IsZero() { + finalizers.Insert(r.finalizerName) + } + + // Synchronize the finalizers filtered by r.finalizerName. + return r.updateFinalizersFiltered(ctx, resource, finalizers) +} + +func (r *reconcilerImpl) clearFinalizer(ctx context.Context, resource *v1.PipelineRun, reconcileEvent reconciler.Event) (*v1.PipelineRun, error) { + if _, ok := r.reconciler.(Finalizer); !ok { + return resource, nil + } + if resource.GetDeletionTimestamp().IsZero() { + return resource, nil + } + + finalizers := sets.NewString(resource.Finalizers...) + + if reconcileEvent != nil { + var event *reconciler.ReconcilerEvent + if reconciler.EventAs(reconcileEvent, &event) { + if event.EventType == corev1.EventTypeNormal { + finalizers.Delete(r.finalizerName) + } + } + } else { + finalizers.Delete(r.finalizerName) + } + + // Synchronize the finalizers filtered by r.finalizerName. 
+ return r.updateFinalizersFiltered(ctx, resource, finalizers) +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/state.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/state.go new file mode 100644 index 0000000000..35540fcf4e --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun/state.go @@ -0,0 +1,97 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package pipelinerun + +import ( + fmt "fmt" + + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + types "k8s.io/apimachinery/pkg/types" + cache "k8s.io/client-go/tools/cache" + reconciler "knative.dev/pkg/reconciler" +) + +// state is used to track the state of a reconciler in a single run. +type state struct { + // key is the original reconciliation key from the queue. + key string + // namespace is the namespace split from the reconciliation key. + namespace string + // name is the name split from the reconciliation key. + name string + // reconciler is the reconciler. + reconciler Interface + // roi is the read only interface cast of the reconciler. + roi ReadOnlyInterface + // isROI (Read Only Interface) the reconciler only observes reconciliation. + isROI bool + // isLeader the instance of the reconciler is the elected leader. + isLeader bool +} + +func newState(key string, r *reconcilerImpl) (*state, error) { + // Convert the namespace/name string into a distinct namespace and name. + namespace, name, err := cache.SplitMetaNamespaceKey(key) + if err != nil { + return nil, fmt.Errorf("invalid resource key: %s", key) + } + + roi, isROI := r.reconciler.(ReadOnlyInterface) + + isLeader := r.IsLeaderFor(types.NamespacedName{ + Namespace: namespace, + Name: name, + }) + + return &state{ + key: key, + namespace: namespace, + name: name, + reconciler: r.reconciler, + roi: roi, + isROI: isROI, + isLeader: isLeader, + }, nil +} + +// isNotLeaderNorObserver checks to see if this reconciler with the current +// state is enabled to do any work or not. +// isNotLeaderNorObserver returns true when there is no work possible for the +// reconciler. +func (s *state) isNotLeaderNorObserver() bool { + if !s.isLeader && !s.isROI { + // If we are not the leader, and we don't implement the ReadOnly + // interface, then take a fast-path out. 
+ return true + } + return false +} + +func (s *state) reconcileMethodFor(o *v1.PipelineRun) (string, doReconcile) { + if o.GetDeletionTimestamp().IsZero() { + if s.isLeader { + return reconciler.DoReconcileKind, s.reconciler.ReconcileKind + } else if s.isROI { + return reconciler.DoObserveKind, s.roi.ObserveKind + } + } else if fin, ok := s.reconciler.(Finalizer); s.isLeader && ok { + return reconciler.DoFinalizeKind, fin.FinalizeKind + } + return "unknown", nil +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/controller.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/controller.go new file mode 100644 index 0000000000..2cf0767987 --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/controller.go @@ -0,0 +1,170 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package taskrun + +import ( + context "context" + fmt "fmt" + reflect "reflect" + strings "strings" + + versionedscheme "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/scheme" + client "github.com/tektoncd/pipeline/pkg/client/injection/client" + taskrun "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun" + zap "go.uber.org/zap" + corev1 "k8s.io/api/core/v1" + labels "k8s.io/apimachinery/pkg/labels" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + scheme "k8s.io/client-go/kubernetes/scheme" + v1 "k8s.io/client-go/kubernetes/typed/core/v1" + record "k8s.io/client-go/tools/record" + kubeclient "knative.dev/pkg/client/injection/kube/client" + controller "knative.dev/pkg/controller" + logging "knative.dev/pkg/logging" + logkey "knative.dev/pkg/logging/logkey" + reconciler "knative.dev/pkg/reconciler" +) + +const ( + defaultControllerAgentName = "taskrun-controller" + defaultFinalizerName = "taskruns.tekton.dev" +) + +// NewImpl returns a controller.Impl that handles queuing and feeding work from +// the queue through an implementation of controller.Reconciler, delegating to +// the provided Interface and optional Finalizer methods. OptionsFn is used to return +// controller.ControllerOptions to be used by the internal reconciler. +func NewImpl(ctx context.Context, r Interface, optionsFns ...controller.OptionsFn) *controller.Impl { + logger := logging.FromContext(ctx) + + // Check the options function input. It should be 0 or 1. 
+ if len(optionsFns) > 1 { + logger.Fatal("Up to one options function is supported, found: ", len(optionsFns)) + } + + taskrunInformer := taskrun.Get(ctx) + + lister := taskrunInformer.Lister() + + var promoteFilterFunc func(obj interface{}) bool + var promoteFunc = func(bkt reconciler.Bucket) {} + + rec := &reconcilerImpl{ + LeaderAwareFuncs: reconciler.LeaderAwareFuncs{ + PromoteFunc: func(bkt reconciler.Bucket, enq func(reconciler.Bucket, types.NamespacedName)) error { + + // Signal promotion event + promoteFunc(bkt) + + all, err := lister.List(labels.Everything()) + if err != nil { + return err + } + for _, elt := range all { + if promoteFilterFunc != nil { + if ok := promoteFilterFunc(elt); !ok { + continue + } + } + enq(bkt, types.NamespacedName{ + Namespace: elt.GetNamespace(), + Name: elt.GetName(), + }) + } + return nil + }, + }, + Client: client.Get(ctx), + Lister: lister, + reconciler: r, + finalizerName: defaultFinalizerName, + } + + ctrType := reflect.TypeOf(r).Elem() + ctrTypeName := fmt.Sprintf("%s.%s", ctrType.PkgPath(), ctrType.Name()) + ctrTypeName = strings.ReplaceAll(ctrTypeName, "/", ".") + + logger = logger.With( + zap.String(logkey.ControllerType, ctrTypeName), + zap.String(logkey.Kind, "tekton.dev.TaskRun"), + ) + + impl := controller.NewContext(ctx, rec, controller.ControllerOptions{WorkQueueName: ctrTypeName, Logger: logger}) + agentName := defaultControllerAgentName + + // Pass impl to the options. Save any optional results. + for _, fn := range optionsFns { + opts := fn(impl) + if opts.ConfigStore != nil { + rec.configStore = opts.ConfigStore + } + if opts.FinalizerName != "" { + rec.finalizerName = opts.FinalizerName + } + if opts.AgentName != "" { + agentName = opts.AgentName + } + if opts.SkipStatusUpdates { + rec.skipStatusUpdates = true + } + if opts.DemoteFunc != nil { + rec.DemoteFunc = opts.DemoteFunc + } + if opts.PromoteFilterFunc != nil { + promoteFilterFunc = opts.PromoteFilterFunc + } + if opts.PromoteFunc != nil { + promoteFunc = opts.PromoteFunc + } + } + + rec.Recorder = createRecorder(ctx, agentName) + + return impl +} + +func createRecorder(ctx context.Context, agentName string) record.EventRecorder { + logger := logging.FromContext(ctx) + + recorder := controller.GetEventRecorder(ctx) + if recorder == nil { + // Create event broadcaster + logger.Debug("Creating event broadcaster") + eventBroadcaster := record.NewBroadcaster() + watches := []watch.Interface{ + eventBroadcaster.StartLogging(logger.Named("event-broadcaster").Infof), + eventBroadcaster.StartRecordingToSink( + &v1.EventSinkImpl{Interface: kubeclient.Get(ctx).CoreV1().Events("")}), + } + recorder = eventBroadcaster.NewRecorder(scheme.Scheme, corev1.EventSource{Component: agentName}) + go func() { + <-ctx.Done() + for _, w := range watches { + w.Stop() + } + }() + } + + return recorder +} + +func init() { + versionedscheme.AddToScheme(scheme.Scheme) +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/reconciler.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/reconciler.go new file mode 100644 index 0000000000..30a208a556 --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/reconciler.go @@ -0,0 +1,432 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package taskrun + +import ( + context "context" + json "encoding/json" + fmt "fmt" + + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" + pipelinev1 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1" + zap "go.uber.org/zap" + "go.uber.org/zap/zapcore" + corev1 "k8s.io/api/core/v1" + equality "k8s.io/apimachinery/pkg/api/equality" + errors "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + labels "k8s.io/apimachinery/pkg/labels" + types "k8s.io/apimachinery/pkg/types" + sets "k8s.io/apimachinery/pkg/util/sets" + record "k8s.io/client-go/tools/record" + controller "knative.dev/pkg/controller" + kmp "knative.dev/pkg/kmp" + logging "knative.dev/pkg/logging" + reconciler "knative.dev/pkg/reconciler" +) + +// Interface defines the strongly typed interfaces to be implemented by a +// controller reconciling v1.TaskRun. +type Interface interface { + // ReconcileKind implements custom logic to reconcile v1.TaskRun. Any changes + // to the objects .Status or .Finalizers will be propagated to the stored + // object. It is recommended that implementors do not call any update calls + // for the Kind inside of ReconcileKind, it is the responsibility of the calling + // controller to propagate those properties. The resource passed to ReconcileKind + // will always have an empty deletion timestamp. + ReconcileKind(ctx context.Context, o *v1.TaskRun) reconciler.Event +} + +// Finalizer defines the strongly typed interfaces to be implemented by a +// controller finalizing v1.TaskRun. +type Finalizer interface { + // FinalizeKind implements custom logic to finalize v1.TaskRun. Any changes + // to the objects .Status or .Finalizers will be ignored. Returning a nil or + // Normal type reconciler.Event will allow the finalizer to be deleted on + // the resource. The resource passed to FinalizeKind will always have a set + // deletion timestamp. + FinalizeKind(ctx context.Context, o *v1.TaskRun) reconciler.Event +} + +// ReadOnlyInterface defines the strongly typed interfaces to be implemented by a +// controller reconciling v1.TaskRun if they want to process resources for which +// they are not the leader. +type ReadOnlyInterface interface { + // ObserveKind implements logic to observe v1.TaskRun. + // This method should not write to the API. + ObserveKind(ctx context.Context, o *v1.TaskRun) reconciler.Event +} + +type doReconcile func(ctx context.Context, o *v1.TaskRun) reconciler.Event + +// reconcilerImpl implements controller.Reconciler for v1.TaskRun resources. +type reconcilerImpl struct { + // LeaderAwareFuncs is inlined to help us implement reconciler.LeaderAware. + reconciler.LeaderAwareFuncs + + // Client is used to write back status updates. + Client versioned.Interface + + // Listers index properties about resources. + Lister pipelinev1.TaskRunLister + + // Recorder is an event recorder for recording Event resources to the + // Kubernetes API. 
+ Recorder record.EventRecorder + + // configStore allows for decorating a context with config maps. + // +optional + configStore reconciler.ConfigStore + + // reconciler is the implementation of the business logic of the resource. + reconciler Interface + + // finalizerName is the name of the finalizer to reconcile. + finalizerName string + + // skipStatusUpdates configures whether or not this reconciler automatically updates + // the status of the reconciled resource. + skipStatusUpdates bool +} + +// Check that our Reconciler implements controller.Reconciler. +var _ controller.Reconciler = (*reconcilerImpl)(nil) + +// Check that our generated Reconciler is always LeaderAware. +var _ reconciler.LeaderAware = (*reconcilerImpl)(nil) + +func NewReconciler(ctx context.Context, logger *zap.SugaredLogger, client versioned.Interface, lister pipelinev1.TaskRunLister, recorder record.EventRecorder, r Interface, options ...controller.Options) controller.Reconciler { + // Check the options function input. It should be 0 or 1. + if len(options) > 1 { + logger.Fatal("Up to one options struct is supported, found: ", len(options)) + } + + // Fail fast when users inadvertently implement the other LeaderAware interface. + // For the typed reconcilers, Promote shouldn't take any arguments. + if _, ok := r.(reconciler.LeaderAware); ok { + logger.Fatalf("%T implements the incorrect LeaderAware interface. Promote() should not take an argument as genreconciler handles the enqueuing automatically.", r) + } + + rec := &reconcilerImpl{ + LeaderAwareFuncs: reconciler.LeaderAwareFuncs{ + PromoteFunc: func(bkt reconciler.Bucket, enq func(reconciler.Bucket, types.NamespacedName)) error { + all, err := lister.List(labels.Everything()) + if err != nil { + return err + } + for _, elt := range all { + // TODO: Consider letting users specify a filter in options. + enq(bkt, types.NamespacedName{ + Namespace: elt.GetNamespace(), + Name: elt.GetName(), + }) + } + return nil + }, + }, + Client: client, + Lister: lister, + Recorder: recorder, + reconciler: r, + finalizerName: defaultFinalizerName, + } + + for _, opts := range options { + if opts.ConfigStore != nil { + rec.configStore = opts.ConfigStore + } + if opts.FinalizerName != "" { + rec.finalizerName = opts.FinalizerName + } + if opts.SkipStatusUpdates { + rec.skipStatusUpdates = true + } + if opts.DemoteFunc != nil { + rec.DemoteFunc = opts.DemoteFunc + } + } + + return rec +} + +// Reconcile implements controller.Reconciler +func (r *reconcilerImpl) Reconcile(ctx context.Context, key string) error { + logger := logging.FromContext(ctx) + + // Initialize the reconciler state. This will convert the namespace/name + // string into a distinct namespace and name, determine if this instance of + // the reconciler is the leader, and any additional interfaces implemented + // by the reconciler. Returns an error is the resource key is invalid. + s, err := newState(key, r) + if err != nil { + logger.Error("Invalid resource key: ", key) + return nil + } + + // If we are not the leader, and we don't implement either ReadOnly + // observer interfaces, then take a fast-path out. + if s.isNotLeaderNorObserver() { + return controller.NewSkipKey(key) + } + + // If configStore is set, attach the frozen configuration to the context. + if r.configStore != nil { + ctx = r.configStore.ToContext(ctx) + } + + // Add the recorder to context. + ctx = controller.WithEventRecorder(ctx, r.Recorder) + + // Get the resource with this namespace/name. 
+
+	getter := r.Lister.TaskRuns(s.namespace)
+
+	original, err := getter.Get(s.name)
+
+	if errors.IsNotFound(err) {
+		// The resource may no longer exist, in which case we stop processing and call
+		// the ObserveDeletion handler if appropriate.
+		logger.Debugf("Resource %q no longer exists", key)
+		if del, ok := r.reconciler.(reconciler.OnDeletionInterface); ok {
+			return del.ObserveDeletion(ctx, types.NamespacedName{
+				Namespace: s.namespace,
+				Name:      s.name,
+			})
+		}
+		return nil
+	} else if err != nil {
+		return err
+	}
+
+	// Don't modify the informers copy.
+	resource := original.DeepCopy()
+
+	var reconcileEvent reconciler.Event
+
+	name, do := s.reconcileMethodFor(resource)
+	// Append the target method to the logger.
+	logger = logger.With(zap.String("targetMethod", name))
+	switch name {
+	case reconciler.DoReconcileKind:
+		// Set and update the finalizer on resource if r.reconciler
+		// implements Finalizer.
+		if resource, err = r.setFinalizerIfFinalizer(ctx, resource); err != nil {
+			return fmt.Errorf("failed to set finalizers: %w", err)
+		}
+
+		// Reconcile this copy of the resource and then write back any status
+		// updates regardless of whether the reconciliation errored out.
+		reconcileEvent = do(ctx, resource)
+
+	case reconciler.DoFinalizeKind:
+		// For finalizing reconcilers, if this resource is being marked for deletion
+		// and reconciled cleanly (nil or normal event), remove the finalizer.
+		reconcileEvent = do(ctx, resource)
+
+		if resource, err = r.clearFinalizer(ctx, resource, reconcileEvent); err != nil {
+			return fmt.Errorf("failed to clear finalizers: %w", err)
+		}
+
+	case reconciler.DoObserveKind:
+		// Observe any changes to this resource, since we are not the leader.
+		reconcileEvent = do(ctx, resource)
+
+	}
+
+	// Synchronize the status.
+	switch {
+	case r.skipStatusUpdates:
+		// This reconciler implementation is configured to skip resource updates.
+		// This may mean this reconciler does not observe spec, but reconciles external changes.
+	case equality.Semantic.DeepEqual(original.Status, resource.Status):
+		// If we didn't change anything then don't call updateStatus.
+		// This is important because the copy we loaded from the injectionInformer's
+		// cache may be stale and we don't want to overwrite a prior update
+		// to status with this stale state.
+	case !s.isLeader:
+		// High-availability reconcilers may have many replicas watching the resource, but only
+		// the elected leader is expected to write modifications.
+		logger.Warn("Saw status changes when we aren't the leader!")
+	default:
+		if err = r.updateStatus(ctx, logger, original, resource); err != nil {
+			logger.Warnw("Failed to update resource status", zap.Error(err))
+			r.Recorder.Eventf(resource, corev1.EventTypeWarning, "UpdateFailed",
+				"Failed to update status for %q: %v", resource.Name, err)
+			return err
+		}
+	}
+
+	// Report the reconciler event, if any.
+	if reconcileEvent != nil {
+		var event *reconciler.ReconcilerEvent
+		if reconciler.EventAs(reconcileEvent, &event) {
+			logger.Infow("Returned an event", zap.Any("event", reconcileEvent))
+			r.Recorder.Event(resource, event.EventType, event.Reason, event.Error())
+
+			// the event was wrapped inside an error, consider the reconciliation as failed
+			if _, isEvent := reconcileEvent.(*reconciler.ReconcilerEvent); !isEvent {
+				return reconcileEvent
+			}
+			return nil
+		}
+
+		if controller.IsSkipKey(reconcileEvent) {
+			// This is a wrapped error, don't emit an event.
+ } else if ok, _ := controller.IsRequeueKey(reconcileEvent); ok { + // This is a wrapped error, don't emit an event. + } else { + logger.Errorw("Returned an error", zap.Error(reconcileEvent)) + r.Recorder.Event(resource, corev1.EventTypeWarning, "InternalError", reconcileEvent.Error()) + } + return reconcileEvent + } + + return nil +} + +func (r *reconcilerImpl) updateStatus(ctx context.Context, logger *zap.SugaredLogger, existing *v1.TaskRun, desired *v1.TaskRun) error { + existing = existing.DeepCopy() + return reconciler.RetryUpdateConflicts(func(attempts int) (err error) { + // The first iteration tries to use the injectionInformer's state, subsequent attempts fetch the latest state via API. + if attempts > 0 { + + getter := r.Client.TektonV1().TaskRuns(desired.Namespace) + + existing, err = getter.Get(ctx, desired.Name, metav1.GetOptions{}) + if err != nil { + return err + } + } + + // If there's nothing to update, just return. + if equality.Semantic.DeepEqual(existing.Status, desired.Status) { + return nil + } + + if logger.Desugar().Core().Enabled(zapcore.DebugLevel) { + if diff, err := kmp.SafeDiff(existing.Status, desired.Status); err == nil && diff != "" { + logger.Debug("Updating status with: ", diff) + } + } + + existing.Status = desired.Status + + updater := r.Client.TektonV1().TaskRuns(existing.Namespace) + + _, err = updater.UpdateStatus(ctx, existing, metav1.UpdateOptions{}) + return err + }) +} + +// updateFinalizersFiltered will update the Finalizers of the resource. +// TODO: this method could be generic and sync all finalizers. For now it only +// updates defaultFinalizerName or its override. +func (r *reconcilerImpl) updateFinalizersFiltered(ctx context.Context, resource *v1.TaskRun, desiredFinalizers sets.String) (*v1.TaskRun, error) { + // Don't modify the informers copy. + existing := resource.DeepCopy() + + var finalizers []string + + // If there's nothing to update, just return. + existingFinalizers := sets.NewString(existing.Finalizers...) + + if desiredFinalizers.Has(r.finalizerName) { + if existingFinalizers.Has(r.finalizerName) { + // Nothing to do. + return resource, nil + } + // Add the finalizer. + finalizers = append(existing.Finalizers, r.finalizerName) + } else { + if !existingFinalizers.Has(r.finalizerName) { + // Nothing to do. + return resource, nil + } + // Remove the finalizer. + existingFinalizers.Delete(r.finalizerName) + finalizers = existingFinalizers.List() + } + + mergePatch := map[string]interface{}{ + "metadata": map[string]interface{}{ + "finalizers": finalizers, + "resourceVersion": existing.ResourceVersion, + }, + } + + patch, err := json.Marshal(mergePatch) + if err != nil { + return resource, err + } + + patcher := r.Client.TektonV1().TaskRuns(resource.Namespace) + + resourceName := resource.Name + updated, err := patcher.Patch(ctx, resourceName, types.MergePatchType, patch, metav1.PatchOptions{}) + if err != nil { + r.Recorder.Eventf(existing, corev1.EventTypeWarning, "FinalizerUpdateFailed", + "Failed to update finalizers for %q: %v", resourceName, err) + } else { + r.Recorder.Eventf(updated, corev1.EventTypeNormal, "FinalizerUpdate", + "Updated %q finalizers", resource.GetName()) + } + return updated, err +} + +func (r *reconcilerImpl) setFinalizerIfFinalizer(ctx context.Context, resource *v1.TaskRun) (*v1.TaskRun, error) { + if _, ok := r.reconciler.(Finalizer); !ok { + return resource, nil + } + + finalizers := sets.NewString(resource.Finalizers...) + + // If this resource is not being deleted, mark the finalizer. 
+ if resource.GetDeletionTimestamp().IsZero() { + finalizers.Insert(r.finalizerName) + } + + // Synchronize the finalizers filtered by r.finalizerName. + return r.updateFinalizersFiltered(ctx, resource, finalizers) +} + +func (r *reconcilerImpl) clearFinalizer(ctx context.Context, resource *v1.TaskRun, reconcileEvent reconciler.Event) (*v1.TaskRun, error) { + if _, ok := r.reconciler.(Finalizer); !ok { + return resource, nil + } + if resource.GetDeletionTimestamp().IsZero() { + return resource, nil + } + + finalizers := sets.NewString(resource.Finalizers...) + + if reconcileEvent != nil { + var event *reconciler.ReconcilerEvent + if reconciler.EventAs(reconcileEvent, &event) { + if event.EventType == corev1.EventTypeNormal { + finalizers.Delete(r.finalizerName) + } + } + } else { + finalizers.Delete(r.finalizerName) + } + + // Synchronize the finalizers filtered by r.finalizerName. + return r.updateFinalizersFiltered(ctx, resource, finalizers) +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/state.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/state.go new file mode 100644 index 0000000000..b989b339ba --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun/state.go @@ -0,0 +1,97 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package taskrun + +import ( + fmt "fmt" + + v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" + types "k8s.io/apimachinery/pkg/types" + cache "k8s.io/client-go/tools/cache" + reconciler "knative.dev/pkg/reconciler" +) + +// state is used to track the state of a reconciler in a single run. +type state struct { + // key is the original reconciliation key from the queue. + key string + // namespace is the namespace split from the reconciliation key. + namespace string + // name is the name split from the reconciliation key. + name string + // reconciler is the reconciler. + reconciler Interface + // roi is the read only interface cast of the reconciler. + roi ReadOnlyInterface + // isROI (Read Only Interface) the reconciler only observes reconciliation. + isROI bool + // isLeader the instance of the reconciler is the elected leader. + isLeader bool +} + +func newState(key string, r *reconcilerImpl) (*state, error) { + // Convert the namespace/name string into a distinct namespace and name. 
+ namespace, name, err := cache.SplitMetaNamespaceKey(key) + if err != nil { + return nil, fmt.Errorf("invalid resource key: %s", key) + } + + roi, isROI := r.reconciler.(ReadOnlyInterface) + + isLeader := r.IsLeaderFor(types.NamespacedName{ + Namespace: namespace, + Name: name, + }) + + return &state{ + key: key, + namespace: namespace, + name: name, + reconciler: r.reconciler, + roi: roi, + isROI: isROI, + isLeader: isLeader, + }, nil +} + +// isNotLeaderNorObserver checks to see if this reconciler with the current +// state is enabled to do any work or not. +// isNotLeaderNorObserver returns true when there is no work possible for the +// reconciler. +func (s *state) isNotLeaderNorObserver() bool { + if !s.isLeader && !s.isROI { + // If we are not the leader, and we don't implement the ReadOnly + // interface, then take a fast-path out. + return true + } + return false +} + +func (s *state) reconcileMethodFor(o *v1.TaskRun) (string, doReconcile) { + if o.GetDeletionTimestamp().IsZero() { + if s.isLeader { + return reconciler.DoReconcileKind, s.reconciler.ReconcileKind + } else if s.isROI { + return reconciler.DoObserveKind, s.roi.ObserveKind + } + } else if fin, ok := s.reconciler.(Finalizer); s.isLeader && ok { + return reconciler.DoFinalizeKind, fin.FinalizeKind + } + return "unknown", nil +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 1201f24e57..496dfe430b 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1899,9 +1899,9 @@ github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/clu github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/customrun github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/customrun/fake github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun -github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/pipelinerun/fake github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun -github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/taskrun/fake +github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/pipelinerun +github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/pipelinerun github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/taskrun github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1
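For reviewers tracing how the generated reconciler above gets used: a controller only needs to supply an implementation of the generated Interface (and optionally Finalizer); the generated reconcilerImpl then takes care of the informer lookup, finalizer bookkeeping, status patching, and leader awareness shown in Reconcile. A minimal sketch of such an implementation, with hypothetical package and type names (it is not part of this change), might look like:

package taskrunwatcher // hypothetical consumer package

import (
	"context"

	v1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1"
	taskrunreconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1/taskrun"
	"knative.dev/pkg/logging"
	"knative.dev/pkg/reconciler"
)

// Reconciler holds whatever state the business logic needs; the generated
// code only ever calls it through the Interface methods.
type Reconciler struct{}

// Compile-time check that Reconciler satisfies the generated Interface.
var _ taskrunreconciler.Interface = (*Reconciler)(nil)

// ReconcileKind receives a deep copy of the informer's TaskRun. Changes made
// to its .Status here are written back by the generated reconcilerImpl, and a
// returned reconciler.Event is surfaced as a Kubernetes Event.
func (r *Reconciler) ReconcileKind(ctx context.Context, tr *v1.TaskRun) reconciler.Event {
	logging.FromContext(ctx).Debugf("observed TaskRun %s/%s", tr.Namespace, tr.Name)
	return nil
}

An instance of this type would then be passed as the Interface argument to the generated NewReconciler shown above (or wired through the corresponding generated controller constructor).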