From de41e5c59289a7a35070af68e95087c62c872eb1 Mon Sep 17 00:00:00 2001 From: Tommy Li Date: Fri, 22 Dec 2023 09:45:04 -0800 Subject: [PATCH] chore(cleanup): Remove tekton catalog in v2 branch to prepare it for upstream merge (#1438) * remove tekton catalog in v2 to prepare it for upstream merge * remove tekton catalog ci --- .github/workflows/pipelineloop.yml | 30 - tekton-catalog/cache/LICENSE | 201 -- tekton-catalog/cache/README.md | 41 - tekton-catalog/cache/go.mod | 9 - .../cache/pkg/db/db_conn_manager.go | 62 - tekton-catalog/cache/pkg/db/mysql.go | 98 - tekton-catalog/cache/pkg/db/sqlite.go | 32 - tekton-catalog/cache/pkg/model/task_cache.go | 26 - tekton-catalog/cache/pkg/task_cache_store.go | 85 - .../cache/pkg/task_cache_store_test.go | 162 -- .../hack/boilerplate/add-boilerplate.sh | 38 - .../hack/boilerplate/boilerplate.go.txt | 13 - .../exit-handler/hack/update-codegen.sh | 61 - tekton-catalog/objectstore/LICENSE | 201 -- tekton-catalog/objectstore/README.md | 127 - tekton-catalog/objectstore/go.mod | 5 - .../pkg/writer/objectstore_writer.go | 108 - .../pkg/writer/objectstorelogger.go | 132 - tekton-catalog/pipeline-loops/.gitignore | 2 - tekton-catalog/pipeline-loops/Dockerfile | 11 - tekton-catalog/pipeline-loops/LICENSE | 201 -- tekton-catalog/pipeline-loops/Makefile | 69 - tekton-catalog/pipeline-loops/README.md | 181 -- tekton-catalog/pipeline-loops/cmd/cli/main.go | 286 -- .../pipeline-loops/cmd/controller/main.go | 43 - .../pipeline-loops/cmd/webhook/main.go | 127 - .../config/200-serviceaccount.yaml | 32 - .../config/201-clusterrole.yaml | 109 - .../pipeline-loops/config/201-role.yaml | 68 - .../config/201-rolebinding.yaml | 49 - .../config/202-clusterrolebinding.yaml | 102 - .../config/203-object-store-config.yaml | 30 - .../config/204-cache-config.yaml | 33 - .../config/300-pipelineloop.yaml | 51 - .../pipeline-loops/config/301-breaktask.yaml | 51 - .../pipeline-loops/config/500-controller.yaml | 80 - .../config/500-webhook-configuration.yaml | 68 - .../pipeline-loops/config/500-webhook.yaml | 122 - .../examples/break-task-example.yaml | 98 - .../examples/loop-example-basic-retry.yaml | 72 - .../examples/loop-example-basic.yaml | 71 - .../examples/loop-example-basic_taskspec.yaml | 61 - .../examples/loop-example-numeric-param.yaml | 79 - .../loop-example-with-parallelism.yaml | 72 - .../examples/loop-example-workspaces.yaml | 132 - ...ipelinespec-with-nested-loop-embedded.yaml | 96 - .../pipelinespec-with-nested-loop.yaml | 110 - .../pipelinespec-with-run-arrary-value.yaml | 47 - .../pipelinespec-with-run-condition.yaml | 74 - .../pipelinespec-with-run-dict-value.yaml | 48 - ...pipelinespec-with-run-iterate-numeric.yaml | 45 - .../pipelinespec-with-run-string-value.yaml | 41 - .../examples/simplepipelineloop.yaml | 82 - .../examples/simplepipelineloop_taskspec.yaml | 58 - tekton-catalog/pipeline-loops/go.mod | 28 - .../hack/boilerplate/boilerplate.go.txt | 15 - tekton-catalog/pipeline-loops/hack/tools.go | 7 - .../pipeline-loops/hack/update-codegen.sh | 47 - .../pipeline-loops/hack/update-deps.sh | 60 - .../pkg/apis/pipelineloop/controller.go | 23 - .../pkg/apis/pipelineloop/register.go | 22 - .../pkg/apis/pipelineloop/v1alpha1/doc.go | 22 - .../v1alpha1/pipelineloop_defaults.go | 51 - .../v1alpha1/pipelineloop_types.go | 167 -- .../v1alpha1/pipelineloop_validation.go | 72 - .../v1alpha1/pipelineloop_validation_test.go | 210 -- .../apis/pipelineloop/v1alpha1/register.go | 54 - .../v1alpha1/zz_generated.deepcopy.go | 176 -- .../client/clientset/versioned/clientset.go | 
97 - .../pkg/client/clientset/versioned/doc.go | 20 - .../versioned/fake/clientset_generated.go | 82 - .../client/clientset/versioned/fake/doc.go | 20 - .../clientset/versioned/fake/register.go | 56 - .../client/clientset/versioned/scheme/doc.go | 20 - .../clientset/versioned/scheme/register.go | 56 - .../typed/pipelineloop/v1alpha1/doc.go | 20 - .../typed/pipelineloop/v1alpha1/fake/doc.go | 20 - .../v1alpha1/fake/fake_pipelineloop.go | 130 - .../v1alpha1/fake/fake_pipelineloop_client.go | 40 - .../v1alpha1/generated_expansion.go | 21 - .../pipelineloop/v1alpha1/pipelineloop.go | 178 -- .../v1alpha1/pipelineloop_client.go | 89 - .../informers/externalversions/factory.go | 180 -- .../informers/externalversions/generic.go | 62 - .../internalinterfaces/factory_interfaces.go | 40 - .../pipelineloop/interface.go | 46 - .../pipelineloop/v1alpha1/interface.go | 45 - .../pipelineloop/v1alpha1/pipelineloop.go | 90 - .../pkg/client/injection/client/client.go | 49 - .../pkg/client/injection/client/fake/fake.go | 54 - .../injection/informers/factory/factory.go | 56 - .../injection/informers/factory/fake/fake.go | 45 - .../v1alpha1/pipelineloop/fake/fake.go | 40 - .../v1alpha1/pipelineloop/pipelineloop.go | 52 - .../v1alpha1/expansion_generated.go | 27 - .../pipelineloop/v1alpha1/pipelineloop.go | 94 - .../reconciler/pipelinelooprun/controller.go | 230 -- .../pipelineloop_range_test.go | 363 --- .../pipelinelooprun/pipelinelooprun.go | 1248 --------- .../pipelinelooprun/pipelinelooprun_test.go | 2360 ----------------- .../pipeline-loops/test/controller.go | 276 -- 101 files changed, 11492 deletions(-) delete mode 100644 .github/workflows/pipelineloop.yml delete mode 100644 tekton-catalog/cache/LICENSE delete mode 100644 tekton-catalog/cache/README.md delete mode 100644 tekton-catalog/cache/go.mod delete mode 100644 tekton-catalog/cache/pkg/db/db_conn_manager.go delete mode 100644 tekton-catalog/cache/pkg/db/mysql.go delete mode 100644 tekton-catalog/cache/pkg/db/sqlite.go delete mode 100644 tekton-catalog/cache/pkg/model/task_cache.go delete mode 100644 tekton-catalog/cache/pkg/task_cache_store.go delete mode 100644 tekton-catalog/cache/pkg/task_cache_store_test.go delete mode 100755 tekton-catalog/exit-handler/hack/boilerplate/add-boilerplate.sh delete mode 100644 tekton-catalog/exit-handler/hack/boilerplate/boilerplate.go.txt delete mode 100755 tekton-catalog/exit-handler/hack/update-codegen.sh delete mode 100644 tekton-catalog/objectstore/LICENSE delete mode 100644 tekton-catalog/objectstore/README.md delete mode 100644 tekton-catalog/objectstore/go.mod delete mode 100644 tekton-catalog/objectstore/pkg/writer/objectstore_writer.go delete mode 100644 tekton-catalog/objectstore/pkg/writer/objectstorelogger.go delete mode 100644 tekton-catalog/pipeline-loops/.gitignore delete mode 100644 tekton-catalog/pipeline-loops/Dockerfile delete mode 100644 tekton-catalog/pipeline-loops/LICENSE delete mode 100644 tekton-catalog/pipeline-loops/Makefile delete mode 100644 tekton-catalog/pipeline-loops/README.md delete mode 100644 tekton-catalog/pipeline-loops/cmd/cli/main.go delete mode 100644 tekton-catalog/pipeline-loops/cmd/controller/main.go delete mode 100644 tekton-catalog/pipeline-loops/cmd/webhook/main.go delete mode 100644 tekton-catalog/pipeline-loops/config/200-serviceaccount.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/201-clusterrole.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/201-role.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/201-rolebinding.yaml 
delete mode 100644 tekton-catalog/pipeline-loops/config/202-clusterrolebinding.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/203-object-store-config.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/204-cache-config.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/300-pipelineloop.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/301-breaktask.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/500-controller.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/500-webhook-configuration.yaml delete mode 100644 tekton-catalog/pipeline-loops/config/500-webhook.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/break-task-example.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/loop-example-basic-retry.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/loop-example-basic.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/loop-example-basic_taskspec.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/loop-example-numeric-param.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/loop-example-with-parallelism.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/loop-example-workspaces.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/pipelinespec-with-nested-loop-embedded.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/pipelinespec-with-nested-loop.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-arrary-value.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-condition.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-dict-value.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-iterate-numeric.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-string-value.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/simplepipelineloop.yaml delete mode 100644 tekton-catalog/pipeline-loops/examples/simplepipelineloop_taskspec.yaml delete mode 100644 tekton-catalog/pipeline-loops/go.mod delete mode 100644 tekton-catalog/pipeline-loops/hack/boilerplate/boilerplate.go.txt delete mode 100644 tekton-catalog/pipeline-loops/hack/tools.go delete mode 100755 tekton-catalog/pipeline-loops/hack/update-codegen.sh delete mode 100755 tekton-catalog/pipeline-loops/hack/update-deps.sh delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/controller.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/register.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/doc.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_defaults.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_types.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_validation.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_validation_test.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/register.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/zz_generated.deepcopy.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/clientset.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/doc.go delete mode 100644 
tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/clientset_generated.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/doc.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/register.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme/doc.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme/register.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/doc.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/doc.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/fake_pipelineloop.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/fake_pipelineloop_client.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/generated_expansion.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/pipelineloop.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/pipelineloop_client.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/factory.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/generic.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/internalinterfaces/factory_interfaces.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/interface.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1/interface.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1/pipelineloop.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/injection/client/client.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/injection/client/fake/fake.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/factory.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/fake/fake.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/fake/fake.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/pipelineloop.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1/expansion_generated.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1/pipelineloop.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/controller.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelineloop_range_test.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelinelooprun.go delete mode 100644 tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelinelooprun_test.go delete mode 100644 tekton-catalog/pipeline-loops/test/controller.go diff --git a/.github/workflows/pipelineloop.yml b/.github/workflows/pipelineloop.yml deleted file mode 100644 index ec7e232d52..0000000000 --- 
a/.github/workflows/pipelineloop.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: KFP Tekton pipelineloop unit tests - -on: - push: - branches: [v2-integration] - - # Run tests for any PRs. - pull_request: - paths: - - 'tekton-catalog/pipeline-loops/go.mod' - - 'tekton-catalog/pipeline-loops/cmd/**' - - 'tekton-catalog/pipeline-loops/pkg/**' - - 'tekton-catalog/pipeline-loops/test/**' - -env: - GITHUB_ACTION: "true" - SETUPTOOLS_USE_DISTUTILS: "stdlib" - -jobs: - run-pipelineloop-unittests: - runs-on: ubuntu-latest - steps: - - name: Install Go - uses: actions/setup-go@v2 - with: - go-version: 1.20.x - - name: Checkout code - uses: actions/checkout@v2 - - name: "run go pipelineLoop unit tests" - run: cd tekton-catalog/pipeline-loops && make test-all diff --git a/tekton-catalog/cache/LICENSE b/tekton-catalog/cache/LICENSE deleted file mode 100644 index 261eeb9e9f..0000000000 --- a/tekton-catalog/cache/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/tekton-catalog/cache/README.md b/tekton-catalog/cache/README.md deleted file mode 100644 index fbcfe3d3d4..0000000000 --- a/tekton-catalog/cache/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# Cache: Reuse the results from previous execution for custom tasks. - -### How To - -1. Setup. -```go - -import ( - "fmt" - "time" - - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/db" - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/model" -) - -taskCacheStore := TaskCacheStore{Params: db.ConnectionParams{DbDriver: "sqlite3", DbName: "example.db"}} - err := taskCacheStore.Connect() - // Currently, mysql and sqlite3 are supported driver. -``` - -2. Store an entry to cache. -```go - taskCache := &model.TaskCache{ - TaskHashKey: cacheKey, - TaskOutput: cacheOutput, - } - taskCacheStore.Put(taskCache) -``` - -3. Fetch an entry from cache. -```go - cacheResult, err := taskCacheStore.Get(taskCache.TaskHashKey) - if err != nil { - fmt.Printf("%v", err) - } - -``` -4. Prune entries older than a day using: -```go - taskCacheStore.PruneOlderThan(time.Now().Add(-24*time.Hour)) -``` \ No newline at end of file diff --git a/tekton-catalog/cache/go.mod b/tekton-catalog/cache/go.mod deleted file mode 100644 index 303134d826..0000000000 --- a/tekton-catalog/cache/go.mod +++ /dev/null @@ -1,9 +0,0 @@ -module github.com/kubeflow/kfp-tekton/tekton-catalog/cache - -go 1.13 - -require ( - gorm.io/driver/mysql v1.4.3 - gorm.io/driver/sqlite v1.4.2 - gorm.io/gorm v1.24.0 -) diff --git a/tekton-catalog/cache/pkg/db/db_conn_manager.go b/tekton-catalog/cache/pkg/db/db_conn_manager.go deleted file mode 100644 index 494758f02b..0000000000 --- a/tekton-catalog/cache/pkg/db/db_conn_manager.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package db - -import ( - "fmt" - "time" - - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/model" - "gorm.io/gorm" -) - -type ConnectionParams struct { - DbDriver string - DbHost string - DbPort string - DbName string - DbUser string - DbPwd string - DbGroupConcatMaxLen string - DbExtraParams string - Timeout time.Duration -} - -func InitDBClient(params ConnectionParams, initConnectionTimeout time.Duration) (*gorm.DB, error) { - driverName := params.DbDriver - var db *gorm.DB - var err error - - switch driverName { - case "mysql": - db, err = initMysql(params) - case "sqlite": - db, err = initSqlite(params.DbName) - default: - return nil, fmt.Errorf("driver %s is not supported", driverName) - } - - // db is safe for concurrent use by multiple goroutines - // and maintains its own pool of idle connections. - if err != nil { - return nil, err - } - // Create table - response := db.AutoMigrate(&model.TaskCache{}) - if response != nil { - return nil, fmt.Errorf("failed to initialize the databases: Error: %v", response) - } - return db, nil -} diff --git a/tekton-catalog/cache/pkg/db/mysql.go b/tekton-catalog/cache/pkg/db/mysql.go deleted file mode 100644 index 567fe277b6..0000000000 --- a/tekton-catalog/cache/pkg/db/mysql.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2022 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package db - -import ( - "encoding/json" - "fmt" - "time" - - "gorm.io/driver/mysql" - "gorm.io/gorm" -) - -const ( - mysqlDBDriverDefault = "mysql" - mysqlDBHostDefault = "mysql.kubeflow.svc.cluster.local" - mysqlDBPortDefault = "3306" - mysqlDBGroupConcatMaxLenDefault = "4194304" - DefaultConnectionTimeout = time.Minute * 6 -) - -func setDefault(field *string, defaultVal string) { - if *field == "" { - *field = defaultVal - } -} - -func (params *ConnectionParams) LoadMySQLDefaults() { - setDefault(¶ms.DbDriver, mysqlDBDriverDefault) - setDefault(¶ms.DbHost, mysqlDBHostDefault) - setDefault(¶ms.DbPort, mysqlDBPortDefault) - setDefault(¶ms.DbName, "cachedb") - setDefault(¶ms.DbUser, "root") - setDefault(¶ms.DbPwd, "") - setDefault(¶ms.DbGroupConcatMaxLen, mysqlDBGroupConcatMaxLenDefault) - if params.Timeout == 0 { - params.Timeout = DefaultConnectionTimeout - } -} - -func initMysql(params ConnectionParams) (*gorm.DB, error) { - var mysqlExtraParams = map[string]string{} - data := []byte(params.DbExtraParams) - _ = json.Unmarshal(data, &mysqlExtraParams) - mysqlConfigDSN := CreateMySQLConfigDSN( - params.DbUser, - params.DbPwd, - params.DbHost, - params.DbPort, - params.DbName, - params.DbGroupConcatMaxLen, - mysqlExtraParams, - ) - db, err := gorm.Open(mysql.New(mysql.Config{ - DSN: mysqlConfigDSN, // data source name, refer https://github.com/go-sql-driver/mysql#dsn-data-source-name - DefaultStringSize: 256, // add default size for string fields, by default, will use db type `longtext` for fields without size, not a primary key, no index defined and don't have default values - DontSupportRenameIndex: true, // drop & create index when rename index, rename index not supported before MySQL 5.7, MariaDB - DontSupportRenameColumn: true, // use change when rename column, rename rename not supported before MySQL 8, MariaDB - SkipInitializeWithVersion: false, // smart configure based on used version - }), &gorm.Config{}) - - return db, err -} - -func CreateMySQLConfigDSN(user, password, mysqlServiceHost, mysqlServicePort, dbName, mysqlGroupConcatMaxLen string, - mysqlExtraParams map[string]string) string { - - if mysqlGroupConcatMaxLen == "" { - mysqlGroupConcatMaxLen = "4194304" - } - params := map[string]string{ - "parseTime": "True", - "loc": "Local", - "group_concat_max_len": mysqlGroupConcatMaxLen, - } - - for k, v := range mysqlExtraParams { - params[k] = v - } - dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?charset=utf8mb4", user, password, mysqlServiceHost, mysqlServicePort, dbName) - - for k, v := range params { - dsn = fmt.Sprintf("%s&%s=%s", dsn, k, v) - } - return dsn -} diff --git a/tekton-catalog/cache/pkg/db/sqlite.go b/tekton-catalog/cache/pkg/db/sqlite.go deleted file mode 100644 index 18d2b71723..0000000000 --- a/tekton-catalog/cache/pkg/db/sqlite.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package db - -import ( - "gorm.io/driver/sqlite" - "gorm.io/gorm" -) - -func initSqlite(dbName string) (*gorm.DB, error) { - var db *gorm.DB - var err error - if dbName == "" { - db, err = gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) - } else { - db, err = gorm.Open(sqlite.Open(dbName), &gorm.Config{}) - } - - return db, err -} diff --git a/tekton-catalog/cache/pkg/model/task_cache.go b/tekton-catalog/cache/pkg/model/task_cache.go deleted file mode 100644 index 4e7ff5417c..0000000000 --- a/tekton-catalog/cache/pkg/model/task_cache.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2022 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package model - -import ( - "time" -) - -type TaskCache struct { - ID int64 `gorm:"column:ID; not null; primary_key; AUTO_INCREMENT"` - TaskHashKey string `gorm:"column:TaskHashKey; not null; index:idx_cache_key"` - TaskOutput string `gorm:"column:TaskOutput; type:longtext; not null"` - CreatedAt time.Time `gorm:"column:CreatedAt; autoCreateTime:nano; not null"` -} diff --git a/tekton-catalog/cache/pkg/task_cache_store.go b/tekton-catalog/cache/pkg/task_cache_store.go deleted file mode 100644 index 25fcee430c..0000000000 --- a/tekton-catalog/cache/pkg/task_cache_store.go +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2022 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package cache - -import ( - "fmt" - "time" - - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/db" - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/model" - "gorm.io/gorm" -) - -type TaskCacheStore struct { - db *gorm.DB - Disabled bool - Params db.ConnectionParams -} - -func (t *TaskCacheStore) Connect() error { - if t.db != nil || t.Disabled { - return nil - } - var err error - t.db, err = db.InitDBClient(t.Params, t.Params.Timeout) - return err -} - -func (t *TaskCacheStore) Get(taskHashKey string) (*model.TaskCache, error) { - if t.Disabled || t.db == nil { - return nil, nil - } - entry := &model.TaskCache{} - d := t.db.Model(&model.TaskCache{}).Where("TaskHashKey = ?", taskHashKey). - Order("CreatedAt DESC").First(entry) - if d.Error != nil { - return nil, fmt.Errorf("failed to get entry from cache: %q. 
Error: %v", taskHashKey, d.Error) - } - return entry, nil -} - -func (t *TaskCacheStore) Put(entry *model.TaskCache) error { - if t.Disabled || t.db == nil { - return nil - } - d := t.db.Create(entry) - if d.Error != nil { - return fmt.Errorf("failed to create a new cache entry, %#v, Error: %v", entry, t.db.Error) - } - return nil -} - -func (t *TaskCacheStore) Delete(id string) error { - if t.Disabled || t.db == nil { - return nil - } - d := t.db.Delete(&model.TaskCache{}, "ID = ?", id) - if d.Error != nil { - return d.Error - } - return nil -} - -func (t *TaskCacheStore) PruneOlderThan(timestamp time.Time) error { - if t.Disabled || t.db == nil { - return nil - } - d := t.db.Delete(&model.TaskCache{}, "CreatedAt <= ?", timestamp) - if d.Error != nil { - return d.Error - } - return nil -} diff --git a/tekton-catalog/cache/pkg/task_cache_store_test.go b/tekton-catalog/cache/pkg/task_cache_store_test.go deleted file mode 100644 index fcbdbd92ee..0000000000 --- a/tekton-catalog/cache/pkg/task_cache_store_test.go +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright 2022 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package cache - -import ( - "fmt" - "strings" - "testing" - "time" - - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/db" - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/model" -) - -func newTestingCacheStore(disabled bool) (*TaskCacheStore, error) { - t := TaskCacheStore{ - Disabled: disabled, - // Params: db.ConnectionParams{DbDriver: "mysql", DbName: "testdb", - // DbHost: "127.0.0.1", DbPort: "3306", DbPwd: "", DbUser: "root", - // Timeout: 10 * time.Second, - // }, - Params: db.ConnectionParams{DbDriver: "sqlite", DbName: ":memory:"}, - } - err := t.Connect() - return &t, err -} - -func createTaskCache(cacheKey string, cacheOutput string) *model.TaskCache { - return &model.TaskCache{ - TaskHashKey: cacheKey, - TaskOutput: cacheOutput, - } -} - -func TestPut(t *testing.T) { - taskCacheStore, err := newTestingCacheStore(false) - if err != nil { - t.Fatal(err) - } - entry := createTaskCache("x", "y") - err = taskCacheStore.Put(entry) - if err != nil { - t.Fatal(err) - } - -} - -func TestGet(t *testing.T) { - taskCacheStore, err := newTestingCacheStore(false) - if err != nil { - t.Fatal(err) - } - entry := createTaskCache("x", "y") - err = taskCacheStore.Put(entry) - if err != nil { - t.Fatal(err) - } - cacheResult, err := taskCacheStore.Get(entry.TaskHashKey) - if err != nil { - t.Error(err) - } - if cacheResult.TaskHashKey != entry.TaskHashKey { - t.Errorf("Mismatached key. Expected %s Found: %s", entry.TaskHashKey, - cacheResult.TaskHashKey) - } - if cacheResult.TaskOutput != entry.TaskOutput { - t.Errorf("Mismatached output. Expected : %s Found: %s", - entry.TaskOutput, - cacheResult.TaskOutput) - } -} - -// Get should get the latest entry each time. 
-func TestGetLatest(t *testing.T) { - taskCacheStore, err := newTestingCacheStore(false) - if err != nil { - t.Fatal(err) - } - for i := 1; i < 10; i++ { - entry := createTaskCache("x", fmt.Sprintf("y%d", i)) - err := taskCacheStore.Put(entry) - if err != nil { - t.Fatal(err) - } - cacheResult, err := taskCacheStore.Get(entry.TaskHashKey) - if err != nil { - t.Error(err) - } - if cacheResult.TaskHashKey != entry.TaskHashKey { - t.Errorf("Mismatached key. Expected %s Found: %s", entry.TaskHashKey, - cacheResult.TaskHashKey) - } - if cacheResult.TaskOutput != entry.TaskOutput { - t.Errorf("Mismatached output. Expected : %s Found: %s", - entry.TaskOutput, - cacheResult.TaskOutput) - } - } -} - -func TestDisabledCache(t *testing.T) { - taskCacheStore, err := newTestingCacheStore(true) - if err != nil { - t.Fatal(err) - } - taskCache, err := taskCacheStore.Get("random") - if err != nil { - t.Errorf("a disabled cache returned non nil error: %s", err) - } - if taskCache != nil { - t.Errorf("a disabled cache should return nil") - } -} - -func TestPruneOlderThan(t *testing.T) { - taskCacheStore, err := newTestingCacheStore(false) - if err != nil { - t.Fatal(err) - } - hashKey := "cacheKey" - for i := 1; i < 10000000; i *= 100 { - t1 := &model.TaskCache{ - TaskHashKey: hashKey, - TaskOutput: "cacheOutput", - CreatedAt: time.UnixMicro(int64(i * 100)), - } - err = taskCacheStore.Put(t1) - if err != nil { - t.Fatal(err) - } - } - taskCache, err := taskCacheStore.Get(hashKey) - if err != nil { - t.Error(err) - } - if taskCache == nil { - t.Error("TaskCache should be not nil.") - } - err = taskCacheStore.PruneOlderThan(time.UnixMicro(100000000)) - if err != nil { - t.Fatal(err) - } - _, err = taskCacheStore.Get(hashKey) - if err == nil { - t.Errorf("Expected error to be not nil") - } - if !strings.HasPrefix(err.Error(), "failed to get entry from cache") { - t.Error("Should fail with entry not found in cache.") - } -} diff --git a/tekton-catalog/exit-handler/hack/boilerplate/add-boilerplate.sh b/tekton-catalog/exit-handler/hack/boilerplate/add-boilerplate.sh deleted file mode 100755 index 3c926a2c0b..0000000000 --- a/tekton-catalog/exit-handler/hack/boilerplate/add-boilerplate.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2019 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -USAGE=$(cat <<EOF -Adds boilerplate.<ext>.txt to all .<ext> files missing it in a directory. 
- -Usage: (from repository root) - ./hack/boilerplate/add-boilerplate.sh <ext> <dir> - -Example: (from repository root) - ./hack/boilerplate/add-boilerplate.sh go cmd -EOF -) - -set -e - -if [[ -z $1 || -z $2 ]]; then - echo "${USAGE}" - exit 1 -fi - -grep -r -L -P "Copyright \d+ The Knative Authors" $2 \ - | grep -P "\.$1\$" \ - | xargs -I {} sh -c \ - "cat hack/boilerplate/boilerplate.$1.txt {} > /tmp/boilerplate && mv /tmp/boilerplate {}" diff --git a/tekton-catalog/exit-handler/hack/boilerplate/boilerplate.go.txt b/tekton-catalog/exit-handler/hack/boilerplate/boilerplate.go.txt deleted file mode 100644 index c31e26a6b1..0000000000 --- a/tekton-catalog/exit-handler/hack/boilerplate/boilerplate.go.txt +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2023 kubeflow.org -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. diff --git a/tekton-catalog/exit-handler/hack/update-codegen.sh b/tekton-catalog/exit-handler/hack/update-codegen.sh deleted file mode 100755 index d253568a67..0000000000 --- a/tekton-catalog/exit-handler/hack/update-codegen.sh +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2019 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# run this on the project root directory - -set -o errexit -set -o nounset -set -o pipefail - -# need these two go pkgs for code-gen: -# - "knative.dev/hack" -# - "knative.dev/pkg/hack" - -CODE_GEN_DIR=code-gen - -go mod vendor -# If we run with -mod=vendor here, then generate-groups.sh looks for vendor files in the wrong place. -export GOFLAGS=-mod= - -echo "=== Update Codegen for exit-handler" - -rm -rf "${CODE_GEN_DIR}" - -# generate the code with: -# --output-base because this script should also be able to run inside the vendor dir of -# k8s.io/kubernetes. The output-base is needed for the generators to output into the vendor dir -# instead of the $GOPATH directly. For normal projects this can be dropped. 
-CODEGEN_PKG=vendor/k8s.io/code-generator -bash ${CODEGEN_PKG}/generate-groups.sh "deepcopy,client,informer,lister" \ - github.com/kubeflow/pipelines/backend/src/v2/tekton-exithandler/client \ - github.com/kubeflow/pipelines/backend/src/v2/tekton-exithandler/apis \ - "exithandler:v1alpha1" \ - --go-header-file ./tekton-catalog/exit-handler/hack/boilerplate/boilerplate.go.txt \ - --output-base "${CODE_GEN_DIR}" - -# Knative Injection -KNATIVE_CODEGEN_PKG=vendor/knative.dev/pkg -bash ${KNATIVE_CODEGEN_PKG}/hack/generate-knative.sh "injection" \ - github.com/kubeflow/pipelines/backend/src/v2/tekton-exithandler/client \ - github.com/kubeflow/pipelines/backend/src/v2/tekton-exithandler/apis \ - "exithandler:v1alpha1" \ - --go-header-file ./tekton-catalog/exit-handler/hack/boilerplate/boilerplate.go.txt \ - --output-base "${CODE_GEN_DIR}" - -cp -r "${CODE_GEN_DIR}/github.com/kubeflow/pipelines/backend/src/v2/tekton-exithandler/client" backend/src/v2/tekton-exithandler/ -cp -r "${CODE_GEN_DIR}/github.com/kubeflow/pipelines/backend/src/v2/tekton-exithandler/apis/exithandler/v1alpha1/zz_generated.deepcopy.go" backend/src/v2/tekton-exithandler/apis/exithandler/v1alpha1/ - -rm -rf "${CODE_GEN_DIR}" diff --git a/tekton-catalog/objectstore/LICENSE b/tekton-catalog/objectstore/LICENSE deleted file mode 100644 index 261eeb9e9f..0000000000 --- a/tekton-catalog/objectstore/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/tekton-catalog/objectstore/README.md b/tekton-catalog/objectstore/README.md deleted file mode 100644 index 2286e8a487..0000000000 --- a/tekton-catalog/objectstore/README.md +++ /dev/null @@ -1,127 +0,0 @@ -# objectstore - -## Connect and store objects in object store. - -**Purpose**: use object store for storing the results of custom tasks. - -**For example:** - -1. To setup connection. -```go -import "github.com/kubeflow/kfp-tekton/tekton-catalog/objectstore/pkg/writer" - -func init() { - config := writer.ObjectStoreConfig{ - CreateBucket: true, - DefaultBucketName: "testing-bucket", - AccessKey: "<>", - SecretKey: "<>", - Region: "us-south", - ServiceEndpoint: "https://s3.us-south.cloud-object-storage.appdomain.cloud", - Token: "", - S3ForcePathStyle: false, - } - w = writer.Writer{} - err := w.Load(config) - if err != nil { - fmt.Printf("error while connecting to s3 %#v \n", err) - } else { - fmt.Printf("\nConnected to s3.\n") - } -} -``` - -2. Store results of tasks. 
- -```go -result := fmt.Sprintf("%s", out.ConvertToType(types.StringType).Value()) - // retrieve Pipeline Run name and task name as follows. - prName := run.ObjectMeta.Labels["tekton.dev/pipelineRun"] - taskName:= run.ObjectMeta.Labels["tekton.dev/pipelineTask"] - err = w.Write(prName, taskName, param.Name, []byte(result)) - if err != nil { - logger.Errorf("error while writing to s3 %#v", err) - } - runResults = append(runResults, v1alpha1.RunResult{ - Name: param.Name, - Value: result, - }) -``` - -__Results are stored as:__ - -`/artifacts/$PIPELINERUN/$PIPELINETASK/.tgz` - -## Setup log to object store for golang. - -Log to cloud object storage for golang implemented as `io.Writer`. - -### Use it as a plugin/extension to [uber-go/zap](https://github.com/uber-go/zap) logger - -Configure logger and add a multi write syncer for `zap` as follows, - -```go -package main - -import ( - "fmt" - "os" - "os/signal" - "syscall" - - "go.uber.org/zap" - "go.uber.org/zap/zapcore" - cl "github.com/kubeflow/kfp-tekton/tekton-catalog/objectstore/pkg/writer" -) - -func initializeLogger() { - - loggerConfig := cl.ObjectStoreConfig{} - objectStoreLogger := cl.Logger{ - MaxSize: 1024 * 100, // After reaching this size the buffer syncs with object store. - } - - // Provide all the configuration. - loggerConfig.Enable = true - loggerConfig.AccessKey = "key" - loggerConfig.SecretKey = "key_secret" - loggerConfig.Region = "us-south" - loggerConfig.ServiceEndpoint = "" - loggerConfig.DefaultBucketName = "" - loggerConfig.CreateBucket = false // If the bucket already exists. - - _ = objectStoreLogger.LoadDefaults(loggerConfig) - // setup a multi sync Writer as follows, - w := zapcore.NewMultiWriteSyncer( - zapcore.AddSync(os.Stdout), - zapcore.AddSync(&objectStoreLogger), - ) - core := zapcore.NewCore( - zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()), - w, - zap.InfoLevel, - ) - logger := zap.New(core) - logger.Info("First log msg with object store logger.") -} - -// If you wish to sync with object store before shutdown. -func shutdownHook() { - // set up SIGHUP to send logs to object store before shutdown. - signal.Ignore(syscall.SIGHUP, syscall.SIGTERM, syscall.SIGINT) - c := make(chan os.Signal, 3) - signal.Notify(c, syscall.SIGTERM) - signal.Notify(c, syscall.SIGINT) - signal.Notify(c, syscall.SIGHUP) - - go func() { - for { - <-c - err := objectStoreLogger.Close() //causes a sync with object store. - fmt.Printf("Synced with object store... %v", err) - os.Exit(0) - } - }() -} - -``` \ No newline at end of file diff --git a/tekton-catalog/objectstore/go.mod b/tekton-catalog/objectstore/go.mod deleted file mode 100644 index 509c5716ac..0000000000 --- a/tekton-catalog/objectstore/go.mod +++ /dev/null @@ -1,5 +0,0 @@ -module github.com/kubeflow/kfp-tekton/tekton-catalog/objectstore - -require github.com/IBM/ibm-cos-sdk-go v1.8.0 - -go 1.13 diff --git a/tekton-catalog/objectstore/pkg/writer/objectstore_writer.go b/tekton-catalog/objectstore/pkg/writer/objectstore_writer.go deleted file mode 100644 index 11348d5bf9..0000000000 --- a/tekton-catalog/objectstore/pkg/writer/objectstore_writer.go +++ /dev/null @@ -1,108 +0,0 @@ -/* -Copyright [2022] [IBM] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
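The README above pins down the storage layout: each result lands under `/artifacts/$PIPELINERUN/$PIPELINETASK/` with a `.tgz` suffix relative to the configured bucket, gzip-compressed. As a hedged illustration (not part of the removed package), the sketch below shows how such a result could be fetched back with the same IBM COS SDK the module depends on; the `readResult` helper is hypothetical, its bucket/key layout mirrors `Writer.Write` further down in this patch, and the `*s3.S3` client is assumed to be configured the same way the README's `Load` call configures it.

```go
package example

import (
	"compress/gzip"
	"fmt"
	"io/ioutil"

	"github.com/IBM/ibm-cos-sdk-go/aws"
	"github.com/IBM/ibm-cos-sdk-go/service/s3"
)

// readResult is a hypothetical helper that fetches one stored result and
// gunzips it. The bucket/key layout mirrors the README and Writer.Write in
// this patch; client is an *s3.S3 configured the same way Load configures it.
func readResult(client *s3.S3, defaultBucket, pipelineRun, pipelineTask, resultName string) ([]byte, error) {
	bucket := fmt.Sprintf("%s/artifacts/%s/%s/", defaultBucket, pipelineRun, pipelineTask)
	key := fmt.Sprintf("%s.tgz", resultName)

	out, err := client.GetObject(&s3.GetObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
	})
	if err != nil {
		return nil, err
	}
	defer out.Body.Close()

	// The writer gzip-compresses payloads before uploading, so reverse that here.
	gz, err := gzip.NewReader(out.Body)
	if err != nil {
		return nil, err
	}
	defer gz.Close()
	return ioutil.ReadAll(gz)
}
```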
-You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package writer - -import ( - "bytes" - "compress/gzip" - "fmt" - "sync" - - "github.com/IBM/ibm-cos-sdk-go/aws" - "github.com/IBM/ibm-cos-sdk-go/aws/credentials" - "github.com/IBM/ibm-cos-sdk-go/aws/session" - "github.com/IBM/ibm-cos-sdk-go/service/s3" -) - -type ObjectStoreConfig struct { - CreateBucket bool - DefaultBucketName string - AccessKey string - SecretKey string - Region string - ServiceEndpoint string - Token string - S3ForcePathStyle bool -} - -type Writer struct { - DefaultBucketName string - client *s3.S3 - mu sync.Mutex -} - -func (w *Writer) Load(o ObjectStoreConfig) error { - cosCredentials := credentials.NewStaticCredentials(o.AccessKey, o.SecretKey, o.Token) - // Create client config - var conf = aws.NewConfig(). - WithRegion(o.Region). - WithEndpoint(o.ServiceEndpoint). - WithCredentials(cosCredentials). - WithS3ForcePathStyle(o.S3ForcePathStyle) - - var sess = session.Must(session.NewSession()) - w.client = s3.New(sess, conf) - input := &s3.CreateBucketInput{ - Bucket: aws.String(o.DefaultBucketName), - } - w.DefaultBucketName = o.DefaultBucketName - if o.CreateBucket { - _, err := w.client.CreateBucket(input) - if err != nil { - return err - } - } - return nil -} - -func (w *Writer) CreateNewBucket(bucketName string) error { - input := &s3.CreateBucketInput{ - Bucket: aws.String(bucketName), - } - _, err := w.client.CreateBucket(input) - return err -} - -func compress(data []byte) ([]byte, error) { - var b bytes.Buffer - gz := gzip.NewWriter(&b) - if _, err := gz.Write(data); err != nil { - return nil, err - } - if err := gz.Close(); err != nil { - return nil, err - } - return b.Bytes(), nil -} - -func (w *Writer) Write(pipelineRunName, pipelineTaskName, resultName string, content []byte) error { - bucketName := fmt.Sprintf("%s/artifacts/%s/%s/", w.DefaultBucketName, pipelineRunName, pipelineTaskName) - err := w.CreateNewBucket(bucketName) - if err != nil { - // TODO check if the failure is due to bucket already exists. - } - compressed, err := compress(content) - key := fmt.Sprintf("%s.tgz", resultName) - input := s3.PutObjectInput{ - Bucket: aws.String(bucketName), - Key: aws.String(key), - Body: bytes.NewReader(compressed), - } - - _, err = w.client.PutObject(&input) - return err -} diff --git a/tekton-catalog/objectstore/pkg/writer/objectstorelogger.go b/tekton-catalog/objectstore/pkg/writer/objectstorelogger.go deleted file mode 100644 index 084d6a8fbe..0000000000 --- a/tekton-catalog/objectstore/pkg/writer/objectstorelogger.go +++ /dev/null @@ -1,132 +0,0 @@ -/* -Copyright [2022] [IBM] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
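`Writer.Write` above leaves a TODO about telling an "already exists" failure apart from a genuine error when it pre-creates the per-run bucket. One possible shape for that check, assuming the IBM COS SDK exposes the same `awserr` helpers and S3 error-code constants as the upstream AWS SDK it mirrors (an illustrative sketch, not code from the patch):

```go
package example

import (
	"github.com/IBM/ibm-cos-sdk-go/aws/awserr"
	"github.com/IBM/ibm-cos-sdk-go/service/s3"
)

// bucketAlreadyExists reports whether a CreateBucket error only means the
// bucket is already there; in that case Write could safely keep going.
// Assumes the SDK surfaces awserr.Error and the standard S3 error codes.
func bucketAlreadyExists(err error) bool {
	if aerr, ok := err.(awserr.Error); ok {
		switch aerr.Code() {
		case s3.ErrCodeBucketAlreadyExists, s3.ErrCodeBucketAlreadyOwnedByYou:
			return true
		}
	}
	return false
}
```

With a helper like this, `Write` could ignore the `CreateNewBucket` error when `bucketAlreadyExists(err)` is true and return it otherwise.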
-*/ - -// cos_logger or Cloud object storage based logger, plays well with any logging -// package that can write to an io.Writer, including the standard library's log package. -// or as an extension to uber-go/zap logger. - -package writer - -import ( - "bytes" - "io" - "sync" - "time" - - "github.com/IBM/ibm-cos-sdk-go/aws" - "github.com/IBM/ibm-cos-sdk-go/aws/credentials" - "github.com/IBM/ibm-cos-sdk-go/aws/session" - "github.com/IBM/ibm-cos-sdk-go/service/s3" -) - -type Logger struct { - Enabled bool - buffer *bytes.Buffer - // When buffer reaches the size of MaxSize, it tries to sync with object store. - MaxSize int64 - // Whether to compress before syncing the buffer. - Compress bool - // Current size of the buffer. - size int64 - mu sync.Mutex - Writer *Writer -} - -// ensure we always implement io.WriteCloser -var _ io.WriteCloser = (*Logger)(nil) - -func (l *Logger) Write(p []byte) (n int, err error) { - l.mu.Lock() - defer l.mu.Unlock() - writeLen := int64(len(p)) - if l.size+writeLen >= l.MaxSize { - if err := l.syncBuffer(); err != nil { - return 0, err - } - } - if n, err = l.buffer.Write(p); err != nil { - return n, err - } - l.size = l.size + int64(n) - return n, nil -} - -func (l *Logger) syncBuffer() error { - var err error - err = l.writeToObjectStore(l.Writer.DefaultBucketName, - time.Now().Format(time.RFC3339Nano), l.buffer.Bytes()) - if err != nil { - return err - } - l.buffer.Reset() - l.size = 0 - return nil -} - -func (l *Logger) Close() error { - l.mu.Lock() - defer l.mu.Unlock() - return l.syncBuffer() -} - -func (l *Logger) load(o ObjectStoreConfig) error { - cosCredentials := credentials.NewStaticCredentials(o.AccessKey, o.SecretKey, o.Token) - // Create client config - var conf = aws.NewConfig(). - WithRegion(o.Region). - WithEndpoint(o.ServiceEndpoint). - WithCredentials(cosCredentials). 
- WithS3ForcePathStyle(o.S3ForcePathStyle) - - var sess = session.Must(session.NewSession()) - l.Writer = &Writer{ - DefaultBucketName: o.DefaultBucketName, - client: s3.New(sess, conf), - mu: sync.Mutex{}, - } - input := &s3.CreateBucketInput{ - Bucket: aws.String(o.DefaultBucketName), - } - if o.CreateBucket { - _, err := l.Writer.client.CreateBucket(input) - if err != nil { - return err - } - } - return nil -} - -func (l *Logger) writeToObjectStore(bucketName string, key string, content []byte) error { - input := s3.PutObjectInput{ - Bucket: aws.String(bucketName), - Key: aws.String(key), - Body: bytes.NewReader(content), - } - - _, err := l.Writer.client.PutObject(&input) - return err -} - -func (l *Logger) LoadDefaults(config ObjectStoreConfig) error { - err := l.load(config) - if err != nil { - return err - } - if l.buffer == nil { - l.buffer = new(bytes.Buffer) - } - return nil -} diff --git a/tekton-catalog/pipeline-loops/.gitignore b/tekton-catalog/pipeline-loops/.gitignore deleted file mode 100644 index 16e08bc2f3..0000000000 --- a/tekton-catalog/pipeline-loops/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -.ko.yaml - diff --git a/tekton-catalog/pipeline-loops/Dockerfile b/tekton-catalog/pipeline-loops/Dockerfile deleted file mode 100644 index 9f84440550..0000000000 --- a/tekton-catalog/pipeline-loops/Dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM ubuntu -ARG bin_dir=_output/bin -ARG bin_name -ENV BIN ${bin_name} - -WORKDIR / - -COPY ${bin_dir}/${BIN} /usr/local/bin - -ENTRYPOINT [] -CMD ${BIN} diff --git a/tekton-catalog/pipeline-loops/LICENSE b/tekton-catalog/pipeline-loops/LICENSE deleted file mode 100644 index 261eeb9e9f..0000000000 --- a/tekton-catalog/pipeline-loops/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
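Stepping back to the object-store logger removed above: its doc comment notes that anything accepting an `io.Writer` can use it, not only zap (the zap wiring is shown in the README earlier in this patch). A minimal sketch of the standard-library `log` variant, with placeholder configuration values and a hypothetical constructor name:

```go
package example

import (
	"io"
	"log"
	"os"

	"github.com/kubeflow/kfp-tekton/tekton-catalog/objectstore/pkg/writer"
)

// newObjectStoreBackedLogger is a hypothetical constructor: it wires the
// removed writer.Logger into the standard library's log package alongside
// stdout. All configuration values below are placeholders.
func newObjectStoreBackedLogger() (*log.Logger, *writer.Logger, error) {
	osLogger := &writer.Logger{
		MaxSize: 1024 * 100, // sync with the object store once the buffer reaches this size
	}
	cfg := writer.ObjectStoreConfig{
		DefaultBucketName: "testing-bucket",
		AccessKey:         "<>",
		SecretKey:         "<>",
		Region:            "us-south",
		ServiceEndpoint:   "https://s3.us-south.cloud-object-storage.appdomain.cloud",
	}
	if err := osLogger.LoadDefaults(cfg); err != nil {
		return nil, nil, err
	}
	// *writer.Logger implements io.WriteCloser, so it composes with stdout.
	stdLogger := log.New(io.MultiWriter(os.Stdout, osLogger), "", log.LstdFlags)
	return stdLogger, osLogger, nil
}
```

As with the zap example in the README, calling `Close()` on the returned `*writer.Logger` before shutdown flushes whatever is still buffered to the object store.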
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/tekton-catalog/pipeline-loops/Makefile b/tekton-catalog/pipeline-loops/Makefile deleted file mode 100644 index 064bca249b..0000000000 --- a/tekton-catalog/pipeline-loops/Makefile +++ /dev/null @@ -1,69 +0,0 @@ -BIN_DIR ?= _output/bin - -DOCKER_REGISTRY ?= aipipeline - -TIMEOUT_TESTS = 2.5m - -SYSTEM_NAMESPACE=tekton-pipelines - - -# If tag not explicitly set in users default to the git sha. -TAG ?= v0.0.1 - -.EXPORT_ALL_VARIABLES: - -all: - $(info try make help) - -KO = ko -ko: PACKAGE=github.com/google/ko/cmd/ko - -apply: $(info apply: Apply PipelineLoop controller to the k8s cluster ) - $(KO) apply -f config - -test-all: update init $(info test-all: run unit tests ) - go test -v -cover -timeout=${TIMEOUT_TESTS} ./... 
- -.PHONY: init -init: - mkdir -p ${BIN_DIR} - -.PHONY: cli -cli: update init $(info cli: build validation cli ) - go build -o=${BIN_DIR}/pipelineloop-cli ./cmd/cli - -.PHONY: validate-testdata-python-sdk -validate-testdata-python-sdk: cli $(info validate-testdata-python-sdk: validate testdata for python SDK ) -## works w/o findutils installed, but globbing has a limit -## for x in ../../sdk/python/tests/compiler/testdata/*yaml; do ${BIN_DIR}/pipelineloop-cli -f $$x || true ; done - @find ../../sdk/python/tests/compiler/testdata \ - \( -type f -name "*yaml" -not -path "*\test_data\*" -not -name "*component.yaml" \) \ - -print0 | xargs -0 -n1 ${BIN_DIR}/pipelineloop-cli -f - -.PHONY: validate-examples -validate-examples: cli $(info validate-examples: validate example yamls for pipelineloop custom task. ) - @find ./examples/ \ - \( -type f -name "*yaml" \) -print0 | xargs -0 -n1 ${BIN_DIR}/pipelineloop-cli -f - -local: update init - go build -o=${BIN_DIR}/pipelineloop-controller ./cmd/controller - go build -o=${BIN_DIR}/pipelineloop-webhook ./cmd/webhook - go build -o=${BIN_DIR}/pipelineloop-cli ./cmd/cli - -build-linux: update init - CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o=${BIN_DIR}/pipelineloop-controller ./cmd/controller - CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o=${BIN_DIR}/pipelineloop-webhook ./cmd/webhook - CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o=${BIN_DIR}/pipelineloop-cli ./cmd/cli - -images: build-linux - docker build --build-arg bin_name=pipelineloop-controller . -t ${DOCKER_REGISTRY}/pipelineloop-controller:$(TAG) - docker build --build-arg bin_name=pipelineloop-webhook . -t ${DOCKER_REGISTRY}/pipelineloop-webhook:$(TAG) - -update: - go mod download - go mod tidy - go mod vendor - -clean: - rm -r ${BIN_DIR} - rm -f *.log diff --git a/tekton-catalog/pipeline-loops/README.md b/tekton-catalog/pipeline-loops/README.md deleted file mode 100644 index 65f62cd909..0000000000 --- a/tekton-catalog/pipeline-loops/README.md +++ /dev/null @@ -1,181 +0,0 @@ -# Custom Task: Pipeline Loops - -`Pipeline Loops` implementation is based on [Tekton Custom Task](https://github.com/tektoncd/community/blob/master/teps/0002-custom-tasks.md) and the template of [Knative sample controller](https://github.com/knative-sandbox/sample-controller) - -# Goal -`Pipeline Loops` is trying to provide pipeline level loops to handle `withItems` loop in `Pipeline`(Tekton). - -# Installation - -## Option One: Install using KO - -- Install and configure [KO](https://github.com/google/ko) -- Install pipelineloop controller - `ko apply -f config/` - -## Option Two: Install by building your own Docker images - -- Modify `Makefile` by changing `registry-name` to your Docker hub name - -- Run `make images` to build the docker image of yours. - -- Modify `config/500-webhook.yaml` and `config/500-controller.yaml` Change the image name to your docker image, e.g.: -``` -- name: webhook - image: fenglixa/pipelineloop-webhook:v0.0.1 -``` -``` -- name: tekton-pipelineloop-controller - image: fenglixa/pipelineloop-controller:v0.0.1 -``` - -- Install pipelineloop controller `kubectl apply -f config/` - -# Parallelism -`Parallelism` is to define the number of pipelineruns can be created at the same time. It must be eqaul to or bigger than 1. If it's not set then the default value will be 1. If it's bigger than the total iterations in the loop then the number will be total iterations. - -# Break -It's common to break the loop when some condition is met, like what we do in programming languages. 
This can be done by specifying the task name with the `last-loop-task` label. When the task specified the label is skipped during pipelineloop iteration, the loop will be marked `Succeeded` with a `pass` condition, and no more iteration will be started. The common use case is to check the condition in the task with `when` expression so that to decide whether to break the loop or just continue. - -# Verification -- check controller and the webhook. `kubectl get po -n tekton-pipelines` - ``` - ... - tekton-pipelineloop-controller-db4c7dddb-vrlsd 1/1 Running 0 6h24m - tekton-pipelineloop-webhook-7bb98ddc98-qqkv6 1/1 Running 0 6h17m - ``` -- Try cases to loop pipelines - - `kubectl apply -f examples/pipelinespec-with-run-arrary-value.yaml` - - `kubectl apply -f examples/pipelinespec-with-run-string-value.yaml` - - `kubectl apply -f examples/pipelinespec-with-run-iterate-numeric.yaml` - - `kubectl apply -f examples/pipelinespec-with-run-condition.yaml` - -# End to end example -- Install Tekton version >= v0.19 -- Edit feature-flags configmap, ensure "data.enable-custom-tasks" is "true": -`kubectl edit cm feature-flags -n tekton-pipelines` - -- Run the E2E example: - - `kubectl apply -f examples/loop-example-basic.yaml` - - -- Tekton now supports custom task as embedded spec, it requires tekton version >= v0.25 - - - Install Tekton version >= v0.25 - Or directly from source as, - ``` - git clone https://github.com/tektoncd/pipeline.git - cd pipeline - make apply - ``` - - - To use the `taskSpec` example as below - - e.g. - - ```yaml - apiVersion: tekton.dev/v1beta1 - kind: PipelineRun - metadata: - name: pr-loop-example - spec: - pipelineSpec: - tasks: - - name: first-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the first task before the loop task" - - name: loop-task - runAfter: - - first-task - params: - - name: message - value: - - I am the first one - - I am the second one - - I am the third one - taskSpec: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - spec: # Following is the embedded spec. - iterateParam: message - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - - name: last-task - runAfter: - - loop-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the last task after the loop task" - ``` - - - To run the above example: - - `kubectl apply -f examples/loop-example-basic_taskspec.yaml` - -# Validation - -A validation CLI validates any Pipeline/PipelineRun/Run with embedded -PipelineLoop Spec or any PipelineLoop custom task definition. - -To build it from source, use the make tool as follows: - - -1. For both Mac and linux, invoke make with a default target. - -``` -export BIN_DIR="bin" -make cli -``` - -Output: -``` -mkdir -p bin -go build -o=bin/pipelineloop-cli ./cmd/cli -``` - -For linux specific build: -``` -export BIN_DIR="bin" -make build-linux -``` - -2. Above command will generate the output in `bin` dir. Use as follows: -``` -bin/pipelineloop-cli -f examples/loop-example-basic_taskspec.yaml -``` - -Output: -``` - -Congratulations, all checks passed !! 
-``` diff --git a/tekton-catalog/pipeline-loops/cmd/cli/main.go b/tekton-catalog/pipeline-loops/cmd/cli/main.go deleted file mode 100644 index 1ff2ad3d83..0000000000 --- a/tekton-catalog/pipeline-loops/cmd/cli/main.go +++ /dev/null @@ -1,286 +0,0 @@ -/* -Copyright 2021 The Kubeflow Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "context" - "encoding/json" - "flag" - "fmt" - "io" - "os" - "strings" - - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop" - pipelineloopv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha1" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" - "k8s.io/apimachinery/pkg/util/yaml" -) - -func validateFlags(action, inputFileName, inputFileType string) { - var errs []error - if action != "validate" { - err := fmt.Errorf("unsupported action %s\n", action) - errs = append(errs, err) - } - if inputFileName == "" { - err := fmt.Errorf("missing input spec, please specify -f /path/to/file.(yaml|json)\n") - errs = append(errs, err) - } - if inputFileType != "yaml" && inputFileType != "json" { - err := fmt.Errorf("unsupported input file type:%s\n", inputFileType) - errs = append(errs, err) - } - if len(errs) > 0 { - fmt.Println("Errors while processing input for filename: ", inputFileName) - for i, e := range errs { - fmt.Printf("Error: %d -> %v\n", i, e) - } - os.Exit(1) - } -} - -func main() { - var action, inputFileName, inputFileType string - var quiet bool - flag.StringVar(&action, "a", "validate", "The `action` on the resource.") - flag.StringVar(&inputFileName, "f", "", "path to input `filename` either a yaml or json.") - flag.StringVar(&inputFileType, "file-type", "yaml", "`yaml` or a json") - flag.BoolVar(&quiet, "q", false, "quiet mode, report only errors.") - flag.Parse() - validateFlags(action, inputFileName, inputFileType) - if inputFileType == "json" { - fmt.Println("Error: json file type is not supported.") - os.Exit(5) - } - errs := []string{} - if inputFileType == "yaml" { - objs, err := readFile(inputFileName) - if err != nil { - fmt.Printf("\nWarn: skipping file:%s, %v\n", inputFileName, err) - // os.Exit(2) These are warning because, certain yaml are not k8s resource and we can skip. - os.Exit(2) - } - fmt.Printf("Reading file: %s\n", inputFileName) - for _, o := range objs { - marshalledBytes, err := o.MarshalJSON() - if err != nil { - fmt.Printf("\nWarn: skipping file due to json Marshal errors:%s, %v\n", inputFileName, err) - // os.Exit(3) These are warning because, certain yaml are not k8s resource and we can skip. - os.Exit(0) - } - err = nil - switch kind := o.GetKind(); kind { - case "Task": - // No validation. 
- case "Run": - err = validateRun(marshalledBytes) - case "CustomRun": - err = validateCustomRun(marshalledBytes) - case "Pipeline": - err = validatePipeline(marshalledBytes) - case pipelineloop.PipelineLoopControllerName: - err = validatePipelineLoop(marshalledBytes) - case "PipelineRun": - err = validatePipelineRun(marshalledBytes) - default: - if !quiet { - fmt.Printf("\nWarn: Unsupported kind: %s.\n", kind) - } - } - if err != nil { - errs = append(errs, err.Error()) - } - } - if len(errs) > 0 { - fmt.Printf("\nFound validation errors in %s: \n%s\n", inputFileName, - strings.Join(errs, "\n")) - os.Exit(100) - } else { - if !quiet { - fmt.Printf("\nCongratulations, all checks passed in %s\n", inputFileName) - } - } - } -} - -func validatePipeline(bytes []byte) error { - p := v1.Pipeline{} - if err := json.Unmarshal(bytes, &p); err != nil { - return err - } - return validatePipelineSpec(&p.Spec, p.Name) -} - -func validatePipelineSpec(p *v1.PipelineSpec, name string) error { - errs := []string{} - // Here we validate those embedded spec, whose kind is pipelineLoop. - if p.Tasks != nil { - for _, task := range p.Tasks { - if task.TaskSpec != nil && task.TaskSpec.Kind == pipelineloop.PipelineLoopControllerName { - err := validatePipelineLoopEmbedded(task.TaskSpec.Spec.Raw) - if err != nil { - errs = append(errs, err.Error()) - } - } - } - } - if len(errs) > 0 { - e := strings.Join(errs, "\n") - return fmt.Errorf("Validation errors found in pipeline %s\n %s", name, e) - } - - // Validate the Tekton pipelineSpec - ctx := context.Background() - ctx = pipelinelooprun.EnableCustomTaskFeatureFlag(ctx) - p.SetDefaults(ctx) - errors := p.Validate(ctx) - if errors != nil { - return errors - } - return nil -} - -func validateRun(bytes []byte) error { - r := v1alpha1.Run{} - err := json.Unmarshal(bytes, &r) - if err != nil { - return fmt.Errorf("Error while unmarshal Run:%s\n", err.Error()) - } - // We do not need to validate Run because it is validated by tekton admission webhook - // And r.Spec.Ref is also validated by tekton. - // Here we only need to validate the embedded spec. i.e. r.Spec.Spec - if r.Spec.Spec != nil && r.Spec.Spec.Kind == pipelineloop.PipelineLoopControllerName { - if err := validatePipelineLoopEmbedded(r.Spec.Spec.Spec.Raw); err != nil { - return fmt.Errorf("Found validation errors in Run: %s \n %s", r.Name, err.Error()) - } - } - return nil -} - -func validateCustomRun(bytes []byte) error { - customRun := v1beta1.CustomRun{} - err := json.Unmarshal(bytes, &customRun) - if err != nil { - return fmt.Errorf("Error while unmarshal CustomRun:%s\n", err.Error()) - } - // We do not need to validate CustomRun because it is validated by tekton admission webhook - // And r.Spec.CustomRef is also validated by tekton. - // Here we only need to validate the embedded spec. i.e. 
r.Spec.Spec - if customRun.Spec.CustomSpec != nil && customRun.Spec.CustomSpec.Kind == pipelineloop.PipelineLoopControllerName { - if err := validatePipelineLoopEmbedded(customRun.Spec.CustomSpec.Spec.Raw); err != nil { - return fmt.Errorf("Found validation errors in CustomRun: %s \n %s", customRun.Name, err.Error()) - } - } - return nil -} - -func validatePipelineRun(bytes []byte) error { - pr := v1.PipelineRun{} - if err := json.Unmarshal(bytes, &pr); err != nil { - return fmt.Errorf("Error while unmarshal PipelineRun spec:%s\n", err.Error()) - } - return validatePipelineSpec(pr.Spec.PipelineSpec, pr.Name) -} - -func validatePipelineLoopEmbedded(bytes []byte) error { - var embeddedSpec map[string]interface{} - if err := json.Unmarshal(bytes, &embeddedSpec); err != nil { - return fmt.Errorf("Error while unmarshal PipelineLoop embedded spec:%s\n", err.Error()) - } - r1 := map[string]interface{}{ - "kind": pipelineloop.PipelineLoopControllerName, - "apiVersion": pipelineloopv1alpha1.SchemeGroupVersion.String(), - "metadata": metav1.ObjectMeta{Name: "embedded"}, - "spec": embeddedSpec, - } - - marshalBytes, err := json.Marshal(r1) - if err != nil { - return fmt.Errorf("Error while marshalling embedded to PipelineLoop:%s\n", err.Error()) - } - return validatePipelineLoop(marshalBytes) -} - -func validatePipelineLoop(bytes []byte) error { - pipelineLoop := pipelineloopv1alpha1.PipelineLoop{} - if err := json.Unmarshal(bytes, &pipelineLoop); err != nil { - return fmt.Errorf("Error while unmarshal PipelineLoop:%s\n", err.Error()) - } - ctx := context.Background() - ctx = pipelinelooprun.EnableCustomTaskFeatureFlag(ctx) - pipelineLoop.SetDefaults(ctx) - if err := pipelineLoop.Validate(ctx); err != nil { - return fmt.Errorf("PipelineLoop name:%s\n %s", pipelineLoop.Name, err.Error()) - } - if err, name := validateNestedPipelineLoop(pipelineLoop); err != nil { - return fmt.Errorf("Nested PipelineLoop name:%s\n %s", name, err.Error()) - } - return nil -} - -func validateNestedPipelineLoop(pl pipelineloopv1alpha1.PipelineLoop) (error, string) { - if pl.Spec.PipelineSpec != nil { - for _, task := range pl.Spec.PipelineSpec.Tasks { - if task.TaskSpec != nil && task.TaskSpec.Kind == pipelineloop.PipelineLoopControllerName { - err := validatePipelineLoopEmbedded(task.TaskSpec.Spec.Raw) - if err != nil { - return err, task.Name - } - } - } - } - return nil, "" -} - -// readFile parses a single file. -func readFile(pathname string) ([]unstructured.Unstructured, error) { - file, err := os.Open(pathname) - if err != nil { - return nil, err - } - defer file.Close() - - return decode(file) -} - -// decode consumes the given reader and parses its contents as YAML. -func decode(reader io.Reader) ([]unstructured.Unstructured, error) { - decoder := yaml.NewYAMLToJSONDecoder(reader) - objs := []unstructured.Unstructured{} - var err error - for { - out := unstructured.Unstructured{} - err = decoder.Decode(&out) - if err != nil { - break - } - if len(out.Object) == 0 { - continue - } - objs = append(objs, out) - } - if err != io.EOF { - return nil, err - } - return objs, nil -} diff --git a/tekton-catalog/pipeline-loops/cmd/controller/main.go b/tekton-catalog/pipeline-loops/cmd/controller/main.go deleted file mode 100644 index 2c033fb998..0000000000 --- a/tekton-catalog/pipeline-loops/cmd/controller/main.go +++ /dev/null @@ -1,43 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
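One behaviour of the CLI's `decode` helper above that is easy to miss: it walks a whole multi-document YAML stream and returns every document as an `unstructured.Unstructured`, which is what lets a single `-f` file carry a Pipeline together with its PipelineLoop definitions. A small, hypothetical test (same `package main`; the test name and inputs are invented here) illustrating that:

```go
package main

import (
	"strings"
	"testing"
)

func TestDecodeSplitsYAMLDocuments(t *testing.T) {
	// Two YAML documents separated by "---" should come back as two objects.
	input := `apiVersion: custom.tekton.dev/v1alpha1
kind: PipelineLoop
metadata:
  name: first-loop
---
apiVersion: tekton.dev/v1beta1
kind: CustomRun
metadata:
  name: second-run
`
	objs, err := decode(strings.NewReader(input))
	if err != nil {
		t.Fatalf("decode returned an error: %v", err)
	}
	if len(objs) != 2 {
		t.Fatalf("expected 2 objects, got %d", len(objs))
	}
	if kind := objs[1].GetKind(); kind != "CustomRun" {
		t.Errorf("expected second kind CustomRun, got %q", kind)
	}
}
```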
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "flag" - - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun" - corev1 "k8s.io/api/core/v1" - "knative.dev/pkg/injection" - "knative.dev/pkg/injection/sharedmain" - "knative.dev/pkg/signals" -) - -const ( - // ControllerLogKey is the name of the logger for the controller cmd - ControllerLogKey = "pipelineloop-controller" -) - -var ( - namespace = flag.String("namespace", corev1.NamespaceAll, "Namespace to restrict informer to. Optional, defaults to all namespaces.") -) - -func main() { - flag.Parse() - sharedmain.MainWithContext(injection.WithNamespaceScope(signals.NewContext(), *namespace), ControllerLogKey, - pipelinelooprun.NewController(*namespace), - ) -} diff --git a/tekton-catalog/pipeline-loops/cmd/webhook/main.go b/tekton-catalog/pipeline-loops/cmd/webhook/main.go deleted file mode 100644 index dc4eab292e..0000000000 --- a/tekton-catalog/pipeline-loops/cmd/webhook/main.go +++ /dev/null @@ -1,127 +0,0 @@ -/* -Copyright 2019 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "context" - "os" - - pipelineloopv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - defaultconfig "github.com/tektoncd/pipeline/pkg/apis/config" - "k8s.io/apimachinery/pkg/runtime/schema" - "knative.dev/pkg/configmap" - "knative.dev/pkg/controller" - "knative.dev/pkg/injection" - "knative.dev/pkg/injection/sharedmain" - "knative.dev/pkg/logging" - "knative.dev/pkg/signals" - "knative.dev/pkg/system" - "knative.dev/pkg/webhook" - "knative.dev/pkg/webhook/certificates" - "knative.dev/pkg/webhook/resourcesemantics" - "knative.dev/pkg/webhook/resourcesemantics/defaulting" - "knative.dev/pkg/webhook/resourcesemantics/validation" -) - -const ( - // WebhookLogKey is the name of the logger for the webhook cmd. - // This name is also used to form lease names for the leader election of the webhook's controllers. - WebhookLogKey = "pipelineloop-webhook" -) - -var types = map[schema.GroupVersionKind]resourcesemantics.GenericCRD{ - pipelineloopv1alpha1.SchemeGroupVersion.WithKind("PipelineLoop"): &pipelineloopv1alpha1.PipelineLoop{}, -} - -func newDefaultingAdmissionController(ctx context.Context, cmw configmap.Watcher) *controller.Impl { - // Decorate contexts with the current state of the config. - store := defaultconfig.NewStore(logging.FromContext(ctx).Named("config-store")) - store.WatchConfigs(cmw) - - return defaulting.NewAdmissionController(ctx, - - // Name of the resource webhook. 
- "webhook.pipelineloop.custom.tekton.dev", - - // The path on which to serve the webhook. - "/defaulting", - - // The resources to validate and default. - types, - - // A function that infuses the context passed to Validate/SetDefaults with custom metadata. - func(ctx context.Context) context.Context { - return store.ToContext(ctx) - }, - - // Whether to disallow unknown fields. - true, - ) -} - -func newValidationAdmissionController(ctx context.Context, cmw configmap.Watcher) *controller.Impl { - // Decorate contexts with the current state of the config. - store := defaultconfig.NewStore(logging.FromContext(ctx).Named("config-store")) - store.WatchConfigs(cmw) - return validation.NewAdmissionController(ctx, - - // Name of the resource webhook. - "validation.webhook.pipelineloop.custom.tekton.dev", - - // The path on which to serve the webhook. - "/resource-validation", - - // The resources to validate and default. - types, - - // A function that infuses the context passed to Validate/SetDefaults with custom metadata. - func(ctx context.Context) context.Context { - return store.ToContext(ctx) - }, - - // Whether to disallow unknown fields. - true, - ) -} - -func main() { - serviceName := os.Getenv("WEBHOOK_SERVICE_NAME") - if serviceName == "" { - serviceName = "tekton-pipelineloop-webhook" - } - - secretName := os.Getenv("WEBHOOK_SECRET_NAME") - if secretName == "" { - secretName = "tekton-pipelineloop-webhook-certs" // #nosec - } - - // Scope informers to the webhook's namespace instead of cluster-wide - ctx := injection.WithNamespaceScope(signals.NewContext(), system.Namespace()) - - // Set up a signal context with our webhook options - ctx = webhook.WithOptions(ctx, webhook.Options{ - ServiceName: serviceName, - Port: 8443, - SecretName: secretName, - }) - - sharedmain.MainWithContext(ctx, WebhookLogKey, - certificates.NewController, - newDefaultingAdmissionController, - newValidationAdmissionController, - ) -} diff --git a/tekton-catalog/pipeline-loops/config/200-serviceaccount.yaml b/tekton-catalog/pipeline-loops/config/200-serviceaccount.yaml deleted file mode 100644 index f2e6dc96c2..0000000000 --- a/tekton-catalog/pipeline-loops/config/200-serviceaccount.yaml +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: v1 -kind: ServiceAccount -metadata: - name: tekton-pipelineloop-controller - namespace: tekton-pipelines - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops \ No newline at end of file diff --git a/tekton-catalog/pipeline-loops/config/201-clusterrole.yaml b/tekton-catalog/pipeline-loops/config/201-clusterrole.yaml deleted file mode 100644 index 7b10f3cb6b..0000000000 --- a/tekton-catalog/pipeline-loops/config/201-clusterrole.yaml +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -kind: ClusterRole -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - name: tekton-pipelineloop-controller-cluster-access - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -rules: - # Controller needs cluster access to all of the CRDs that it is responsible for managing. - - apiGroups: ["tekton.dev"] - resources: ["runs", "customruns", "taskruns", "pipelineruns"] - verbs: ["get", "list", "create", "update", "delete", "patch", "watch"] - - apiGroups: ["tekton.dev"] - resources: ["runs/status", "customruns/status", "taskruns/status", "pipelineruns/status", "runs/finalizers", "customruns/finalizers",] - verbs: ["get", "list", "create", "update", "delete", "patch", "watch"] - - apiGroups: ["custom.tekton.dev"] - resources: ["pipelineloops"] - verbs: ["get", "list", "create", "update", "delete", "patch", "watch"] - - apiGroups: ["apps"] - resources: ["deployments", "deployments/finalizers"] - verbs: ["get", "list", "create", "update", "delete", "patch", "watch"] ---- -kind: ClusterRole -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - # This is the access that the controller needs on a per-namespace basis. - name: tekton-pipelineloop-controller-tenant-access - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -rules: - - apiGroups: [""] - resources: ["events"] - verbs: ["get", "list", "create", "update", "delete", "patch", "watch"] ---- -kind: ClusterRole -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - name: tekton-pipelineloop-webhook-cluster-access - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -rules: - # The webhook needs to be able to list and update customresourcedefinitions, - # mainly to update the webhook certificates. 
- - apiGroups: ["apiextensions.k8s.io"] - resources: ["customresourcedefinitions", "customresourcedefinitions/status"] - verbs: ["get", "list", "update", "patch", "watch"] - - apiGroups: [""] - resources: ["namespaces"] - verbs: ["get", "list", "update", "patch", "watch"] - - apiGroups: ["admissionregistration.k8s.io"] - # The webhook performs a reconciliation on these two resources and continuously - # updates configuration. - resources: ["mutatingwebhookconfigurations", "validatingwebhookconfigurations"] - # knative starts informers on these things, which is why we need get, list and watch. - verbs: ["list", "watch"] - - apiGroups: ["admissionregistration.k8s.io"] - resources: ["mutatingwebhookconfigurations"] - # This mutating webhook is responsible for applying defaults to tekton objects - # as they are received. - resourceNames: ["webhook.pipelineloop.custom.tekton.dev"] - # When there are changes to the configs or secrets, knative updates the mutatingwebhook config - # with the updated certificates or the refreshed set of rules. - verbs: ["get", "update"] - - apiGroups: ["apps"] - resources: ["deployments", "deployments/finalizers"] - verbs: ["get", "list", "create", "update", "delete", "patch", "watch"] - - apiGroups: ["admissionregistration.k8s.io"] - resources: ["validatingwebhookconfigurations"] - # validation.webhook.pipelineloop.custom.tekton.dev performs schema validation when you, for example, create PipelineLoops. - resourceNames: ["validation.webhook.pipelineloop.custom.tekton.dev"] - # When there are changes to the configs or secrets, knative updates the validatingwebhook config - # with the updated certificates or the refreshed set of rules. - verbs: ["get", "update"] - - apiGroups: ["policy"] - resources: ["podsecuritypolicies"] - resourceNames: ["tekton-pipelines"] - verbs: ["use"] ---- -kind: ClusterRole -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - name: tekton-pipelineloop-leader-election - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -rules: - # We uses leases for leaderelection - - apiGroups: ["coordination.k8s.io"] - resources: ["leases"] - verbs: ["get", "list", "create", "update", "delete", "patch", "watch"] diff --git a/tekton-catalog/pipeline-loops/config/201-role.yaml b/tekton-catalog/pipeline-loops/config/201-role.yaml deleted file mode 100644 index fb3d560b1c..0000000000 --- a/tekton-catalog/pipeline-loops/config/201-role.yaml +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -kind: Role -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - name: tekton-pipelineloop-controller - namespace: tekton-pipelines - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -rules: - - apiGroups: [""] - resources: ["configmaps"] - verbs: ["list", "watch"] - - apiGroups: [""] - resources: ["configmaps"] - verbs: ["get"] - resourceNames: ["config-leader-election", "config-logging", "config-observability", "object-store-config", "cache-config"] - - apiGroups: ["policy"] - resources: ["podsecuritypolicies"] - resourceNames: ["tekton-pipelines"] - verbs: ["use"] ---- -kind: Role -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -rules: - - apiGroups: [""] - resources: ["configmaps"] - verbs: ["list", "watch"] - # The webhook needs access to these configmaps for logging information. - - apiGroups: [""] - resources: ["configmaps"] - verbs: ["get"] - resourceNames: ["config-logging", "config-observability", "config-leader-election", "object-store-config", "cache-config"] - - apiGroups: [""] - resources: ["secrets"] - verbs: ["list", "watch"] - # The webhook daemon makes a reconciliation loop on tekton-pipelineloop-webhook-certs. Whenever - # the secret changes it updates the webhook configurations with the certificates - # stored in the secret. - - apiGroups: [""] - resources: ["secrets"] - verbs: ["get", "update"] - resourceNames: ["tekton-pipelineloop-webhook-certs"] - - apiGroups: ["policy"] - resources: ["podsecuritypolicies"] - resourceNames: ["tekton-pipelines"] - verbs: ["use"] \ No newline at end of file diff --git a/tekton-catalog/pipeline-loops/config/201-rolebinding.yaml b/tekton-catalog/pipeline-loops/config/201-rolebinding.yaml deleted file mode 100644 index 7613e476a4..0000000000 --- a/tekton-catalog/pipeline-loops/config/201-rolebinding.yaml +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: tekton-pipelineloop-controller - namespace: tekton-pipelines - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -subjects: - - kind: ServiceAccount - name: tekton-pipelineloop-controller - namespace: tekton-pipelines -roleRef: - kind: Role - name: tekton-pipelineloop-controller - apiGroup: rbac.authorization.k8s.io ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -subjects: - - kind: ServiceAccount - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines -roleRef: - kind: Role - name: tekton-pipelineloop-webhook - apiGroup: rbac.authorization.k8s.io diff --git a/tekton-catalog/pipeline-loops/config/202-clusterrolebinding.yaml b/tekton-catalog/pipeline-loops/config/202-clusterrolebinding.yaml deleted file mode 100644 index bd00a9ddfb..0000000000 --- a/tekton-catalog/pipeline-loops/config/202-clusterrolebinding.yaml +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: tekton-pipelineloop-controller-cluster-access - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -subjects: - - kind: ServiceAccount - name: tekton-pipelineloop-controller - namespace: tekton-pipelines -roleRef: - kind: ClusterRole - name: tekton-pipelineloop-controller-cluster-access - apiGroup: rbac.authorization.k8s.io ---- -# If this ClusterRoleBinding is replaced with a RoleBinding -# then the ClusterRole would be namespaced. The access described by -# the tekton-pipelineloop-controller-tenant-access ClusterRole would -# be scoped to individual tenant namespaces. 
-apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: tekton-pipelineloop-controller-tenant-access - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -subjects: - - kind: ServiceAccount - name: tekton-pipelineloop-controller - namespace: tekton-pipelines -roleRef: - kind: ClusterRole - name: tekton-pipelineloop-controller-tenant-access - apiGroup: rbac.authorization.k8s.io ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: tekton-pipelineloop-controller-leaderelection - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -subjects: - - kind: ServiceAccount - name: tekton-pipelineloop-controller - namespace: tekton-pipelines -roleRef: - kind: ClusterRole - name: tekton-pipelineloop-leader-election - apiGroup: rbac.authorization.k8s.io ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: tekton-pipelineloop-webhook-cluster-access - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -subjects: - - kind: ServiceAccount - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines -roleRef: - kind: ClusterRole - name: tekton-pipelineloop-webhook-cluster-access - apiGroup: rbac.authorization.k8s.io ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: tekton-pipelineloop-webhook-leaderelection - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops -subjects: - - kind: ServiceAccount - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines -roleRef: - kind: ClusterRole - name: tekton-pipelineloop-leader-election - apiGroup: rbac.authorization.k8s.io diff --git a/tekton-catalog/pipeline-loops/config/203-object-store-config.yaml b/tekton-catalog/pipeline-loops/config/203-object-store-config.yaml deleted file mode 100644 index 0d41bc64bd..0000000000 --- a/tekton-catalog/pipeline-loops/config/203-object-store-config.yaml +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -apiVersion: v1 -kind: ConfigMap -metadata: - name: object-store-config - namespace: tekton-pipelines - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines-loops -data: - enable: "false" - defaultBucketName: "pipelineloop-default" - region: "us-south" - accessKey: "" - secretKey: "" - serviceEndpoint: "https://s3.us-south.cloud-object-storage.appdomain.cloud" - token: "" \ No newline at end of file diff --git a/tekton-catalog/pipeline-loops/config/204-cache-config.yaml b/tekton-catalog/pipeline-loops/config/204-cache-config.yaml deleted file mode 100644 index b64ca72f3f..0000000000 --- a/tekton-catalog/pipeline-loops/config/204-cache-config.yaml +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -kind: ConfigMap -metadata: - name: cache-config - namespace: tekton-pipelines - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines-loops -data: - disabled: "true" - driver: "mysql" - host: "mysql.kubeflow.svc.cluster.local" - port: "3306" - dbName: "cachedb" - user: "root" - password: "" - timeout: "6m" - extraParams: "" - mysqlDBGroupConcatMaxLen: "4194304" diff --git a/tekton-catalog/pipeline-loops/config/300-pipelineloop.yaml b/tekton-catalog/pipeline-loops/config/300-pipelineloop.yaml deleted file mode 100644 index cb9cbeced0..0000000000 --- a/tekton-catalog/pipeline-loops/config/300-pipelineloop.yaml +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: pipelineloops.custom.tekton.dev - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - pipeline.tekton.dev/release: "devel" - version: "devel" -spec: - group: custom.tekton.dev - versions: - - name: v1alpha1 - served: true - storage: true - # Opt into the status subresource so metadata.generation - # starts to increment - subresources: - status: {} - schema: - openAPIV3Schema: - type: object - # One can use x-kubernetes-preserve-unknown-fields: true - # at the root of the schema (and inside any properties, additionalProperties) - # to get the traditional CRD behaviour that nothing is pruned, despite - # setting spec.preserveUnknownProperties: false. 
- # - # See https://kubernetes.io/blog/2019/06/20/crd-structural-schema/ - # See issue: https://github.com/knative/serving/issues/912 - x-kubernetes-preserve-unknown-fields: true - names: - kind: PipelineLoop - plural: pipelineloops - categories: - - tekton - - tekton-pipelines - scope: Namespaced diff --git a/tekton-catalog/pipeline-loops/config/301-breaktask.yaml b/tekton-catalog/pipeline-loops/config/301-breaktask.yaml deleted file mode 100644 index 4efdfe4ce3..0000000000 --- a/tekton-catalog/pipeline-loops/config/301-breaktask.yaml +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: apiextensions.k8s.io/v1 -kind: CustomResourceDefinition -metadata: - name: breaktasks.custom.tekton.dev - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - pipeline.tekton.dev/release: "devel" - version: "devel" -spec: - group: custom.tekton.dev - versions: - - name: v1alpha1 - served: true - storage: true - # Opt into the status subresource so metadata.generation - # starts to increment - subresources: - status: {} - schema: - openAPIV3Schema: - type: object - # One can use x-kubernetes-preserve-unknown-fields: true - # at the root of the schema (and inside any properties, additionalProperties) - # to get the traditional CRD behaviour that nothing is pruned, despite - # setting spec.preserveUnknownProperties: false. - # - # See https://kubernetes.io/blog/2019/06/20/crd-structural-schema/ - # See issue: https://github.com/knative/serving/issues/912 - x-kubernetes-preserve-unknown-fields: true - names: - kind: BreakTask - plural: breaktasks - categories: - - tekton - - tekton-pipelines - scope: Namespaced diff --git a/tekton-catalog/pipeline-loops/config/500-controller.yaml b/tekton-catalog/pipeline-loops/config/500-controller.yaml deleted file mode 100644 index 5473e54607..0000000000 --- a/tekton-catalog/pipeline-loops/config/500-controller.yaml +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: tekton-pipelineloop-controller - namespace: tekton-pipelines - labels: - app.kubernetes.io/name: controller - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/version: "devel" - app.kubernetes.io/part-of: tekton-pipeline-loops - # tekton.dev/release value replaced with inputs.params.versionTag in pipeline/tekton/publish.yaml - pipeline.tekton.dev/release: "devel" - # labels below are related to istio and should not be used for resource lookup - version: "devel" -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: controller - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - labels: - app.kubernetes.io/name: controller - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/version: "devel" - app.kubernetes.io/part-of: tekton-pipeline-loops - # tekton.dev/release value replaced with inputs.params.versionTag in pipeline/tekton/publish.yaml - pipeline.tekton.dev/release: "devel" - # labels below are related to istio and should not be used for resource lookup - app: tekton-pipelineloop-controller - version: "devel" - spec: - serviceAccountName: tekton-pipelineloop-controller - containers: - - name: tekton-pipelineloop-controller - image: ko://github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/cmd/controller - env: - - name: SYSTEM_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: CONFIG_LEADERELECTION_NAME - value: config-leader-election - - name: CONFIG_LOGGING_NAME - value: config-logging - - name: CONFIG_OBSERVABILITY_NAME - value: config-observability - - name: METRICS_DOMAIN - value: tekton.dev/pipeline - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - runAsGroup: 65532 - runAsNonRoot: true - runAsUser: 65532 - seccompProfile: - type: RuntimeDefault diff --git a/tekton-catalog/pipeline-loops/config/500-webhook-configuration.yaml b/tekton-catalog/pipeline-loops/config/500-webhook-configuration.yaml deleted file mode 100644 index ff386f3e85..0000000000 --- a/tekton-catalog/pipeline-loops/config/500-webhook-configuration.yaml +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: v1 -kind: Secret -metadata: - name: tekton-pipelineloop-webhook-certs - namespace: tekton-pipelines - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - pipeline.tekton.dev/release: "devel" -# The data is populated at install time. 
- ---- -apiVersion: admissionregistration.k8s.io/v1 -kind: ValidatingWebhookConfiguration -metadata: - name: validation.webhook.pipelineloop.custom.tekton.dev - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - pipeline.tekton.dev/release: "devel" -webhooks: -- admissionReviewVersions: - - v1beta1 - clientConfig: - service: - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines - failurePolicy: Fail - sideEffects: None - name: validation.webhook.pipelineloop.custom.tekton.dev - ---- -apiVersion: admissionregistration.k8s.io/v1 -kind: MutatingWebhookConfiguration -metadata: - name: webhook.pipelineloop.custom.tekton.dev - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - pipeline.tekton.dev/release: "devel" -webhooks: -- admissionReviewVersions: - - v1beta1 - clientConfig: - service: - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines - failurePolicy: Fail - sideEffects: None - name: webhook.pipelineloop.custom.tekton.dev - diff --git a/tekton-catalog/pipeline-loops/config/500-webhook.yaml b/tekton-catalog/pipeline-loops/config/500-webhook.yaml deleted file mode 100644 index 9f76646354..0000000000 --- a/tekton-catalog/pipeline-loops/config/500-webhook.yaml +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -apiVersion: apps/v1 -kind: Deployment -metadata: - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines - labels: - app.kubernetes.io/name: webhook - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/version: "devel" - app.kubernetes.io/part-of: tekton-pipeline-loops - pipeline.tekton.dev/release: "devel" - version: "devel" -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: webhook - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - labels: - app.kubernetes.io/name: webhook - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/version: "devel" - app.kubernetes.io/part-of: tekton-pipeline-loops - pipeline.tekton.dev/release: "devel" - app: tekton-pipelines-webhook - version: "devel" - spec: - serviceAccountName: tekton-pipelineloop-webhook - containers: - - name: webhook - image: ko://github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/cmd/webhook - env: - - name: SYSTEM_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - # If you are changing these names, you will also need to update - # the webhook's Role in 200-role.yaml to include the new - # values in the "configmaps" "get" rule. 
- - name: CONFIG_LOGGING_NAME - value: config-logging - - name: CONFIG_OBSERVABILITY_NAME - value: config-observability - - name: CONFIG_LEADERELECTION_NAME - value: config-leader-election - - name: WEBHOOK_SERVICE_NAME - value: tekton-pipelineloop-webhook - - name: WEBHOOK_SECRET_NAME - value: tekton-pipelineloop-webhook-certs - - name: METRICS_DOMAIN - value: tekton.dev/pipeline - ports: - - name: metrics - containerPort: 9090 - - name: profiling - containerPort: 8008 - - name: https-webhook - containerPort: 8443 - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - runAsGroup: 65532 - runAsNonRoot: true - runAsUser: 65532 - seccompProfile: - type: RuntimeDefault ---- -apiVersion: v1 -kind: Service -metadata: - labels: - app.kubernetes.io/name: webhook - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/version: "devel" - app.kubernetes.io/part-of: tekton-pipeline-loops - pipeline.tekton.dev/release: "devel" - app: tekton-pipelines-webhook - version: "devel" - name: tekton-pipelineloop-webhook - namespace: tekton-pipelines -spec: - ports: - # Define metrics and profiling for them to be accessible within service meshes. - - name: http-metrics - port: 9090 - targetPort: 9090 - - name: http-profiling - port: 8008 - targetPort: 8008 - - name: https-webhook - port: 443 - targetPort: 8443 - selector: - app.kubernetes.io/name: webhook - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops diff --git a/tekton-catalog/pipeline-loops/examples/break-task-example.yaml b/tekton-catalog/pipeline-loops/examples/break-task-example.yaml deleted file mode 100644 index e057d061e1..0000000000 --- a/tekton-catalog/pipeline-loops/examples/break-task-example.yaml +++ /dev/null @@ -1,98 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - name: pipeline - annotations: - tekton.dev/output_artifacts: '{}' - tekton.dev/input_artifacts: '{}' - tekton.dev/artifact_bucket: mlpipeline - tekton.dev/artifact_endpoint: minio-service.kubeflow:9000 - tekton.dev/artifact_endpoint_scheme: http:// - tekton.dev/artifact_items: '{"my-in-coop1": []}' - sidecar.istio.io/inject: "false" - pipelines.kubeflow.org/big_data_passing_format: $(workspaces.$TASK_NAME.path)/artifacts/$ORIG_PR_NAME/$TASKRUN_NAME/$TASK_PARAM_NAME - pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "10", "name": "param", - "optional": true, "type": "Integer"}], "name": "pipeline"}' -spec: - params: - - name: param - value: '10' - pipelineSpec: - params: - - name: param - default: '10' - tasks: - - name: pipeline-490d3-for-loop-2 - params: - - name: loop-item-param-1 - value: '[1, 2]' - - name: param - value: $(params.param) - taskSpec: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - spec: - pipelineSpec: - params: - - name: loop-item-param-1 - type: string - - name: param - type: string - tasks: - - name: my-in-coop1 - params: - - name: loop-item-param-1 - value: $(params.loop-item-param-1) - - name: param - value: $(params.param) - taskSpec: - steps: - - name: main - args: - - | - set -e - echo op1 "$0" "$1" - - $(inputs.params.loop-item-param-1) - - $(inputs.params.param) - command: - - sh - - -c - image: library/bash:4.4.23 - params: - - name: loop-item-param-1 - type: string - - name: param - type: string - metadata: - labels: - pipelines.kubeflow.org/pipelinename: '' - pipelines.kubeflow.org/generation: '' - pipelines.kubeflow.org/cache_enabled: "true" - 
annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "my-in-coop1", - "outputs": [], "version": "my-in-coop1@sha256=8ccab3a28a39a406554d964865f2ccb0aed854a43b6de827f613eff2bccd6f8f"}' - tekton.dev/template: '' - timeout: 525600m - - name: condition-cel - params: - - name: outcome - value: $(params.loop-item-param-1) == 2 - taskRef: - name: cel_condition - apiVersion: cel.tekton.dev/v1alpha1 - kind: CEL - timeout: 525600m - - name: pipelineloop-break-operation - taskRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: BreakTask - name: pipelineloop-break-operation - when: - - input: $(tasks.condition-cel.results.outcome) - operator: in - values: - - "true" - timeout: 525600m - parallelism: 1 - iterateParam: loop-item-param-1 - timeout: 525600m diff --git a/tekton-catalog/pipeline-loops/examples/loop-example-basic-retry.yaml b/tekton-catalog/pipeline-loops/examples/loop-example-basic-retry.yaml deleted file mode 100644 index 7d097113fc..0000000000 --- a/tekton-catalog/pipeline-loops/examples/loop-example-basic-retry.yaml +++ /dev/null @@ -1,72 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: pipelineloop -spec: - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" && exit 1; - iterateParam: message ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - tekton.dev/example-loop-pipeline: '{"spec":{"pipelineSpec":{"params":[{"name":"message","type":"string"}],"tasks":[{"name":"echo-loop-task","params":[{"name":"message","value":"$(params.message)"}],"taskSpec":{"params":[{"name":"message","type":"string"}],"steps":[{"name":"echo","image":"ubuntu","imagePullPolicy":"IfNotPresent","script":"#!/usr/bin/env bash\necho \"$(params.message)\"\n"}]}}]},"iterateParam":"message"}}' - name: pr-loop-example - labels: - mylooplabels: mylooplabels -spec: - pipelineSpec: - tasks: - - name: first-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the first task before the loop task" - - name: loop-task - retries: 3 - runAfter: - - first-task - params: - - name: message - value: - - I am the first one - - I am the second one - - I am the third one - taskRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: pipelineloop - - name: last-task - runAfter: - - loop-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the last task after the loop task" diff --git a/tekton-catalog/pipeline-loops/examples/loop-example-basic.yaml b/tekton-catalog/pipeline-loops/examples/loop-example-basic.yaml deleted file mode 100644 index 5fa8413794..0000000000 --- a/tekton-catalog/pipeline-loops/examples/loop-example-basic.yaml +++ /dev/null @@ -1,71 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: pipelineloop -spec: - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - 
iterateParam: message ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - tekton.dev/example-loop-pipeline: '{"spec":{"pipelineSpec":{"params":[{"name":"message","type":"string"}],"tasks":[{"name":"echo-loop-task","params":[{"name":"message","value":"$(params.message)"}],"taskSpec":{"params":[{"name":"message","type":"string"}],"steps":[{"name":"echo","image":"ubuntu","imagePullPolicy":"IfNotPresent","script":"#!/usr/bin/env bash\necho \"$(params.message)\"\n"}]}}]},"iterateParam":"message"}}' - name: pr-loop-example - labels: - pipelines.kubeflow.org/cache_enabled: "true" -spec: - pipelineSpec: - tasks: - - name: first-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the first task before the loop task" - - name: loop-task - runAfter: - - first-task - params: - - name: message - value: - - I am the first one - - I am the second one - - I am the third one - taskRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: pipelineloop - - name: last-task - runAfter: - - loop-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the last task after the loop task" diff --git a/tekton-catalog/pipeline-loops/examples/loop-example-basic_taskspec.yaml b/tekton-catalog/pipeline-loops/examples/loop-example-basic_taskspec.yaml deleted file mode 100644 index ca6926cad3..0000000000 --- a/tekton-catalog/pipeline-loops/examples/loop-example-basic_taskspec.yaml +++ /dev/null @@ -1,61 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - name: pr-loop-example -spec: - pipelineSpec: - tasks: - - name: first-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the first task before the loop task" - - name: loop-task - runAfter: - - first-task - params: - - name: message - value: - - I am the first one - - I am the second one - - I am the third one - taskSpec: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - spec: - iterateParam: message - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - - name: last-task - runAfter: - - loop-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the last task after the loop task" diff --git a/tekton-catalog/pipeline-loops/examples/loop-example-numeric-param.yaml b/tekton-catalog/pipeline-loops/examples/loop-example-numeric-param.yaml deleted file mode 100644 index 6b9bfea3c3..0000000000 --- a/tekton-catalog/pipeline-loops/examples/loop-example-numeric-param.yaml +++ /dev/null @@ -1,79 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: pipelineloop -spec: - # IterationNumberParam points to the current index of the loop, starting from 1. 
- iterationNumberParam: message-index - iterateParam: message - pipelineSpec: - params: - - name: message - type: string - - name: message-index - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - - name: message-index - value: $(params.message-index) - taskSpec: - params: - - name: message - type: string - - name: message-index - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message-index)) $(params.message)" ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - tekton.dev/example-loop-pipeline: '{"spec":{"pipelineSpec":{"params":[{"name":"message","type":"string"}],"tasks":[{"name":"echo-loop-task","params":[{"name":"message","value":"$(params.message)"}],"taskSpec":{"params":[{"name":"message","type":"string"}],"steps":[{"name":"echo","image":"ubuntu","imagePullPolicy":"IfNotPresent","script":"#!/usr/bin/env bash\necho \"$(params.message)\"\n"}]}}]},"iterateParam":"message"}}' - name: pr-loop-example - labels: - pipelines.kubeflow.org/cache_enabled: "true" -spec: - pipelineSpec: - tasks: - - name: first-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the first task before the loop task" - - name: loop-task - runAfter: - - first-task - params: - - name: message - value: - - I am the first one - - I am the second one - - I am the third one - taskRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: pipelineloop - - name: last-task - runAfter: - - loop-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the last task after the loop task" diff --git a/tekton-catalog/pipeline-loops/examples/loop-example-with-parallelism.yaml b/tekton-catalog/pipeline-loops/examples/loop-example-with-parallelism.yaml deleted file mode 100644 index 9d2ce4edaf..0000000000 --- a/tekton-catalog/pipeline-loops/examples/loop-example-with-parallelism.yaml +++ /dev/null @@ -1,72 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: pipelineloop -spec: - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - iterateParam: message - parallelism: 2 ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - annotations: - tekton.dev/example-loop-pipeline: '{"spec":{"pipelineSpec":{"params":[{"name":"message","type":"string"}],"tasks":[{"name":"echo-loop-task","params":[{"name":"message","value":"$(params.message)"}],"taskSpec":{"params":[{"name":"message","type":"string"}],"steps":[{"name":"echo","image":"ubuntu","imagePullPolicy":"IfNotPresent","script":"#!/usr/bin/env bash\necho \"$(params.message)\"\n"}]}}]},"iterateParam":"message"}}' - name: para-loop-example - labels: - mylooplabels: mylooplabels -spec: - pipelineSpec: - tasks: - - name: first-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the first task before the loop task" - - name: loop-task - runAfter: - - first-task - params: - - name: message - value: - - I am the first one - - I am the second one - - I 
am the third one - taskRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: pipelineloop - - name: last-task - runAfter: - - loop-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the last task after the loop task" diff --git a/tekton-catalog/pipeline-loops/examples/loop-example-workspaces.yaml b/tekton-catalog/pipeline-loops/examples/loop-example-workspaces.yaml deleted file mode 100644 index 7e9a38bed5..0000000000 --- a/tekton-catalog/pipeline-loops/examples/loop-example-workspaces.yaml +++ /dev/null @@ -1,132 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: recipe-storage -data: - recipe: | - 1. dry roast split green gram 1 cup - 2. turn it into a fine powder somehow - 3. While it is still warm add jaggery powder 1 cup - 4. In a seperate pan, heat clarified butter to molten consistency - 5. Mix all the items, and add only as much ghee that mixture can be bound into sphere ball - 6. Tasty and healthy (great source of protein/energy) sunnandulu laddus are ready ---- -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: shared-task-storage -spec: - resources: - requests: - storage: 16Mi - volumeMode: Filesystem - accessModes: - - ReadWriteOnce ---- -apiVersion: tekton.dev/v1 -kind: Task -metadata: - name: print-data -spec: - workspaces: - - name: storage - readOnly: true - params: - - name: filename - steps: - - name: print-recipe - image: ubuntu - script: cat $(workspaces.storage.path)/$(params.filename) ---- -apiVersion: tekton.dev/v1 -kind: Task -metadata: - name: fetch-store-data -spec: - workspaces: - - name: recipe-ws - - name: storage - params: - - name: src-filename - - name: dst-filename - steps: - - name: fetch-store-recipe-data - image: ubuntu - script: | - cp $(workspaces.recipe-ws.path)/$(params.src-filename) $(workspaces.storage.path)/$(params.dst-filename) ---- -apiVersion: tekton.dev/v1 -kind: Pipeline -metadata: - name: fetch-and-print-recipe -spec: - params: - - name: dst-filename - type: string - workspaces: - - name: shared-data - - name: recipe-store - tasks: - - name: fetch-the-recipe - taskRef: - name: fetch-store-data - params: - - name: src-filename - value: recipe.txt - - name: dst-filename - value: $(params.dst-filename) - workspaces: - - name: recipe-ws - workspace: recipe-store - - name: storage - workspace: shared-data - - name: print-the-recipe - runAfter: - - fetch-the-recipe - taskRef: - name: print-data - params: - - name: filename - value: $(params.dst-filename) - workspaces: - - name: storage - workspace: shared-data ---- -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: pipelineloop-ws-example -spec: - pipelineRef: - name: fetch-and-print-recipe - workspaces: - - name: recipe-store - configMap: - name: recipe-storage - items: - - key: recipe - path: recipe.txt - - name: shared-data - persistentVolumeClaim: - claimName: shared-task-storage - iterateParam: dst-filename ---- -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - name: pr-ws-loop-example -spec: - pipelineSpec: - tasks: - - name: produce-recipies - retries: 3 - params: - - name: dst-filename - value: - - first-recipe.txt - - second-recipe.txt - - third-recipe.txt - taskRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: pipelineloop-ws-example diff --git a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-nested-loop-embedded.yaml 
b/tekton-catalog/pipeline-loops/examples/pipelinespec-with-nested-loop-embedded.yaml deleted file mode 100644 index 74d0a6c21e..0000000000 --- a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-nested-loop-embedded.yaml +++ /dev/null @@ -1,96 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: PipelineRun -metadata: - name: pr-loop-example - labels: - mylooplabels: mylooplabels -spec: - pipelineSpec: - tasks: - - name: first-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the first task before the loop task" - - name: loop-task - runAfter: - - first-task - params: - - name: message - value: - - I am the first one - - I am the second one - - I am the third one - taskSpec: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - spec: - iterateParam: message - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - - name: loop-task-inner - runAfter: - - echo-loop-task - params: - - name: message - value: - - I am the inner first one - - I am the inner second one - taskSpec: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - spec: - iterateParam: message - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - - name: last-task - runAfter: - - loop-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the last task after the loop task" diff --git a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-nested-loop.yaml b/tekton-catalog/pipeline-loops/examples/pipelinespec-with-nested-loop.yaml deleted file mode 100644 index df1879ecf7..0000000000 --- a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-nested-loop.yaml +++ /dev/null @@ -1,110 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: pipelineloop-inner -spec: - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - iterateParam: message ---- -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: pipelineloop -spec: - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - - name: loop-task-inner - runAfter: - - echo-loop-task - params: - - name: message - value: - - I am the inner first one - - I am the inner second one - taskRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: pipelineloop-inner - iterateParam: message ---- -apiVersion: tekton.dev/v1 -kind: 
PipelineRun -metadata: - annotations: - tekton.dev/example-loop-pipeline: '{"spec":{"pipelineSpec":{"params":[{"name":"message","type":"string"}],"tasks":[{"name":"echo-loop-task","params":[{"name":"message","value":"$(params.message)"}],"taskSpec":{"params":[{"name":"message","type":"string"}],"steps":[{"name":"echo","image":"ubuntu","imagePullPolicy":"IfNotPresent","script":"#!/usr/bin/env bash\necho \"$(params.message)\"\n"}]}}]},"iterateParam":"message"}}' - name: pr-loop-example - labels: - mylooplabels: mylooplabels -spec: - pipelineSpec: - tasks: - - name: first-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the first task before the loop task" - - name: loop-task - runAfter: - - first-task - params: - - name: message - value: - - I am the first one - - I am the second one - - I am the third one - taskRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: pipelineloop - - name: last-task - runAfter: - - loop-task - taskSpec: - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "I am the last task after the loop task" diff --git a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-arrary-value.yaml b/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-arrary-value.yaml deleted file mode 100644 index 52455d7dc7..0000000000 --- a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-arrary-value.yaml +++ /dev/null @@ -1,47 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: pipelineloop -spec: - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: echo-loop-task - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - iterateParam: message ---- -apiVersion: tekton.dev/v1beta1 -kind: CustomRun -metadata: - labels: - custom.tekton.dev/pipelineLoop: pipelineloop - tekton.dev/pipeline: pr-loop-example - tekton.dev/pipelineRun: pr-loop-example - tekton.dev/pipelineTask: loop-task - name: pr-loop-example-loop-9w87k -spec: - params: - - name: message - value: - - I am the first one - - I am the second one - - I am the third one - customRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: pipelineloop diff --git a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-condition.yaml b/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-condition.yaml deleted file mode 100644 index ffc6a192a7..0000000000 --- a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-condition.yaml +++ /dev/null @@ -1,74 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: conditionloop -spec: - pipelineSpec: - params: - - name: message - type: string - tasks: - - name: task1 - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - results: - - name: output - steps: - - name: main - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - printf "$(params.message)" > $(results.output.path) - - name: task-ok - runAfter: - - task1 - when: - - input: "$(tasks.task1.results.output)" - operator: in - values: ["B"] - taskSpec: - steps: - - name: main - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/bin/sh - echo "job 
start to run..." - - name: task-fail - runAfter: - - task1 - when: - - input: "$(tasks.task1.results.output)" - operator: in - values: ["A"] - taskSpec: - steps: - - name: main - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/bin/sh - echo "continue loop" - iterateParam: message - ---- -apiVersion: tekton.dev/v1beta1 -kind: CustomRun -metadata: - name: simplepipelinelooprun - labels: - last-loop-task: "task-fail" -spec: - params: - - name: message - value: '["A", "B", "A"]' - customRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: conditionloop diff --git a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-dict-value.yaml b/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-dict-value.yaml deleted file mode 100644 index 4642c43c4a..0000000000 --- a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-dict-value.yaml +++ /dev/null @@ -1,48 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: echoloop -spec: - pipelineSpec: - params: - - name: message-subvar-a - type: string - - name: message-subvar-b - type: string - tasks: - - name: echo-good-morning - params: - - name: message-subvar-a - value: $(params.message-subvar-a) - - name: message-subvar-b - value: $(params.message-subvar-b) - taskSpec: - params: - - name: message-subvar-a - type: string - - name: message-subvar-b - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "subvar-a: $(params.message-subvar-a)" - echo "subvar-b: $(params.message-subvar-b)" - iterateParam: message - ---- -apiVersion: tekton.dev/v1beta1 -kind: CustomRun -metadata: - name: simplepipelinelooprun -spec: - params: - - name: message - value: '[{"a":"1", "b":"2"}, {"a":"2", "b":"1"}]' - customRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: echoloop - diff --git a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-iterate-numeric.yaml b/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-iterate-numeric.yaml deleted file mode 100644 index d9332bfcc1..0000000000 --- a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-iterate-numeric.yaml +++ /dev/null @@ -1,45 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: echoloop -spec: - pipelineSpec: - params: - - name: iteration-item - type: string - tasks: - - name: echo-good-morning - params: - - name: iteration-item - value: $(params.iteration-item) - taskSpec: - params: - - name: iteration-item - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.iteration-item)" - iterateNumeric: iteration-item - ---- -apiVersion: tekton.dev/v1beta1 -kind: CustomRun -metadata: - name: simplepipelinelooprun -spec: - params: - - name: from - value: '10' - - name: step - value: '1' - - name: to - value: '15' - customRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: echoloop - diff --git a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-string-value.yaml b/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-string-value.yaml deleted file mode 100644 index 4e57c460e3..0000000000 --- a/tekton-catalog/pipeline-loops/examples/pipelinespec-with-run-string-value.yaml +++ /dev/null @@ -1,41 +0,0 @@ -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: echoloop -spec: - pipelineSpec: - params: - - name: message - type: 
string - tasks: - - name: echo-good-morning - params: - - name: message - value: $(params.message) - taskSpec: - params: - - name: message - type: string - steps: - - name: echo - image: ubuntu - imagePullPolicy: IfNotPresent - script: | - #!/usr/bin/env bash - echo "$(params.message)" - iterateParam: message - ---- -apiVersion: tekton.dev/v1beta1 -kind: CustomRun -metadata: - name: simplepipelinelooprun -spec: - params: - - name: message - value: '["I am the first one", "I am the second one", "I am the third one"]' - customRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: echoloop - diff --git a/tekton-catalog/pipeline-loops/examples/simplepipelineloop.yaml b/tekton-catalog/pipeline-loops/examples/simplepipelineloop.yaml deleted file mode 100644 index d963aec6d5..0000000000 --- a/tekton-catalog/pipeline-loops/examples/simplepipelineloop.yaml +++ /dev/null @@ -1,82 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: Task -metadata: - name: simpletask -spec: - params: - - name: word - type: string - - name: suffix - type: string - steps: - - name: echo - image: ubuntu - script: | - #!/usr/bin/env bash - echo "$(params.word)$(params.suffix)" ---- -apiVersion: tekton.dev/v1 -kind: Pipeline -metadata: - name: demo-pipeline -spec: - params: - - name: word - default: "word" - - name: suffix - default: "suffix" - tasks: - - name: simpletask - taskRef: - name: simpletask - params: - - name: word - value: $(params.word) - - name: suffix - value: $(params.suffix) - -# --- -# apiVersion: tekton.dev/v1beta1 -# kind: PipelineRun -# metadata: -# name: demo-pipeline-run-1 -# spec: -# params: -# - name: word -# value: "word" -# - name: suffix -# value: "suffix" -# pipelineRef: -# name: demo-pipeline - ---- -apiVersion: custom.tekton.dev/v1alpha1 -kind: PipelineLoop -metadata: - name: simpletaskloop -spec: - # Task to run (inline taskSpec also works) - pipelineRef: - name: demo-pipeline - # Parameter that contains the values to iterate - iterateParam: word - # Timeout (defaults to global default timeout, usually 1h00m; use "0" for no timeout) - timeout: 60s ---- -apiVersion: tekton.dev/v1beta1 -kind: CustomRun -metadata: - name: simpletasklooprun -spec: - params: - - name: word - value: - - jump - - land - - roll - - name: suffix - value: ing - customRef: - apiVersion: custom.tekton.dev/v1alpha1 - kind: PipelineLoop - name: simpletaskloop diff --git a/tekton-catalog/pipeline-loops/examples/simplepipelineloop_taskspec.yaml b/tekton-catalog/pipeline-loops/examples/simplepipelineloop_taskspec.yaml deleted file mode 100644 index 6624fa7ac2..0000000000 --- a/tekton-catalog/pipeline-loops/examples/simplepipelineloop_taskspec.yaml +++ /dev/null @@ -1,58 +0,0 @@ -apiVersion: tekton.dev/v1 -kind: Task -metadata: - name: simpletask -spec: - params: - - name: word - type: string - - name: suffix - type: string - steps: - - name: echo - image: ubuntu - script: | - #!/usr/bin/env bash - echo "$(params.word)$(params.suffix)" ---- -apiVersion: tekton.dev/v1 -kind: Pipeline -metadata: - name: demo-pipeline -spec: - params: - - name: word - default: "wordi" - - name: suffix - default: "suffixi" - tasks: - - name: simplepipelinetask - taskRef: - name: simpletask - params: - - name: word - value: $(params.word) - - name: suffix - value: $(params.suffix) ---- -apiVersion: tekton.dev/v1beta1 -kind: CustomRun -metadata: - name: simpletasklooprun02 -spec: - params: - - name: word - value: - - jump - - land - - roll - - name: suffix - value: ing - customSpec: - apiVersion: custom.tekton.dev/v1alpha1 - kind: 
PipelineLoop - spec: - pipelineRef: - name: demo-pipeline - iterateParam: word - timeout: 60s diff --git a/tekton-catalog/pipeline-loops/go.mod b/tekton-catalog/pipeline-loops/go.mod deleted file mode 100644 index 4a49cffaa6..0000000000 --- a/tekton-catalog/pipeline-loops/go.mod +++ /dev/null @@ -1,28 +0,0 @@ -module github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops - -go 1.13 - -require ( - github.com/cenkalti/backoff/v4 v4.1.3 - github.com/google/go-cmp v0.5.9 - github.com/hashicorp/go-multierror v1.1.1 - github.com/kubeflow/kfp-tekton/tekton-catalog/cache v0.0.0 - github.com/kubeflow/kfp-tekton/tekton-catalog/objectstore v0.0.0 - github.com/tektoncd/pipeline v0.50.1 - go.uber.org/zap v1.24.0 - gomodules.xyz/jsonpatch/v2 v2.2.0 - k8s.io/api v0.27.1 - k8s.io/apimachinery v0.27.1 - k8s.io/client-go v0.27.1 - k8s.io/utils v0.0.0-20230505201702-9f6742963106 - knative.dev/pkg v0.0.0-20230418073056-dfad48eaa5d0 -) - -replace ( - github.com/kubeflow/kfp-tekton/tekton-catalog/cache => ../cache/ - github.com/kubeflow/kfp-tekton/tekton-catalog/objectstore => ../objectstore/ - k8s.io/api => k8s.io/api v0.25.9 - k8s.io/apimachinery => k8s.io/apimachinery v0.26.5 - k8s.io/client-go => k8s.io/client-go v0.25.9 - k8s.io/code-generator => k8s.io/code-generator v0.25.9 -) diff --git a/tekton-catalog/pipeline-loops/hack/boilerplate/boilerplate.go.txt b/tekton-catalog/pipeline-loops/hack/boilerplate/boilerplate.go.txt deleted file mode 100644 index 6f818683bd..0000000000 --- a/tekton-catalog/pipeline-loops/hack/boilerplate/boilerplate.go.txt +++ /dev/null @@ -1,15 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ diff --git a/tekton-catalog/pipeline-loops/hack/tools.go b/tekton-catalog/pipeline-loops/hack/tools.go deleted file mode 100644 index eae239b181..0000000000 --- a/tekton-catalog/pipeline-loops/hack/tools.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build tools - -package tools - -import ( - _ "knative.dev/pkg/hack" -) diff --git a/tekton-catalog/pipeline-loops/hack/update-codegen.sh b/tekton-catalog/pipeline-loops/hack/update-codegen.sh deleted file mode 100755 index 6551ef16fe..0000000000 --- a/tekton-catalog/pipeline-loops/hack/update-codegen.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2020 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -export GO111MODULE=on - -REPO_ROOT=$(dirname ${BASH_SOURCE})/.. 
-CODEGEN_PKG=${CODEGEN_PKG:-$(cd ${REPO_ROOT}; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../code-generator)} - -KNATIVE_CODEGEN_PKG=${KNATIVE_CODEGEN_PKG:-$(cd ${REPO_ROOT}; ls -d -1 ./vendor/knative.dev/pkg 2>/dev/null || echo ../pkg)} - -# generate the code with: -# --output-base because this script should also be able to run inside the vendor dir of -# k8s.io/kubernetes. The output-base is needed for the generators to output into the vendor dir -# instead of the $GOPATH directly. For normal projects this can be dropped. -chmod +x ${CODEGEN_PKG}/generate-groups.sh -${CODEGEN_PKG}/generate-groups.sh "deepcopy,client,informer,lister" \ - github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis \ - "pipelineloop:v1alpha1" \ - --go-header-file ${REPO_ROOT}/hack/boilerplate/boilerplate.go.txt - -# Knative Injection - -chmod +x ${KNATIVE_CODEGEN_PKG}/hack/generate-knative.sh -${KNATIVE_CODEGEN_PKG}/hack/generate-knative.sh "injection" \ - github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis \ - "pipelineloop:v1alpha1" \ - --go-header-file ${REPO_ROOT}/hack/boilerplate/boilerplate.go.txt - -# Make sure our dependencies are up-to-date -${REPO_ROOT}/hack/update-deps.sh diff --git a/tekton-catalog/pipeline-loops/hack/update-deps.sh b/tekton-catalog/pipeline-loops/hack/update-deps.sh deleted file mode 100755 index 08270dd13d..0000000000 --- a/tekton-catalog/pipeline-loops/hack/update-deps.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2019 The Knative Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -readonly ROOT_DIR=$(dirname $0)/.. -source ${ROOT_DIR}/vendor/knative.dev/test-infra/scripts/library.sh - -set -o errexit -set -o nounset -set -o pipefail - -export GO111MODULE=on -export GOFLAGS=-mod=vendor - -cd ${ROOT_DIR} - -VERSION="release-0.18" - -# The list of dependencies that we track at HEAD and periodically -# float forward in this repository. -FLOATING_DEPS=( - "knative.dev/pkg@${VERSION}" - "knative.dev/eventing@${VERSION}" - "knative.dev/test-infra@${VERSION}" -) - -# Parse flags to determine any we should pass to dep. -GO_GET=0 -while [[ $# -ne 0 ]]; do - parameter=$1 - case ${parameter} in - --upgrade) GO_GET=1 ;; - *) abort "unknown option ${parameter}" ;; - esac - shift -done -readonly GO_GET - -if (( GO_GET )); then - go get -d ${FLOATING_DEPS[@]} -fi - -# Prune modules. 
-go mod tidy -go mod vendor - -rm -rf $(find vendor/ -name 'OWNERS') -rm -rf $(find vendor/ -name '*_test.go') diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/controller.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/controller.go deleted file mode 100644 index eb8bf57a14..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/controller.go +++ /dev/null @@ -1,23 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package pipelineloop - -const ( - // PipelineLoopControllerName holds the name of the TaskRun controller - PipelineLoopControllerName = "PipelineLoop" - BreakTaskName = "BreakTask" -) diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/register.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/register.go deleted file mode 100644 index 523e20f5b9..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/register.go +++ /dev/null @@ -1,22 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package pipelineloop - -const ( - // GroupName is the Kubernetes resource group name for PipelineLoop types. - GroupName = "custom.tekton.dev" -) diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/doc.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/doc.go deleted file mode 100644 index 1ac17cb4b7..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/doc.go +++ /dev/null @@ -1,22 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -// Package v1alpha1 contains API Schema definitions for the pipelineloop v1alpha1 API group -// +k8s:openapi-gen=true -// +k8s:deepcopy-gen=package,register -// +k8s:defaulter-gen=TypeMeta -// +groupName=custom.tekton.dev -package v1alpha1 diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_defaults.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_defaults.go deleted file mode 100644 index 9cbe32e539..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_defaults.go +++ /dev/null @@ -1,51 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package v1alpha1 - -import ( - "context" - "os" - "strconv" - - "knative.dev/pkg/apis" -) - -var _ apis.Defaultable = (*PipelineLoop)(nil) - -// SetDefaults set any defaults for the PipelineLoop -func (tl *PipelineLoop) SetDefaults(ctx context.Context) { - tl.Spec.SetDefaults(ctx) -} - -// SetDefaults set any defaults for the PipelineLoop spec -func (tls *PipelineLoopSpec) SetDefaults(ctx context.Context) { - if tls.Parallelism == 0 { - parallelism := os.Getenv("LOOP_PARALLELISM") - if parallelism == "" { - tls.Parallelism = 1 - return - } - i, err := strconv.Atoi(parallelism) - if err != nil { - // fall back to default 1 - tls.Parallelism = 1 - return - } else { - tls.Parallelism = i - } - } -} diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_types.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_types.go deleted file mode 100644 index 4dd691ed3c..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_types.go +++ /dev/null @@ -1,167 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package v1alpha1 - -import ( - "github.com/tektoncd/pipeline/pkg/apis/pipeline/pod" - tektonv1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" -) - -// +genclient -// +genclient:noStatus -// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object - -// PipelineLoop iteratively executes a Task over elements in an array. 
-// +k8s:openapi-gen=true -type PipelineLoop struct { - metav1.TypeMeta `json:",inline"` - // +optional - metav1.ObjectMeta `json:"metadata"` - - // Spec holds the desired state of the PipelineLoop from the client - // +optional - Spec PipelineLoopSpec `json:"spec"` -} - -// PipelineLoopSpec defines the desired state of the PipelineLoop -type PipelineLoopSpec struct { - // TaskRef is a reference to a task definition. - // +optional - // TaskRef *tektonv1.TaskRef `json:"taskRef,omitempty"` - PipelineRef *tektonv1.PipelineRef `json:"pipelineRef,omitempty"` - - // TaskSpec is a specification of a task - // +optional - PipelineSpec *tektonv1.PipelineSpec `json:"pipelineSpec,omitempty"` - - // IterateParam is the name of the task parameter that is iterated upon. - IterateParam string `json:"iterateParam"` - - // The separator for IterateParam if the IterateParam is a strings with separator char, this field is optional. - // +optional - IterateParamSeparator string `json:"iterateParamStringSeparator,omitempty"` - - // +optional - IterationNumberParam string `json:"iterationNumberParam,omitempty"` - - IterateNumeric string `json:"iterateNumeric"` - - // Time after which the TaskRun times out. - // +optional - Timeout *metav1.Duration `json:"timeout,omitempty"` - - // Parallelism represents how many pipelines can be triggered simultaneously by the loop. - // +optional - Parallelism int `json:"parallelism,omitempty"` - - // Retries represents how many times a task should be retried in case of task failure. - // +optional - Retries int `json:"retries,omitempty"` - - // PodTemplate holds pod specific configuration - // +optional - PodTemplate *pod.PodTemplate `json:"podTemplate,omitempty"` - - // +optional - ServiceAccountName string `json:"serviceAccountName,omitempty"` - - // Workspace to a volume mapping to be consumed by a PipelineRun. - // +optional - Workspaces []tektonv1.WorkspaceBinding `json:"workspaces,omitempty"` - - // TaskRunSpecs holds a set of runtime specs - // +optional - TaskRunSpecs []tektonv1.PipelineTaskRunSpec `json:"taskRunSpecs,omitempty"` -} - -// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object - -// PipelineLoopList contains a list of PipelineLoops -type PipelineLoopList struct { - metav1.TypeMeta `json:",inline"` - // +optional - metav1.ListMeta `json:"metadata,omitempty"` - Items []PipelineLoop `json:"items"` -} - -// PipelineLoopRunReason represents a reason for the Run "Succeeded" condition -type PipelineLoopRunReason string - -const ( - // PipelineLoopRunReasonStarted is the reason set when the Run has just started - PipelineLoopRunReasonStarted PipelineLoopRunReason = "Started" - - // PipelineLoopRunReasonCacheHit indicates that the Run result was fetched from cache instead of performing an actual run. 
- PipelineLoopRunReasonCacheHit PipelineLoopRunReason = "CacheHit" - - // PipelineLoopRunReasonRunning indicates that the Run is in progress - PipelineLoopRunReasonRunning PipelineLoopRunReason = "Running" - - // PipelineLoopRunReasonFailed indicates that one of the TaskRuns created from the Run failed - PipelineLoopRunReasonFailed PipelineLoopRunReason = "Failed" - - // PipelineLoopRunReasonRecursionLimitExceeded indicates that one of the TaskRuns created from the Run has exceeded stack limit - PipelineLoopRunReasonStackLimitExceeded PipelineLoopRunReason = "NestedCallStackLimitExceeded" - - // PipelineLoopRunReasonSucceeded indicates that all of the TaskRuns created from the Run completed successfully - PipelineLoopRunReasonSucceeded PipelineLoopRunReason = "Succeeded" - - // PipelineLoopRunReasonCancelled indicates that a Run was cancelled. - PipelineLoopRunReasonCancelled PipelineLoopRunReason = "PipelineLoopRunCancelled" - - // PipelineLoopRunReasonCouldntCancel indicates that a Run was cancelled but attempting to update - // the running TaskRun as cancelled failed. - PipelineLoopRunReasonCouldntCancel PipelineLoopRunReason = "PipelineLoopRunCouldntCancel" - - // PipelineLoopRunReasonCouldntGetPipelineLoop indicates that the associated PipelineLoop couldn't be retrieved - PipelineLoopRunReasonCouldntGetPipelineLoop PipelineLoopRunReason = "CouldntGetPipelineLoop" - - // PipelineLoopRunReasonFailedValidation indicates that the PipelineLoop failed runtime validation - PipelineLoopRunReasonFailedValidation PipelineLoopRunReason = "PipelineLoopValidationFailed" - - // PipelineLoopRunReasonInternalError indicates that the PipelineLoop failed due to an internal error in the reconciler - PipelineLoopRunReasonInternalError PipelineLoopRunReason = "PipelineLoopInternalError" -) - -func (t PipelineLoopRunReason) String() string { - return string(t) -} - -// PipelineLoopRunStatus contains the status stored in the ExtraFields of a Run that references a PipelineLoop. -type PipelineLoopRunStatus struct { - // PipelineLoopSpec contains the exact spec used to instantiate the Run - PipelineLoopSpec *PipelineLoopSpec `json:"pipelineLoopSpec,omitempty"` - // current running pipelinerun number - // +optional - CurrentRunning int `json:"currentRunning,omitempty"` - // map of PipelineLoopPipelineRunStatus with the PipelineRun name as the key - // +optional - PipelineRuns map[string]*PipelineLoopPipelineRunStatus `json:"pipelineRuns,omitempty"` -} - -// PipelineLoopPipelineRunStatus contains the iteration number for a PipelineRun, -// current running pipeline number, and the PipelineRun's Status -type PipelineLoopPipelineRunStatus struct { - // iteration number - Iteration int `json:"iteration,omitempty"` - // the current iteration item - IterationItem interface{} `json:"iterationItem,omitempty"` - // Status is the TaskRunStatus for the corresponding TaskRun - // +optional - Status *tektonv1.PipelineRunStatus `json:"status,omitempty"` -} diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_validation.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_validation.go deleted file mode 100644 index 4db3132a37..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_validation.go +++ /dev/null @@ -1,72 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package v1alpha1 - -import ( - "context" - "strings" - - "github.com/tektoncd/pipeline/pkg/apis/validate" - "k8s.io/apimachinery/pkg/util/validation" - "knative.dev/pkg/apis" -) - -var _ apis.Validatable = (*PipelineLoop)(nil) - -// Validate PipelineLoop -func (tl *PipelineLoop) Validate(ctx context.Context) *apis.FieldError { - if err := validate.ObjectMetadata(tl.GetObjectMeta()); err != nil { - return err.ViaField("metadata") - } - return tl.Spec.Validate(ctx) -} - -// Validate PipelineLoopSpec -func (tls *PipelineLoopSpec) Validate(ctx context.Context) *apis.FieldError { - if tls.Parallelism < 1 { - return apis.ErrInvalidValue(tls.Parallelism, "spec.Parallelism") - } - // Validate Task reference or inline task spec. - if err := validateTask(ctx, tls); err != nil { - return err - } - return nil -} - -func validateTask(ctx context.Context, tls *PipelineLoopSpec) *apis.FieldError { - // pipelineRef and taskSpec are mutually exclusive. - if (tls.PipelineRef != nil && tls.PipelineRef.Name != "") && tls.PipelineSpec != nil { - return apis.ErrMultipleOneOf("spec.pipelineRef", "spec.pipelineSpec") - } - // Check that one of pipelineRef and taskSpec is present. - if (tls.PipelineRef == nil || tls.PipelineRef.Name == "") && tls.PipelineSpec == nil { - return apis.ErrMissingOneOf("spec.pipelineRef", "spec.pipelineSpec") - } - // Validate PipelineSpec if it's present - if tls.PipelineSpec != nil { - if err := tls.PipelineSpec.Validate(ctx); err != nil { - return err.ViaField("spec.pipelineSpec") - } - } - if tls.PipelineRef != nil && tls.PipelineRef.Name != "" { - // pipelineRef name must be a valid k8s name - if errSlice := validation.IsQualifiedName(tls.PipelineRef.Name); len(errSlice) != 0 { - return apis.ErrInvalidValue(strings.Join(errSlice, ","), "spec.pipelineRef.name") - } - } - return nil -} diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_validation_test.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_validation_test.go deleted file mode 100644 index 39023382bb..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/pipelineloop_validation_test.go +++ /dev/null @@ -1,210 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package v1alpha1_test - -import ( - "context" - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" - pipelineloopv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - tektonv1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - "github.com/tektoncd/pipeline/test/diff" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "knative.dev/pkg/apis" -) - -func TestPipelineLoop_Validate_Success(t *testing.T) { - tests := []struct { - name string - tl *pipelineloopv1alpha1.PipelineLoop - }{{ - name: "pipelineRef", - tl: &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "pipelineloop"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "mypipeline"}, - }, - }, - }, { - name: "pipelineSpecWithoutParam", - tl: &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "pipelineloop"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - IterateParam: "messages", - PipelineSpec: &tektonv1.PipelineSpec{ - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Steps: []tektonv1.Step{{ - Name: "foo", Image: "bar", - }}, - }, - }, - }}, - }, - }, - }, - }, { - name: "pipelineSpecWithParams", - tl: &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "pipelineloop"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - IterateParam: "messages", - PipelineSpec: &tektonv1.PipelineSpec{ - Params: []tektonv1.ParamSpec{{ - Name: "messages", - Type: tektonv1.ParamTypeString, - }, { - Name: "additional-parameter", - Type: tektonv1.ParamTypeString, - }}, - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - Params: []tektonv1.Param{{ - Name: "messages", - Value: tektonv1.ParamValue{}, - }, { - Name: "additional-parameter", - Value: tektonv1.ParamValue{}, - }}, - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Params: []tektonv1.ParamSpec{{ - Name: "messages", - Type: tektonv1.ParamTypeString, - }, { - Name: "additional-parameter", - Type: tektonv1.ParamTypeString, - }}, - Steps: []tektonv1.Step{{ - Name: "foo", Image: "bar", - }}, - }, - }, - }}, - }, - }, - }, - }} - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - ctx := context.Background() - tc.tl.SetDefaults(ctx) - err := tc.tl.Validate(ctx) - if err != nil { - t.Errorf("Unexpected error for %s: %s", tc.name, err) - } - }) - } -} - -func TestPipelineLoop_Validate_Error(t *testing.T) { - tests := []struct { - name string - tl *pipelineloopv1alpha1.PipelineLoop - expectedError apis.FieldError - }{{ - name: "no pipelineRef or pipelineSpec", - tl: &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "pipelineloop"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{}, - }, - expectedError: apis.FieldError{ - Message: "expected exactly one, got neither", - Paths: []string{"spec.pipelineRef", "spec.pipelineSpec"}, - }, - }, { - name: "both pipelineRef and pipelineSpec", - tl: &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "pipelineloop"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "mypipeline"}, - PipelineSpec: &tektonv1.PipelineSpec{ - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Steps: []tektonv1.Step{{ - Name: "foo", Image: "bar", - }}, - }, - }, - }}, - }, - }, - }, - expectedError: apis.FieldError{ - Message: 
"expected exactly one, got both", - Paths: []string{"spec.pipelineRef", "spec.pipelineSpec"}, - }, - }, { - name: "invalid pipelineRef", - tl: &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "pipelineloop"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "_bad"}, - }, - }, - expectedError: apis.FieldError{ - Message: "invalid value: name part must consist of alphanumeric characters, '-', '_' or '.', and must start " + - "and end with an alphanumeric character (e.g. 'MyName', or 'my.name', or '123-abc', regex used for " + - "validation is '([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]')", - Paths: []string{"spec.pipelineRef.name"}, - }, - }, { - name: "invalid pipelineSpec", - tl: &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "pipelineloop"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineSpec: &tektonv1.PipelineSpec{ - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Steps: []tektonv1.Step{{ - Name: "bad@name!", Image: "bar", - }}, - }, - }, - }}, - }, - }, - }, - expectedError: apis.FieldError{ - Message: `invalid value "bad@name!"`, - Details: "Task step name must be a valid DNS Label, For more info refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", - Paths: []string{"spec.pipelineSpec.tasks[0].taskSpec.steps[0].name"}, - }, - }} - for _, tc := range tests { - t.Run(tc.name, func(t *testing.T) { - ctx := context.Background() - tc.tl.SetDefaults(ctx) - err := tc.tl.Validate(ctx) - if err == nil { - t.Errorf("Expected an Error but did not get one for %s", tc.name) - } else { - if d := cmp.Diff(tc.expectedError.Error(), err.Error(), cmpopts.IgnoreUnexported(apis.FieldError{})); d != "" { - t.Errorf("Error is different from expected for %s. diff %s", tc.name, diff.PrintWantGot(d)) - } - } - }) - } -} diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/register.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/register.go deleted file mode 100644 index c16f7a23cb..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/register.go +++ /dev/null @@ -1,54 +0,0 @@ -/* -Copyright 2019 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package v1alpha1 - -import ( - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/runtime" - "k8s.io/apimachinery/pkg/runtime/schema" -) - -// SchemeGroupVersion is group version used to register these objects -var SchemeGroupVersion = schema.GroupVersion{Group: pipelineloop.GroupName, Version: "v1alpha1"} - -// Kind takes an unqualified kind and returns back a Group qualified GroupKind -func Kind(kind string) schema.GroupKind { - return SchemeGroupVersion.WithKind(kind).GroupKind() -} - -// Resource takes an unqualified resource and returns a Group qualified GroupResource -func Resource(resource string) schema.GroupResource { - return SchemeGroupVersion.WithResource(resource).GroupResource() -} - -var ( - schemeBuilder = runtime.NewSchemeBuilder(addKnownTypes) - - // AddToScheme adds Build types to the scheme. - AddToScheme = schemeBuilder.AddToScheme -) - -// Adds the list of known types to Scheme. -func addKnownTypes(scheme *runtime.Scheme) error { - scheme.AddKnownTypes(SchemeGroupVersion, - &PipelineLoop{}, - &PipelineLoopList{}, - ) - metav1.AddToGroupVersion(scheme, SchemeGroupVersion) - return nil -} diff --git a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/zz_generated.deepcopy.go b/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/zz_generated.deepcopy.go deleted file mode 100644 index 80e07a11ca..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1/zz_generated.deepcopy.go +++ /dev/null @@ -1,176 +0,0 @@ -//go:build !ignore_autogenerated -// +build !ignore_autogenerated - -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by deepcopy-gen. DO NOT EDIT. - -package v1alpha1 - -import ( - tektonv1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - runtime "k8s.io/apimachinery/pkg/runtime" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PipelineLoop) DeepCopyInto(out *PipelineLoop) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - in.Spec.DeepCopyInto(&out.Spec) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineLoop. -func (in *PipelineLoop) DeepCopy() *PipelineLoop { - if in == nil { - return nil - } - out := new(PipelineLoop) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *PipelineLoop) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *PipelineLoopList) DeepCopyInto(out *PipelineLoopList) { - *out = *in - out.TypeMeta = in.TypeMeta - in.ListMeta.DeepCopyInto(&out.ListMeta) - if in.Items != nil { - in, out := &in.Items, &out.Items - *out = make([]PipelineLoop, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineLoopList. -func (in *PipelineLoopList) DeepCopy() *PipelineLoopList { - if in == nil { - return nil - } - out := new(PipelineLoopList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. -func (in *PipelineLoopList) DeepCopyObject() runtime.Object { - if c := in.DeepCopy(); c != nil { - return c - } - return nil -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PipelineLoopPipelineRunStatus) DeepCopyInto(out *PipelineLoopPipelineRunStatus) { - *out = *in - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(tektonv1.PipelineRunStatus) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineLoopPipelineRunStatus. -func (in *PipelineLoopPipelineRunStatus) DeepCopy() *PipelineLoopPipelineRunStatus { - if in == nil { - return nil - } - out := new(PipelineLoopPipelineRunStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PipelineLoopRunStatus) DeepCopyInto(out *PipelineLoopRunStatus) { - *out = *in - if in.PipelineLoopSpec != nil { - in, out := &in.PipelineLoopSpec, &out.PipelineLoopSpec - *out = new(PipelineLoopSpec) - (*in).DeepCopyInto(*out) - } - if in.PipelineRuns != nil { - in, out := &in.PipelineRuns, &out.PipelineRuns - *out = make(map[string]*PipelineLoopPipelineRunStatus, len(*in)) - for key, val := range *in { - var outVal *PipelineLoopPipelineRunStatus - if val == nil { - (*out)[key] = nil - } else { - in, out := &val, &outVal - *out = new(PipelineLoopPipelineRunStatus) - (*in).DeepCopyInto(*out) - } - (*out)[key] = outVal - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineLoopRunStatus. -func (in *PipelineLoopRunStatus) DeepCopy() *PipelineLoopRunStatus { - if in == nil { - return nil - } - out := new(PipelineLoopRunStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PipelineLoopSpec) DeepCopyInto(out *PipelineLoopSpec) { - *out = *in - if in.PipelineRef != nil { - in, out := &in.PipelineRef, &out.PipelineRef - *out = new(tektonv1.PipelineRef) - **out = **in - } - if in.PipelineSpec != nil { - in, out := &in.PipelineSpec, &out.PipelineSpec - *out = new(tektonv1.PipelineSpec) - (*in).DeepCopyInto(*out) - } - if in.Timeout != nil { - in, out := &in.Timeout, &out.Timeout - *out = new(v1.Duration) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineLoopSpec. 
-func (in *PipelineLoopSpec) DeepCopy() *PipelineLoopSpec { - if in == nil { - return nil - } - out := new(PipelineLoopSpec) - in.DeepCopyInto(out) - return out -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/clientset.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/clientset.go deleted file mode 100644 index 5c5968c7cc..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/clientset.go +++ /dev/null @@ -1,97 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -package versioned - -import ( - "fmt" - - customv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1" - discovery "k8s.io/client-go/discovery" - rest "k8s.io/client-go/rest" - flowcontrol "k8s.io/client-go/util/flowcontrol" -) - -type Interface interface { - Discovery() discovery.DiscoveryInterface - CustomV1alpha1() customv1alpha1.CustomV1alpha1Interface -} - -// Clientset contains the clients for groups. Each group has exactly one -// version included in a Clientset. -type Clientset struct { - *discovery.DiscoveryClient - customV1alpha1 *customv1alpha1.CustomV1alpha1Client -} - -// CustomV1alpha1 retrieves the CustomV1alpha1Client -func (c *Clientset) CustomV1alpha1() customv1alpha1.CustomV1alpha1Interface { - return c.customV1alpha1 -} - -// Discovery retrieves the DiscoveryClient -func (c *Clientset) Discovery() discovery.DiscoveryInterface { - if c == nil { - return nil - } - return c.DiscoveryClient -} - -// NewForConfig creates a new Clientset for the given config. -// If config's RateLimiter is not set and QPS and Burst are acceptable, -// NewForConfig will generate a rate-limiter in configShallowCopy. -func NewForConfig(c *rest.Config) (*Clientset, error) { - configShallowCopy := *c - if configShallowCopy.RateLimiter == nil && configShallowCopy.QPS > 0 { - if configShallowCopy.Burst <= 0 { - return nil, fmt.Errorf("burst is required to be greater than 0 when RateLimiter is not set and QPS is set to greater than 0") - } - configShallowCopy.RateLimiter = flowcontrol.NewTokenBucketRateLimiter(configShallowCopy.QPS, configShallowCopy.Burst) - } - var cs Clientset - var err error - cs.customV1alpha1, err = customv1alpha1.NewForConfig(&configShallowCopy) - if err != nil { - return nil, err - } - - cs.DiscoveryClient, err = discovery.NewDiscoveryClientForConfig(&configShallowCopy) - if err != nil { - return nil, err - } - return &cs, nil -} - -// NewForConfigOrDie creates a new Clientset for the given config and -// panics if there is an error in the config. -func NewForConfigOrDie(c *rest.Config) *Clientset { - var cs Clientset - cs.customV1alpha1 = customv1alpha1.NewForConfigOrDie(c) - - cs.DiscoveryClient = discovery.NewDiscoveryClientForConfigOrDie(c) - return &cs -} - -// New creates a new Clientset for the given RESTClient. 
-func New(c rest.Interface) *Clientset { - var cs Clientset - cs.customV1alpha1 = customv1alpha1.New(c) - - cs.DiscoveryClient = discovery.NewDiscoveryClient(c) - return &cs -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/doc.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/doc.go deleted file mode 100644 index e48c2aa446..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/doc.go +++ /dev/null @@ -1,20 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -// This package has the automatically generated clientset. -package versioned diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/clientset_generated.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/clientset_generated.go deleted file mode 100644 index ed6f401b6b..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/clientset_generated.go +++ /dev/null @@ -1,82 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -package fake - -import ( - clientset "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned" - customv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1" - fakecustomv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake" - "k8s.io/apimachinery/pkg/runtime" - "k8s.io/apimachinery/pkg/watch" - "k8s.io/client-go/discovery" - fakediscovery "k8s.io/client-go/discovery/fake" - "k8s.io/client-go/testing" -) - -// NewSimpleClientset returns a clientset that will respond with the provided objects. -// It's backed by a very simple object tracker that processes creates, updates and deletions as-is, -// without applying any validations and/or defaults. It shouldn't be considered a replacement -// for a real clientset and is mostly useful in simple unit tests. 
-func NewSimpleClientset(objects ...runtime.Object) *Clientset { - o := testing.NewObjectTracker(scheme, codecs.UniversalDecoder()) - for _, obj := range objects { - if err := o.Add(obj); err != nil { - panic(err) - } - } - - cs := &Clientset{tracker: o} - cs.discovery = &fakediscovery.FakeDiscovery{Fake: &cs.Fake} - cs.AddReactor("*", "*", testing.ObjectReaction(o)) - cs.AddWatchReactor("*", func(action testing.Action) (handled bool, ret watch.Interface, err error) { - gvr := action.GetResource() - ns := action.GetNamespace() - watch, err := o.Watch(gvr, ns) - if err != nil { - return false, nil, err - } - return true, watch, nil - }) - - return cs -} - -// Clientset implements clientset.Interface. Meant to be embedded into a -// struct to get a default implementation. This makes faking out just the method -// you want to test easier. -type Clientset struct { - testing.Fake - discovery *fakediscovery.FakeDiscovery - tracker testing.ObjectTracker -} - -func (c *Clientset) Discovery() discovery.DiscoveryInterface { - return c.discovery -} - -func (c *Clientset) Tracker() testing.ObjectTracker { - return c.tracker -} - -var _ clientset.Interface = &Clientset{} - -// CustomV1alpha1 retrieves the CustomV1alpha1Client -func (c *Clientset) CustomV1alpha1() customv1alpha1.CustomV1alpha1Interface { - return &fakecustomv1alpha1.FakeCustomV1alpha1{Fake: &c.Fake} -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/doc.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/doc.go deleted file mode 100644 index 2c4903250c..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/doc.go +++ /dev/null @@ -1,20 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -// This package has the automatically generated fake clientset. -package fake diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/register.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/register.go deleted file mode 100644 index e7a48f782a..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake/register.go +++ /dev/null @@ -1,56 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. 
- -package fake - -import ( - customv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - runtime "k8s.io/apimachinery/pkg/runtime" - schema "k8s.io/apimachinery/pkg/runtime/schema" - serializer "k8s.io/apimachinery/pkg/runtime/serializer" - utilruntime "k8s.io/apimachinery/pkg/util/runtime" -) - -var scheme = runtime.NewScheme() -var codecs = serializer.NewCodecFactory(scheme) -var parameterCodec = runtime.NewParameterCodec(scheme) -var localSchemeBuilder = runtime.SchemeBuilder{ - customv1alpha1.AddToScheme, -} - -// AddToScheme adds all types of this clientset into the given scheme. This allows composition -// of clientsets, like in: -// -// import ( -// "k8s.io/client-go/kubernetes" -// clientsetscheme "k8s.io/client-go/kubernetes/scheme" -// aggregatorclientsetscheme "k8s.io/kube-aggregator/pkg/client/clientset_generated/clientset/scheme" -// ) -// -// kclientset, _ := kubernetes.NewForConfig(c) -// _ = aggregatorclientsetscheme.AddToScheme(clientsetscheme.Scheme) -// -// After this, RawExtensions in Kubernetes types will serialize kube-aggregator types -// correctly. -var AddToScheme = localSchemeBuilder.AddToScheme - -func init() { - v1.AddToGroupVersion(scheme, schema.GroupVersion{Version: "v1"}) - utilruntime.Must(AddToScheme(scheme)) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme/doc.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme/doc.go deleted file mode 100644 index 7acc2dcf25..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme/doc.go +++ /dev/null @@ -1,20 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -// This package contains the scheme of the automatically generated clientset. -package scheme diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme/register.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme/register.go deleted file mode 100644 index 1b4288da47..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme/register.go +++ /dev/null @@ -1,56 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. 
- -package scheme - -import ( - customv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - runtime "k8s.io/apimachinery/pkg/runtime" - schema "k8s.io/apimachinery/pkg/runtime/schema" - serializer "k8s.io/apimachinery/pkg/runtime/serializer" - utilruntime "k8s.io/apimachinery/pkg/util/runtime" -) - -var Scheme = runtime.NewScheme() -var Codecs = serializer.NewCodecFactory(Scheme) -var ParameterCodec = runtime.NewParameterCodec(Scheme) -var localSchemeBuilder = runtime.SchemeBuilder{ - customv1alpha1.AddToScheme, -} - -// AddToScheme adds all types of this clientset into the given scheme. This allows composition -// of clientsets, like in: -// -// import ( -// "k8s.io/client-go/kubernetes" -// clientsetscheme "k8s.io/client-go/kubernetes/scheme" -// aggregatorclientsetscheme "k8s.io/kube-aggregator/pkg/client/clientset_generated/clientset/scheme" -// ) -// -// kclientset, _ := kubernetes.NewForConfig(c) -// _ = aggregatorclientsetscheme.AddToScheme(clientsetscheme.Scheme) -// -// After this, RawExtensions in Kubernetes types will serialize kube-aggregator types -// correctly. -var AddToScheme = localSchemeBuilder.AddToScheme - -func init() { - v1.AddToGroupVersion(Scheme, schema.GroupVersion{Version: "v1"}) - utilruntime.Must(AddToScheme(Scheme)) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/doc.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/doc.go deleted file mode 100644 index 41e872fe9a..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/doc.go +++ /dev/null @@ -1,20 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -// This package has the automatically generated typed clients. -package v1alpha1 diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/doc.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/doc.go deleted file mode 100644 index c7f6e65cab..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/doc.go +++ /dev/null @@ -1,20 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. 
- -// Package fake has the automatically generated clients. -package fake diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/fake_pipelineloop.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/fake_pipelineloop.go deleted file mode 100644 index c288d8407a..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/fake_pipelineloop.go +++ /dev/null @@ -1,130 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -package fake - -import ( - "context" - - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - labels "k8s.io/apimachinery/pkg/labels" - schema "k8s.io/apimachinery/pkg/runtime/schema" - types "k8s.io/apimachinery/pkg/types" - watch "k8s.io/apimachinery/pkg/watch" - testing "k8s.io/client-go/testing" -) - -// FakePipelineLoops implements PipelineLoopInterface -type FakePipelineLoops struct { - Fake *FakeCustomV1alpha1 - ns string -} - -var pipelineloopsResource = schema.GroupVersionResource{Group: "custom.tekton.dev", Version: "v1alpha1", Resource: "pipelineloops"} - -var pipelineloopsKind = schema.GroupVersionKind{Group: "custom.tekton.dev", Version: "v1alpha1", Kind: "PipelineLoop"} - -// Get takes name of the pipelineLoop, and returns the corresponding pipelineLoop object, and an error if there is any. -func (c *FakePipelineLoops) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.PipelineLoop, err error) { - obj, err := c.Fake. - Invokes(testing.NewGetAction(pipelineloopsResource, c.ns, name), &v1alpha1.PipelineLoop{}) - - if obj == nil { - return nil, err - } - return obj.(*v1alpha1.PipelineLoop), err -} - -// List takes label and field selectors, and returns the list of PipelineLoops that match those selectors. -func (c *FakePipelineLoops) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.PipelineLoopList, err error) { - obj, err := c.Fake. - Invokes(testing.NewListAction(pipelineloopsResource, pipelineloopsKind, c.ns, opts), &v1alpha1.PipelineLoopList{}) - - if obj == nil { - return nil, err - } - - label, _, _ := testing.ExtractFromListOptions(opts) - if label == nil { - label = labels.Everything() - } - list := &v1alpha1.PipelineLoopList{ListMeta: obj.(*v1alpha1.PipelineLoopList).ListMeta} - for _, item := range obj.(*v1alpha1.PipelineLoopList).Items { - if label.Matches(labels.Set(item.Labels)) { - list.Items = append(list.Items, item) - } - } - return list, err -} - -// Watch returns a watch.Interface that watches the requested pipelineLoops. -func (c *FakePipelineLoops) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return c.Fake. - InvokesWatch(testing.NewWatchAction(pipelineloopsResource, c.ns, opts)) - -} - -// Create takes the representation of a pipelineLoop and creates it. 
Returns the server's representation of the pipelineLoop, and an error, if there is any. -func (c *FakePipelineLoops) Create(ctx context.Context, pipelineLoop *v1alpha1.PipelineLoop, opts v1.CreateOptions) (result *v1alpha1.PipelineLoop, err error) { - obj, err := c.Fake. - Invokes(testing.NewCreateAction(pipelineloopsResource, c.ns, pipelineLoop), &v1alpha1.PipelineLoop{}) - - if obj == nil { - return nil, err - } - return obj.(*v1alpha1.PipelineLoop), err -} - -// Update takes the representation of a pipelineLoop and updates it. Returns the server's representation of the pipelineLoop, and an error, if there is any. -func (c *FakePipelineLoops) Update(ctx context.Context, pipelineLoop *v1alpha1.PipelineLoop, opts v1.UpdateOptions) (result *v1alpha1.PipelineLoop, err error) { - obj, err := c.Fake. - Invokes(testing.NewUpdateAction(pipelineloopsResource, c.ns, pipelineLoop), &v1alpha1.PipelineLoop{}) - - if obj == nil { - return nil, err - } - return obj.(*v1alpha1.PipelineLoop), err -} - -// Delete takes name of the pipelineLoop and deletes it. Returns an error if one occurs. -func (c *FakePipelineLoops) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - _, err := c.Fake. - Invokes(testing.NewDeleteAction(pipelineloopsResource, c.ns, name), &v1alpha1.PipelineLoop{}) - - return err -} - -// DeleteCollection deletes a collection of objects. -func (c *FakePipelineLoops) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - action := testing.NewDeleteCollectionAction(pipelineloopsResource, c.ns, listOpts) - - _, err := c.Fake.Invokes(action, &v1alpha1.PipelineLoopList{}) - return err -} - -// Patch applies the patch and returns the patched pipelineLoop. -func (c *FakePipelineLoops) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.PipelineLoop, err error) { - obj, err := c.Fake. - Invokes(testing.NewPatchSubresourceAction(pipelineloopsResource, c.ns, name, pt, data, subresources...), &v1alpha1.PipelineLoop{}) - - if obj == nil { - return nil, err - } - return obj.(*v1alpha1.PipelineLoop), err -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/fake_pipelineloop_client.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/fake_pipelineloop_client.go deleted file mode 100644 index 5e70b8b5e7..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/fake/fake_pipelineloop_client.go +++ /dev/null @@ -1,40 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. 
- -package fake - -import ( - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1" - rest "k8s.io/client-go/rest" - testing "k8s.io/client-go/testing" -) - -type FakeCustomV1alpha1 struct { - *testing.Fake -} - -func (c *FakeCustomV1alpha1) PipelineLoops(namespace string) v1alpha1.PipelineLoopInterface { - return &FakePipelineLoops{c, namespace} -} - -// RESTClient returns a RESTClient that is used to communicate -// with API server by this client implementation. -func (c *FakeCustomV1alpha1) RESTClient() rest.Interface { - var ret *rest.RESTClient - return ret -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/generated_expansion.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/generated_expansion.go deleted file mode 100644 index 4209c417dd..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/generated_expansion.go +++ /dev/null @@ -1,21 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -package v1alpha1 - -type PipelineLoopExpansion interface{} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/pipelineloop.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/pipelineloop.go deleted file mode 100644 index da61135e63..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/pipelineloop.go +++ /dev/null @@ -1,178 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -package v1alpha1 - -import ( - "context" - "time" - - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - scheme "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - types "k8s.io/apimachinery/pkg/types" - watch "k8s.io/apimachinery/pkg/watch" - rest "k8s.io/client-go/rest" -) - -// PipelineLoopsGetter has a method to return a PipelineLoopInterface. -// A group's client should implement this interface. 
-type PipelineLoopsGetter interface { - PipelineLoops(namespace string) PipelineLoopInterface -} - -// PipelineLoopInterface has methods to work with PipelineLoop resources. -type PipelineLoopInterface interface { - Create(ctx context.Context, pipelineLoop *v1alpha1.PipelineLoop, opts v1.CreateOptions) (*v1alpha1.PipelineLoop, error) - Update(ctx context.Context, pipelineLoop *v1alpha1.PipelineLoop, opts v1.UpdateOptions) (*v1alpha1.PipelineLoop, error) - Delete(ctx context.Context, name string, opts v1.DeleteOptions) error - DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error - Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.PipelineLoop, error) - List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.PipelineLoopList, error) - Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) - Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.PipelineLoop, err error) - PipelineLoopExpansion -} - -// pipelineLoops implements PipelineLoopInterface -type pipelineLoops struct { - client rest.Interface - ns string -} - -// newPipelineLoops returns a PipelineLoops -func newPipelineLoops(c *CustomV1alpha1Client, namespace string) *pipelineLoops { - return &pipelineLoops{ - client: c.RESTClient(), - ns: namespace, - } -} - -// Get takes name of the pipelineLoop, and returns the corresponding pipelineLoop object, and an error if there is any. -func (c *pipelineLoops) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.PipelineLoop, err error) { - result = &v1alpha1.PipelineLoop{} - err = c.client.Get(). - Namespace(c.ns). - Resource("pipelineloops"). - Name(name). - VersionedParams(&options, scheme.ParameterCodec). - Do(ctx). - Into(result) - return -} - -// List takes label and field selectors, and returns the list of PipelineLoops that match those selectors. -func (c *pipelineLoops) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.PipelineLoopList, err error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - result = &v1alpha1.PipelineLoopList{} - err = c.client.Get(). - Namespace(c.ns). - Resource("pipelineloops"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Do(ctx). - Into(result) - return -} - -// Watch returns a watch.Interface that watches the requested pipelineLoops. -func (c *pipelineLoops) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - var timeout time.Duration - if opts.TimeoutSeconds != nil { - timeout = time.Duration(*opts.TimeoutSeconds) * time.Second - } - opts.Watch = true - return c.client.Get(). - Namespace(c.ns). - Resource("pipelineloops"). - VersionedParams(&opts, scheme.ParameterCodec). - Timeout(timeout). - Watch(ctx) -} - -// Create takes the representation of a pipelineLoop and creates it. Returns the server's representation of the pipelineLoop, and an error, if there is any. -func (c *pipelineLoops) Create(ctx context.Context, pipelineLoop *v1alpha1.PipelineLoop, opts v1.CreateOptions) (result *v1alpha1.PipelineLoop, err error) { - result = &v1alpha1.PipelineLoop{} - err = c.client.Post(). - Namespace(c.ns). - Resource("pipelineloops"). - VersionedParams(&opts, scheme.ParameterCodec). - Body(pipelineLoop). - Do(ctx). - Into(result) - return -} - -// Update takes the representation of a pipelineLoop and updates it. 
Returns the server's representation of the pipelineLoop, and an error, if there is any. -func (c *pipelineLoops) Update(ctx context.Context, pipelineLoop *v1alpha1.PipelineLoop, opts v1.UpdateOptions) (result *v1alpha1.PipelineLoop, err error) { - result = &v1alpha1.PipelineLoop{} - err = c.client.Put(). - Namespace(c.ns). - Resource("pipelineloops"). - Name(pipelineLoop.Name). - VersionedParams(&opts, scheme.ParameterCodec). - Body(pipelineLoop). - Do(ctx). - Into(result) - return -} - -// Delete takes name of the pipelineLoop and deletes it. Returns an error if one occurs. -func (c *pipelineLoops) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return c.client.Delete(). - Namespace(c.ns). - Resource("pipelineloops"). - Name(name). - Body(&opts). - Do(ctx). - Error() -} - -// DeleteCollection deletes a collection of objects. -func (c *pipelineLoops) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - var timeout time.Duration - if listOpts.TimeoutSeconds != nil { - timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second - } - return c.client.Delete(). - Namespace(c.ns). - Resource("pipelineloops"). - VersionedParams(&listOpts, scheme.ParameterCodec). - Timeout(timeout). - Body(&opts). - Do(ctx). - Error() -} - -// Patch applies the patch and returns the patched pipelineLoop. -func (c *pipelineLoops) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.PipelineLoop, err error) { - result = &v1alpha1.PipelineLoop{} - err = c.client.Patch(pt). - Namespace(c.ns). - Resource("pipelineloops"). - Name(name). - SubResource(subresources...). - VersionedParams(&opts, scheme.ParameterCodec). - Body(data). - Do(ctx). - Into(result) - return -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/pipelineloop_client.go b/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/pipelineloop_client.go deleted file mode 100644 index fce0deead6..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/typed/pipelineloop/v1alpha1/pipelineloop_client.go +++ /dev/null @@ -1,89 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by client-gen. DO NOT EDIT. - -package v1alpha1 - -import ( - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/scheme" - rest "k8s.io/client-go/rest" -) - -type CustomV1alpha1Interface interface { - RESTClient() rest.Interface - PipelineLoopsGetter -} - -// CustomV1alpha1Client is used to interact with features provided by the custom.tekton.dev group. 
-type CustomV1alpha1Client struct { - restClient rest.Interface -} - -func (c *CustomV1alpha1Client) PipelineLoops(namespace string) PipelineLoopInterface { - return newPipelineLoops(c, namespace) -} - -// NewForConfig creates a new CustomV1alpha1Client for the given config. -func NewForConfig(c *rest.Config) (*CustomV1alpha1Client, error) { - config := *c - if err := setConfigDefaults(&config); err != nil { - return nil, err - } - client, err := rest.RESTClientFor(&config) - if err != nil { - return nil, err - } - return &CustomV1alpha1Client{client}, nil -} - -// NewForConfigOrDie creates a new CustomV1alpha1Client for the given config and -// panics if there is an error in the config. -func NewForConfigOrDie(c *rest.Config) *CustomV1alpha1Client { - client, err := NewForConfig(c) - if err != nil { - panic(err) - } - return client -} - -// New creates a new CustomV1alpha1Client for the given RESTClient. -func New(c rest.Interface) *CustomV1alpha1Client { - return &CustomV1alpha1Client{c} -} - -func setConfigDefaults(config *rest.Config) error { - gv := v1alpha1.SchemeGroupVersion - config.GroupVersion = &gv - config.APIPath = "/apis" - config.NegotiatedSerializer = scheme.Codecs.WithoutConversion() - - if config.UserAgent == "" { - config.UserAgent = rest.DefaultKubernetesUserAgent() - } - - return nil -} - -// RESTClient returns a RESTClient that is used to communicate -// with API server by this client implementation. -func (c *CustomV1alpha1Client) RESTClient() rest.Interface { - if c == nil { - return nil - } - return c.restClient -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/factory.go b/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/factory.go deleted file mode 100644 index 678a7a7b97..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/factory.go +++ /dev/null @@ -1,180 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by informer-gen. DO NOT EDIT. - -package externalversions - -import ( - reflect "reflect" - sync "sync" - time "time" - - versioned "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned" - internalinterfaces "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/internalinterfaces" - pipelineloop "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - runtime "k8s.io/apimachinery/pkg/runtime" - schema "k8s.io/apimachinery/pkg/runtime/schema" - cache "k8s.io/client-go/tools/cache" -) - -// SharedInformerOption defines the functional option type for SharedInformerFactory. 
-type SharedInformerOption func(*sharedInformerFactory) *sharedInformerFactory - -type sharedInformerFactory struct { - client versioned.Interface - namespace string - tweakListOptions internalinterfaces.TweakListOptionsFunc - lock sync.Mutex - defaultResync time.Duration - customResync map[reflect.Type]time.Duration - - informers map[reflect.Type]cache.SharedIndexInformer - // startedInformers is used for tracking which informers have been started. - // This allows Start() to be called multiple times safely. - startedInformers map[reflect.Type]bool -} - -// WithCustomResyncConfig sets a custom resync period for the specified informer types. -func WithCustomResyncConfig(resyncConfig map[v1.Object]time.Duration) SharedInformerOption { - return func(factory *sharedInformerFactory) *sharedInformerFactory { - for k, v := range resyncConfig { - factory.customResync[reflect.TypeOf(k)] = v - } - return factory - } -} - -// WithTweakListOptions sets a custom filter on all listers of the configured SharedInformerFactory. -func WithTweakListOptions(tweakListOptions internalinterfaces.TweakListOptionsFunc) SharedInformerOption { - return func(factory *sharedInformerFactory) *sharedInformerFactory { - factory.tweakListOptions = tweakListOptions - return factory - } -} - -// WithNamespace limits the SharedInformerFactory to the specified namespace. -func WithNamespace(namespace string) SharedInformerOption { - return func(factory *sharedInformerFactory) *sharedInformerFactory { - factory.namespace = namespace - return factory - } -} - -// NewSharedInformerFactory constructs a new instance of sharedInformerFactory for all namespaces. -func NewSharedInformerFactory(client versioned.Interface, defaultResync time.Duration) SharedInformerFactory { - return NewSharedInformerFactoryWithOptions(client, defaultResync) -} - -// NewFilteredSharedInformerFactory constructs a new instance of sharedInformerFactory. -// Listers obtained via this SharedInformerFactory will be subject to the same filters -// as specified here. -// Deprecated: Please use NewSharedInformerFactoryWithOptions instead -func NewFilteredSharedInformerFactory(client versioned.Interface, defaultResync time.Duration, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) SharedInformerFactory { - return NewSharedInformerFactoryWithOptions(client, defaultResync, WithNamespace(namespace), WithTweakListOptions(tweakListOptions)) -} - -// NewSharedInformerFactoryWithOptions constructs a new instance of a SharedInformerFactory with additional options. -func NewSharedInformerFactoryWithOptions(client versioned.Interface, defaultResync time.Duration, options ...SharedInformerOption) SharedInformerFactory { - factory := &sharedInformerFactory{ - client: client, - namespace: v1.NamespaceAll, - defaultResync: defaultResync, - informers: make(map[reflect.Type]cache.SharedIndexInformer), - startedInformers: make(map[reflect.Type]bool), - customResync: make(map[reflect.Type]time.Duration), - } - - // Apply all options - for _, opt := range options { - factory = opt(factory) - } - - return factory -} - -// Start initializes all requested informers. -func (f *sharedInformerFactory) Start(stopCh <-chan struct{}) { - f.lock.Lock() - defer f.lock.Unlock() - - for informerType, informer := range f.informers { - if !f.startedInformers[informerType] { - go informer.Run(stopCh) - f.startedInformers[informerType] = true - } - } -} - -// WaitForCacheSync waits for all started informers' cache were synced. 
-func (f *sharedInformerFactory) WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool { - informers := func() map[reflect.Type]cache.SharedIndexInformer { - f.lock.Lock() - defer f.lock.Unlock() - - informers := map[reflect.Type]cache.SharedIndexInformer{} - for informerType, informer := range f.informers { - if f.startedInformers[informerType] { - informers[informerType] = informer - } - } - return informers - }() - - res := map[reflect.Type]bool{} - for informType, informer := range informers { - res[informType] = cache.WaitForCacheSync(stopCh, informer.HasSynced) - } - return res -} - -// InternalInformerFor returns the SharedIndexInformer for obj using an internal -// client. -func (f *sharedInformerFactory) InformerFor(obj runtime.Object, newFunc internalinterfaces.NewInformerFunc) cache.SharedIndexInformer { - f.lock.Lock() - defer f.lock.Unlock() - - informerType := reflect.TypeOf(obj) - informer, exists := f.informers[informerType] - if exists { - return informer - } - - resyncPeriod, exists := f.customResync[informerType] - if !exists { - resyncPeriod = f.defaultResync - } - - informer = newFunc(f.client, resyncPeriod) - f.informers[informerType] = informer - - return informer -} - -// SharedInformerFactory provides shared informers for resources in all known -// API group versions. -type SharedInformerFactory interface { - internalinterfaces.SharedInformerFactory - ForResource(resource schema.GroupVersionResource) (GenericInformer, error) - WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool - - Custom() pipelineloop.Interface -} - -func (f *sharedInformerFactory) Custom() pipelineloop.Interface { - return pipelineloop.New(f, f.namespace, f.tweakListOptions) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/generic.go b/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/generic.go deleted file mode 100644 index 9c237c5e4c..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/generic.go +++ /dev/null @@ -1,62 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by informer-gen. DO NOT EDIT. - -package externalversions - -import ( - "fmt" - - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - schema "k8s.io/apimachinery/pkg/runtime/schema" - cache "k8s.io/client-go/tools/cache" -) - -// GenericInformer is type of SharedIndexInformer which will locate and delegate to other -// sharedInformers based on type -type GenericInformer interface { - Informer() cache.SharedIndexInformer - Lister() cache.GenericLister -} - -type genericInformer struct { - informer cache.SharedIndexInformer - resource schema.GroupResource -} - -// Informer returns the SharedIndexInformer. -func (f *genericInformer) Informer() cache.SharedIndexInformer { - return f.informer -} - -// Lister returns the GenericLister. 
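// Illustrative sketch: constructing and starting the generated factory above.
// Assumes a kubeconfig at /path/to/kubeconfig, the "kubeflow" namespace and a
// 10-minute resync period; these values are placeholders, not part of the package.
//
// import (
//	"time"
//
//	versioned "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned"
//	externalversions "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions"
//	"k8s.io/client-go/tools/clientcmd"
// )
func exampleStartFactory(stopCh <-chan struct{}) error {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder path
	if err != nil {
		return err
	}
	cs, err := versioned.NewForConfig(cfg)
	if err != nil {
		return err
	}
	// Scope the factory to one namespace and request the informer before Start().
	f := externalversions.NewSharedInformerFactoryWithOptions(
		cs, 10*time.Minute, externalversions.WithNamespace("kubeflow"))
	_ = f.Custom().V1alpha1().PipelineLoops().Informer()
	f.Start(stopCh)
	f.WaitForCacheSync(stopCh)
	return nil
}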
-func (f *genericInformer) Lister() cache.GenericLister { - return cache.NewGenericLister(f.Informer().GetIndexer(), f.resource) -} - -// ForResource gives generic access to a shared informer of the matching type -// TODO extend this to unknown resources with a client pool -func (f *sharedInformerFactory) ForResource(resource schema.GroupVersionResource) (GenericInformer, error) { - switch resource { - // Group=custom.tekton.dev, Version=v1alpha1 - case v1alpha1.SchemeGroupVersion.WithResource("pipelineloops"): - return &genericInformer{resource: resource.GroupResource(), informer: f.Custom().V1alpha1().PipelineLoops().Informer()}, nil - - } - - return nil, fmt.Errorf("no informer found for %v", resource) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/internalinterfaces/factory_interfaces.go b/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/internalinterfaces/factory_interfaces.go deleted file mode 100644 index 22748e3b5f..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/internalinterfaces/factory_interfaces.go +++ /dev/null @@ -1,40 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by informer-gen. DO NOT EDIT. - -package internalinterfaces - -import ( - time "time" - - versioned "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - runtime "k8s.io/apimachinery/pkg/runtime" - cache "k8s.io/client-go/tools/cache" -) - -// NewInformerFunc takes versioned.Interface and time.Duration to return a SharedIndexInformer. -type NewInformerFunc func(versioned.Interface, time.Duration) cache.SharedIndexInformer - -// SharedInformerFactory a small interface to allow for adding an informer without an import cycle -type SharedInformerFactory interface { - Start(stopCh <-chan struct{}) - InformerFor(obj runtime.Object, newFunc NewInformerFunc) cache.SharedIndexInformer -} - -// TweakListOptionsFunc is a function that transforms a v1.ListOptions. -type TweakListOptionsFunc func(*v1.ListOptions) diff --git a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/interface.go b/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/interface.go deleted file mode 100644 index 7545c329e1..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/interface.go +++ /dev/null @@ -1,46 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by informer-gen. DO NOT EDIT. - -package pipelineloop - -import ( - internalinterfaces "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/internalinterfaces" - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1" -) - -// Interface provides access to each of this group's versions. -type Interface interface { - // V1alpha1 provides access to shared informers for resources in V1alpha1. - V1alpha1() v1alpha1.Interface -} - -type group struct { - factory internalinterfaces.SharedInformerFactory - namespace string - tweakListOptions internalinterfaces.TweakListOptionsFunc -} - -// New returns a new Interface. -func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) Interface { - return &group{factory: f, namespace: namespace, tweakListOptions: tweakListOptions} -} - -// V1alpha1 returns a new v1alpha1.Interface. -func (g *group) V1alpha1() v1alpha1.Interface { - return v1alpha1.New(g.factory, g.namespace, g.tweakListOptions) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1/interface.go b/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1/interface.go deleted file mode 100644 index c7feca5d16..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1/interface.go +++ /dev/null @@ -1,45 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by informer-gen. DO NOT EDIT. - -package v1alpha1 - -import ( - internalinterfaces "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/internalinterfaces" -) - -// Interface provides access to all the informers in this group version. -type Interface interface { - // PipelineLoops returns a PipelineLoopInformer. - PipelineLoops() PipelineLoopInformer -} - -type version struct { - factory internalinterfaces.SharedInformerFactory - namespace string - tweakListOptions internalinterfaces.TweakListOptionsFunc -} - -// New returns a new Interface. -func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) Interface { - return &version{factory: f, namespace: namespace, tweakListOptions: tweakListOptions} -} - -// PipelineLoops returns a PipelineLoopInformer. 
-func (v *version) PipelineLoops() PipelineLoopInformer { - return &pipelineLoopInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions} -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1/pipelineloop.go b/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1/pipelineloop.go deleted file mode 100644 index 9d15d597d5..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1/pipelineloop.go +++ /dev/null @@ -1,90 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by informer-gen. DO NOT EDIT. - -package v1alpha1 - -import ( - "context" - time "time" - - pipelineloopv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - versioned "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned" - internalinterfaces "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/internalinterfaces" - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - runtime "k8s.io/apimachinery/pkg/runtime" - watch "k8s.io/apimachinery/pkg/watch" - cache "k8s.io/client-go/tools/cache" -) - -// PipelineLoopInformer provides access to a shared informer and lister for -// PipelineLoops. -type PipelineLoopInformer interface { - Informer() cache.SharedIndexInformer - Lister() v1alpha1.PipelineLoopLister -} - -type pipelineLoopInformer struct { - factory internalinterfaces.SharedInformerFactory - tweakListOptions internalinterfaces.TweakListOptionsFunc - namespace string -} - -// NewPipelineLoopInformer constructs a new informer for PipelineLoop type. -// Always prefer using an informer factory to get a shared informer instead of getting an independent -// one. This reduces memory footprint and number of connections to the server. -func NewPipelineLoopInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer { - return NewFilteredPipelineLoopInformer(client, namespace, resyncPeriod, indexers, nil) -} - -// NewFilteredPipelineLoopInformer constructs a new informer for PipelineLoop type. -// Always prefer using an informer factory to get a shared informer instead of getting an independent -// one. This reduces memory footprint and number of connections to the server. 
-func NewFilteredPipelineLoopInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer { - return cache.NewSharedIndexInformer( - &cache.ListWatch{ - ListFunc: func(options v1.ListOptions) (runtime.Object, error) { - if tweakListOptions != nil { - tweakListOptions(&options) - } - return client.CustomV1alpha1().PipelineLoops(namespace).List(context.TODO(), options) - }, - WatchFunc: func(options v1.ListOptions) (watch.Interface, error) { - if tweakListOptions != nil { - tweakListOptions(&options) - } - return client.CustomV1alpha1().PipelineLoops(namespace).Watch(context.TODO(), options) - }, - }, - &pipelineloopv1alpha1.PipelineLoop{}, - resyncPeriod, - indexers, - ) -} - -func (f *pipelineLoopInformer) defaultInformer(client versioned.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer { - return NewFilteredPipelineLoopInformer(client, f.namespace, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions) -} - -func (f *pipelineLoopInformer) Informer() cache.SharedIndexInformer { - return f.factory.InformerFor(&pipelineloopv1alpha1.PipelineLoop{}, f.defaultInformer) -} - -func (f *pipelineLoopInformer) Lister() v1alpha1.PipelineLoopLister { - return v1alpha1.NewPipelineLoopLister(f.Informer().GetIndexer()) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/injection/client/client.go b/tekton-catalog/pipeline-loops/pkg/client/injection/client/client.go deleted file mode 100644 index c395d6358a..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/injection/client/client.go +++ /dev/null @@ -1,49 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. - -package client - -import ( - context "context" - - versioned "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned" - rest "k8s.io/client-go/rest" - injection "knative.dev/pkg/injection" - logging "knative.dev/pkg/logging" -) - -func init() { - injection.Default.RegisterClient(withClient) -} - -// Key is used as the key for associating information with a context.Context. -type Key struct{} - -func withClient(ctx context.Context, cfg *rest.Config) context.Context { - return context.WithValue(ctx, Key{}, versioned.NewForConfigOrDie(cfg)) -} - -// Get extracts the versioned.Interface client from the context. 
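// Illustrative sketch: registering an event handler on the PipelineLoop informer
// defined above. The enqueue callback and the "kubeflow" namespace are placeholders.
//
// import (
//	externalversions "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions"
//	"k8s.io/apimachinery/pkg/labels"
//	"k8s.io/client-go/tools/cache"
// )
func exampleWatchPipelineLoops(f externalversions.SharedInformerFactory, enqueue func(interface{})) error {
	plInformer := f.Custom().V1alpha1().PipelineLoops()
	plInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc:    func(obj interface{}) { enqueue(obj) },
		UpdateFunc: func(oldObj, newObj interface{}) { enqueue(newObj) },
	})
	// The lister serves reads from the informer's local index rather than the API server.
	_, err := plInformer.Lister().PipelineLoops("kubeflow").List(labels.Everything())
	return err
}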
-func Get(ctx context.Context) versioned.Interface { - untyped := ctx.Value(Key{}) - if untyped == nil { - logging.FromContext(ctx).Panic( - "Unable to fetch github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned.Interface from context.") - } - return untyped.(versioned.Interface) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/injection/client/fake/fake.go b/tekton-catalog/pipeline-loops/pkg/client/injection/client/fake/fake.go deleted file mode 100644 index e145d76a0c..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/injection/client/fake/fake.go +++ /dev/null @@ -1,54 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. - -package fake - -import ( - context "context" - - fake "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake" - client "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/client" - runtime "k8s.io/apimachinery/pkg/runtime" - rest "k8s.io/client-go/rest" - injection "knative.dev/pkg/injection" - logging "knative.dev/pkg/logging" -) - -func init() { - injection.Fake.RegisterClient(withClient) -} - -func withClient(ctx context.Context, cfg *rest.Config) context.Context { - ctx, _ = With(ctx) - return ctx -} - -func With(ctx context.Context, objects ...runtime.Object) (context.Context, *fake.Clientset) { - cs := fake.NewSimpleClientset(objects...) - return context.WithValue(ctx, client.Key{}, cs), cs -} - -// Get extracts the Kubernetes client from the context. -func Get(ctx context.Context) *fake.Clientset { - untyped := ctx.Value(client.Key{}) - if untyped == nil { - logging.FromContext(ctx).Panic( - "Unable to fetch github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned/fake.Clientset from context.") - } - return untyped.(*fake.Clientset) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/factory.go b/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/factory.go deleted file mode 100644 index c388f72993..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/factory.go +++ /dev/null @@ -1,56 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. 
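// Illustrative sketch: seeding the fake injection client above in a unit test.
// The PipelineLoop object literal is a placeholder.
//
// import (
//	"context"
//
//	v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1"
//	fakeclient "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/client/fake"
//	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
// )
func exampleFakeContext() context.Context {
	ctx, cs := fakeclient.With(context.Background(), &v1alpha1.PipelineLoop{
		ObjectMeta: metav1.ObjectMeta{Name: "demo-loop", Namespace: "default"},
	})
	_ = cs // the seeded *fake.Clientset can be used for assertions
	return ctx
}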
- -package factory - -import ( - context "context" - - externalversions "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions" - client "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/client" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" - logging "knative.dev/pkg/logging" -) - -func init() { - injection.Default.RegisterInformerFactory(withInformerFactory) -} - -// Key is used as the key for associating information with a context.Context. -type Key struct{} - -func withInformerFactory(ctx context.Context) context.Context { - c := client.Get(ctx) - opts := make([]externalversions.SharedInformerOption, 0, 1) - if injection.HasNamespaceScope(ctx) { - opts = append(opts, externalversions.WithNamespace(injection.GetNamespaceScope(ctx))) - } - return context.WithValue(ctx, Key{}, - externalversions.NewSharedInformerFactoryWithOptions(c, controller.GetResyncPeriod(ctx), opts...)) -} - -// Get extracts the InformerFactory from the context. -func Get(ctx context.Context) externalversions.SharedInformerFactory { - untyped := ctx.Value(Key{}) - if untyped == nil { - logging.FromContext(ctx).Panic( - "Unable to fetch github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions.SharedInformerFactory from context.") - } - return untyped.(externalversions.SharedInformerFactory) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/fake/fake.go b/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/fake/fake.go deleted file mode 100644 index 78b0275cd2..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/fake/fake.go +++ /dev/null @@ -1,45 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. 
- -package fake - -import ( - context "context" - - externalversions "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions" - fake "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/client/fake" - factory "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" -) - -var Get = factory.Get - -func init() { - injection.Fake.RegisterInformerFactory(withInformerFactory) -} - -func withInformerFactory(ctx context.Context) context.Context { - c := fake.Get(ctx) - opts := make([]externalversions.SharedInformerOption, 0, 1) - if injection.HasNamespaceScope(ctx) { - opts = append(opts, externalversions.WithNamespace(injection.GetNamespaceScope(ctx))) - } - return context.WithValue(ctx, factory.Key{}, - externalversions.NewSharedInformerFactoryWithOptions(c, controller.GetResyncPeriod(ctx), opts...)) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/fake/fake.go b/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/fake/fake.go deleted file mode 100644 index 8215d250e2..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/fake/fake.go +++ /dev/null @@ -1,40 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. - -package fake - -import ( - context "context" - - fake "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory/fake" - pipelineloop "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" -) - -var Get = pipelineloop.Get - -func init() { - injection.Fake.RegisterInformer(withInformer) -} - -func withInformer(ctx context.Context) (context.Context, controller.Informer) { - f := fake.Get(ctx) - inf := f.Custom().V1alpha1().PipelineLoops() - return context.WithValue(ctx, pipelineloop.Key{}, inf), inf.Informer() -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/pipelineloop.go b/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/pipelineloop.go deleted file mode 100644 index 03ba08012e..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/pipelineloop.go +++ /dev/null @@ -1,52 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by injection-gen. DO NOT EDIT. - -package pipelineloop - -import ( - context "context" - - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1" - factory "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/informers/factory" - controller "knative.dev/pkg/controller" - injection "knative.dev/pkg/injection" - logging "knative.dev/pkg/logging" -) - -func init() { - injection.Default.RegisterInformer(withInformer) -} - -// Key is used for associating the Informer inside the context.Context. -type Key struct{} - -func withInformer(ctx context.Context) (context.Context, controller.Informer) { - f := factory.Get(ctx) - inf := f.Custom().V1alpha1().PipelineLoops() - return context.WithValue(ctx, Key{}, inf), inf.Informer() -} - -// Get extracts the typed informer from the context. -func Get(ctx context.Context) v1alpha1.PipelineLoopInformer { - untyped := ctx.Value(Key{}) - if untyped == nil { - logging.FromContext(ctx).Panic( - "Unable to fetch github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/informers/externalversions/pipelineloop/v1alpha1.PipelineLoopInformer from context.") - } - return untyped.(v1alpha1.PipelineLoopInformer) -} diff --git a/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1/expansion_generated.go b/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1/expansion_generated.go deleted file mode 100644 index 41cb668add..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1/expansion_generated.go +++ /dev/null @@ -1,27 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by lister-gen. DO NOT EDIT. - -package v1alpha1 - -// PipelineLoopListerExpansion allows custom methods to be added to -// PipelineLoopLister. -type PipelineLoopListerExpansion interface{} - -// PipelineLoopNamespaceListerExpansion allows custom methods to be added to -// PipelineLoopNamespaceLister. 
-type PipelineLoopNamespaceListerExpansion interface{} diff --git a/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1/pipelineloop.go b/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1/pipelineloop.go deleted file mode 100644 index b56d805181..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1/pipelineloop.go +++ /dev/null @@ -1,94 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Code generated by lister-gen. DO NOT EDIT. - -package v1alpha1 - -import ( - v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - "k8s.io/apimachinery/pkg/api/errors" - "k8s.io/apimachinery/pkg/labels" - "k8s.io/client-go/tools/cache" -) - -// PipelineLoopLister helps list PipelineLoops. -type PipelineLoopLister interface { - // List lists all PipelineLoops in the indexer. - List(selector labels.Selector) (ret []*v1alpha1.PipelineLoop, err error) - // PipelineLoops returns an object that can list and get PipelineLoops. - PipelineLoops(namespace string) PipelineLoopNamespaceLister - PipelineLoopListerExpansion -} - -// pipelineLoopLister implements the PipelineLoopLister interface. -type pipelineLoopLister struct { - indexer cache.Indexer -} - -// NewPipelineLoopLister returns a new PipelineLoopLister. -func NewPipelineLoopLister(indexer cache.Indexer) PipelineLoopLister { - return &pipelineLoopLister{indexer: indexer} -} - -// List lists all PipelineLoops in the indexer. -func (s *pipelineLoopLister) List(selector labels.Selector) (ret []*v1alpha1.PipelineLoop, err error) { - err = cache.ListAll(s.indexer, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.PipelineLoop)) - }) - return ret, err -} - -// PipelineLoops returns an object that can list and get PipelineLoops. -func (s *pipelineLoopLister) PipelineLoops(namespace string) PipelineLoopNamespaceLister { - return pipelineLoopNamespaceLister{indexer: s.indexer, namespace: namespace} -} - -// PipelineLoopNamespaceLister helps list and get PipelineLoops. -type PipelineLoopNamespaceLister interface { - // List lists all PipelineLoops in the indexer for a given namespace. - List(selector labels.Selector) (ret []*v1alpha1.PipelineLoop, err error) - // Get retrieves the PipelineLoop from the indexer for a given namespace and name. - Get(name string) (*v1alpha1.PipelineLoop, error) - PipelineLoopNamespaceListerExpansion -} - -// pipelineLoopNamespaceLister implements the PipelineLoopNamespaceLister -// interface. -type pipelineLoopNamespaceLister struct { - indexer cache.Indexer - namespace string -} - -// List lists all PipelineLoops in the indexer for a given namespace. 
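// Illustrative sketch: fetching a PipelineLoop through the namespace lister above.
// The "kubeflow" namespace and "my-loop" name are placeholders.
//
// import (
//	v1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1"
//	listers "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1"
// )
func exampleGetLoop(lister listers.PipelineLoopLister) (*v1alpha1.PipelineLoop, error) {
	// Reads hit the informer-backed index; a missing object yields a standard NotFound error.
	return lister.PipelineLoops("kubeflow").Get("my-loop")
}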
-func (s pipelineLoopNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.PipelineLoop, err error) { - err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { - ret = append(ret, m.(*v1alpha1.PipelineLoop)) - }) - return ret, err -} - -// Get retrieves the PipelineLoop from the indexer for a given namespace and name. -func (s pipelineLoopNamespaceLister) Get(name string) (*v1alpha1.PipelineLoop, error) { - obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) - if err != nil { - return nil, err - } - if !exists { - return nil, errors.NewNotFound(v1alpha1.Resource("pipelineloop"), name) - } - return obj.(*v1alpha1.PipelineLoop), nil -} diff --git a/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/controller.go b/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/controller.go deleted file mode 100644 index fdf5c09192..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/controller.go +++ /dev/null @@ -1,230 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package pipelinelooprun - -import ( - "context" - "fmt" - "os" - "os/signal" - "strconv" - "syscall" - "time" - - backoff "github.com/cenkalti/backoff/v4" - taskCache "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg" - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/db" - cl "github.com/kubeflow/kfp-tekton/tekton-catalog/objectstore/pkg/writer" - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop" - pipelineLoopV1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - pipelineLoopClient "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/client" - pipelineLoopInformer "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop" - pipelineClient "github.com/tektoncd/pipeline/pkg/client/injection/client" - pipelineRunInformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun" - customRunInformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/customrun" - customRunReconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/customrun" - pipelineController "github.com/tektoncd/pipeline/pkg/controller" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" - metaV1 "k8s.io/apimachinery/pkg/apis/meta/v1" - - "k8s.io/client-go/kubernetes" - "k8s.io/client-go/tools/cache" - "k8s.io/utils/clock" - kubeClient "knative.dev/pkg/client/injection/kube/client" - "knative.dev/pkg/configmap" - "knative.dev/pkg/controller" - "knative.dev/pkg/logging" - "knative.dev/pkg/system" -) - -func loadObjectStoreConfig(ctx context.Context, kubeClientSet kubernetes.Interface, o *cl.ObjectStoreConfig) (bool, error) { - configMap, err := kubeClientSet.CoreV1().ConfigMaps(system.Namespace()). 
- Get(ctx, "object-store-config", metaV1.GetOptions{}) - if err != nil { - return false, err - } - var enable bool - if enable, err = strconv.ParseBool(configMap.Data["enable"]); err != nil || !enable { - return false, err - } - - o.AccessKey = configMap.Data["accessKey"] - o.SecretKey = configMap.Data["secretKey"] - o.Region = configMap.Data["region"] - o.ServiceEndpoint = configMap.Data["serviceEndpoint"] - o.DefaultBucketName = configMap.Data["defaultBucketName"] - o.CreateBucket = false - o.Token = configMap.Data["token"] - return true, nil -} - -func loadCacheConfig(ctx context.Context, kubeClientSet kubernetes.Interface, p *db.ConnectionParams) (bool, error) { - configMap, err := kubeClientSet.CoreV1().ConfigMaps(system.Namespace()).Get(ctx, - "cache-config", metaV1.GetOptions{}) - if err != nil { - return true, err - } - if configMap.Data["disabled"] == "true" { - return true, nil - } - p.DbName = configMap.Data["dbName"] - p.DbDriver = configMap.Data["driver"] - p.DbHost = configMap.Data["host"] - p.DbPort = configMap.Data["port"] - p.DbExtraParams = configMap.Data["extraParams"] - p.DbUser = configMap.Data["user"] - p.DbPwd = configMap.Data["password"] - timeout, err := time.ParseDuration(configMap.Data["timeout"]) - if err != nil { - return true, fmt.Errorf("invalid value passed for timeout: %v", err) - } - p.Timeout = timeout - return false, nil -} - -var params db.ConnectionParams - -func initCache(ctx context.Context, kubeClientSet kubernetes.Interface, params db.ConnectionParams) *taskCache.TaskCacheStore { - logger := logging.FromContext(ctx) - disabled, err := loadCacheConfig(ctx, kubeClientSet, ¶ms) - if err != nil { - logger.Errorf("ConfigMap cache-config could not be loaded. "+ - "Cache store disabled. Error : %v", err) - } - cacheStore := &taskCache.TaskCacheStore{Params: params} - if disabled { - cacheStore.Disabled = true - } - if !cacheStore.Disabled { - logger.Infof("Cache store Params: %#v", params) - b := backoff.NewExponentialBackOff() - b.MaxElapsedTime = params.Timeout - var operation = func() error { - return cacheStore.Connect() - } - err := backoff.Retry(operation, b) - if err != nil { - cacheStore.Disabled = true - logger.Errorf("Failed to connect to cache store backend, cache store disabled. err: %v", err) - } else { - logger.Infof("Cache store connected to db with Params: %#v", params) - } - } - return cacheStore -} - -func initLogger(ctx context.Context, kubeClientSet kubernetes.Interface) *zap.SugaredLogger { - var logger = logging.FromContext(ctx) - loggerConfig := cl.ObjectStoreConfig{} - objectStoreLogger := cl.Logger{ - MaxSize: 1024 * 100, // TODO make it configurable via a configmap. - } - enabled, err := loadObjectStoreConfig(ctx, kubeClientSet, &loggerConfig) - if err == nil && enabled { - err = objectStoreLogger.LoadDefaults(loggerConfig) - if err == nil { - _ = objectStoreLogger.Writer.CreateNewBucket(loggerConfig.DefaultBucketName) - } else { - logger.Errorf("error connecting to the object store, %v", err) - } - } - if err == nil && enabled { - logger.Info("Loading object store logger...") - w := zapcore.NewMultiWriteSyncer( - zapcore.AddSync(os.Stdout), - zapcore.AddSync(&objectStoreLogger), - ) - core := zapcore.NewCore( - zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()), - w, - zap.InfoLevel, - ) - logger = zap.New(core).Sugar() - logger.Info("First log msg with object store logger.") - - // set up SIGHUP to send logs to object store before shutdown. 
- signal.Ignore(syscall.SIGHUP, syscall.SIGTERM, syscall.SIGINT) - c := make(chan os.Signal, 3) - signal.Notify(c, syscall.SIGTERM) - signal.Notify(c, syscall.SIGINT) - signal.Notify(c, syscall.SIGHUP) - - go func() { - for { - <-c - err = objectStoreLogger.Close() - fmt.Printf("Synced with object store... %v", err) - os.Exit(0) - } - }() - } else { - logger.Errorf("Object store logging unavailable, %v ", err) - } - return logger -} - -// NewController instantiates a new controller.Impl from knative.dev/pkg/controller -func NewController(namespace string) func(context.Context, configmap.Watcher) *controller.Impl { - return func(ctx context.Context, cmw configmap.Watcher) *controller.Impl { - kubeClientSet := kubeClient.Get(ctx) - pipelineClientSet := pipelineClient.Get(ctx) - pipelineLoopClientSet := pipelineLoopClient.Get(ctx) - customRunInformer := customRunInformer.Get(ctx) - pipelineLoopInformer := pipelineLoopInformer.Get(ctx) - pipelineRunInformer := pipelineRunInformer.Get(ctx) - logger := initLogger(ctx, kubeClientSet) - ctx = logging.WithLogger(ctx, logger) - cacheStore := initCache(ctx, kubeClientSet, params) - c := &Reconciler{ - KubeClientSet: kubeClientSet, - pipelineClientSet: pipelineClientSet, - pipelineloopClientSet: pipelineLoopClientSet, - customRunLister: customRunInformer.Lister(), - pipelineLoopLister: pipelineLoopInformer.Lister(), - pipelineRunLister: pipelineRunInformer.Lister(), - cacheStore: cacheStore, - clock: clock.RealClock{}, - } - - impl := customRunReconciler.NewImpl(ctx, c, func(impl *controller.Impl) controller.Options { - return controller.Options{ - AgentName: "customrun-pipelineloop", - } - }) - - logger.Info("Setting up event handlers") - - // Add event handler for Runs of Pipelineloop - customRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ - FilterFunc: pipelineController.FilterCustomRunRef(pipelineLoopV1alpha1.SchemeGroupVersion.String(), pipelineloop.PipelineLoopControllerName), - Handler: controller.HandleAll(impl.Enqueue), - }) - // Add event handler for Runs of BreakTask - customRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ - FilterFunc: pipelineController.FilterCustomRunRef(pipelineLoopV1alpha1.SchemeGroupVersion.String(), pipelineloop.BreakTaskName), - Handler: controller.HandleAll(impl.Enqueue), - }) - // Add event handler for PipelineRuns controlled by Run - pipelineRunInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ - FilterFunc: pipelineController.FilterOwnerCustomRunRef(customRunInformer.Lister(), pipelineLoopV1alpha1.SchemeGroupVersion.String(), pipelineloop.PipelineLoopControllerName), - Handler: controller.HandleAll(impl.EnqueueControllerOf), - }) - - return impl - } -} diff --git a/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelineloop_range_test.go b/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelineloop_range_test.go deleted file mode 100644 index c0aebedde6..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelineloop_range_test.go +++ /dev/null @@ -1,363 +0,0 @@ -/* -Copyright 2022 The Kubeflow Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package pipelinelooprun - -import ( - "context" - "sort" - "testing" - - "github.com/google/go-cmp/cmp" - pipelineloopv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/test" - tektonv1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - tektonv1beta1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - "github.com/tektoncd/pipeline/test/diff" - "github.com/tektoncd/pipeline/test/names" - corev1 "k8s.io/api/core/v1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - _ "knative.dev/pkg/system/testing" -) - -var expectedPipelineRunWithRange1 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "n-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": "-1", - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "n-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "iteration", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "-1"}, - }}, - }, -} - -var expectedPipelineRunWithRange2 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "n-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": "1", - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "n-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "iteration", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "1"}, - }}, - }, -} -var expectedPipelineRunWithRange3 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - 
Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "n-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": "0", - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "n-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "iteration", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "0"}, - }}, - }, -} - -func TestReconcilePipelineLoopRunRange(t *testing.T) { - - testcases := []struct { - name string - from string - to string - step string - expectedStatus corev1.ConditionStatus - expectedReason pipelineloopv1alpha1.PipelineLoopRunReason - expectedPipelineruns []*tektonv1.PipelineRun - expectedEvents []string - }{{ - name: "Case from = to", - from: "1", - to: "1", - step: "1", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange2}, - expectedEvents: []string{"Normal Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case from 0 to 0 and non zero step increment", - from: "0", - step: "1", - to: "0", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange3}, - expectedEvents: []string{"Normal Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case from < to and +ve step increment", - from: "-1", - step: "1", - to: "0", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange1}, - expectedEvents: []string{"Normal Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case from < to and step == 0", - from: "-1", - step: "0", - to: "0", - expectedStatus: corev1.ConditionFalse, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation, - expectedPipelineruns: []*tektonv1.PipelineRun{}, - expectedEvents: []string{"Normal Started ", "Warning Failed Cannot determine number of iterations: invalid values step: 0 found in runs"}, - }, { - name: "Case to - from < step and step > 0", - from: "1", - step: "1", - to: "-1", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange2}, - expectedEvents: []string{"Normal Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case to - from < step and step > 0", - from: "0", - step: "1", - to: "-1", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange3}, - expectedEvents: []string{"Normal 
Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case from = to", - from: "-1", - step: "1", - to: "-1", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange1}, - expectedEvents: []string{"Normal Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case from > to and non -ve step increment", - from: "1", - step: "0", - to: "-1", - expectedStatus: corev1.ConditionFalse, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation, - expectedPipelineruns: []*tektonv1.PipelineRun{}, - expectedEvents: []string{"Normal Started ", "Warning Failed Cannot determine number of iterations: invalid values step: 0 found in runs"}, - }, { - name: "Case step == 0", - from: "0", - step: "0", - to: "-1", - expectedStatus: corev1.ConditionFalse, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation, - expectedPipelineruns: []*tektonv1.PipelineRun{}, - expectedEvents: []string{"Normal Started ", "Warning Failed Cannot determine number of iterations: invalid values step: 0 found in runs"}, - }, { - name: "Case from > to and -ve step increment", - from: "1", - step: "-1", - to: "-1", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange2}, - expectedEvents: []string{"Normal Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case from > to and -ve step increment", - from: "0", - step: "-1", - to: "-1", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange3}, - expectedEvents: []string{"Normal Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case from == 0\n", - from: "0\n", - step: "-1", - to: "-1\n", - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithRange3}, - expectedEvents: []string{"Normal Started ", "Normal Running Iterations completed: 0"}, - }, { - name: "Case from == abc\n", - from: "abc", - step: "-1", - to: "edf", - expectedStatus: corev1.ConditionFalse, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation, - expectedPipelineruns: []*tektonv1.PipelineRun{}, - expectedEvents: []string{"Normal Started ", "Warning Failed Cannot determine number of iterations: input \"to\" is not a number"}, - }} - - for _, tc := range testcases { - t.Run(tc.name, func(t *testing.T) { - ctx := context.Background() - names.TestingSeed() - run := specifyLoopRange(tc.from, tc.to, tc.step, runPipelineLoopWithIterateNumeric) - d := test.Data{ - CustomRuns: []*tektonv1beta1.CustomRun{run}, - Pipelines: []*tektonv1.Pipeline{nPipeline}, - PipelineRuns: []*tektonv1.PipelineRun{}, - } - - testAssets, _ := getPipelineLoopController(t, d, []*pipelineloopv1alpha1.PipelineLoop{nPipelineLoop}) - c := testAssets.Controller - clients := testAssets.Clients - - if err := c.Reconciler.Reconcile(ctx, getCustomRunName(run)); err != nil { - t.Fatalf("Error reconciling: %s", err) - } - - // Fetch the updated Run - reconciledRun, err := clients.Pipeline.TektonV1beta1().CustomRuns(run.Namespace).Get(ctx, run.Name, metav1.GetOptions{}) - if err != nil { - t.Fatalf("Error getting 
reconciled run from fake client: %s", err) - } - - // Verify that the Run has the expected status and reason. - checkRunCondition(t, reconciledRun, tc.expectedStatus, tc.expectedReason) - - // Verify that a PipelineRun was or was not created depending on the test. - // If the number of expected PipelineRuns is greater than the original number of PipelineRuns - // then the test expects a new PipelineRun to be created. The new PipelineRun must be the - // last one in the list of expected PipelineRuns. - createdPipelineruns := getCreatedPipelinerun(t, clients) - // All the arrays and sub arrays are sorted to ensure there are no sporadic failures - // resulting from mismatch due to different ordering of items. - sort.Slice(createdPipelineruns, func(i, j int) bool { - return createdPipelineruns[i].Name < createdPipelineruns[j].Name - }) - for _, createdPipelinerun := range createdPipelineruns { - sort.Slice(createdPipelinerun.Spec.Params, func(i, j int) bool { - return createdPipelinerun.Spec.Params[i].Name < createdPipelinerun.Spec.Params[j].Name - }) - if createdPipelinerun.Spec.PipelineSpec != nil { - sort.Slice(createdPipelinerun.Spec.PipelineSpec.Params, func(i, j int) bool { - return createdPipelinerun.Spec.PipelineSpec.Params[i].Name < createdPipelinerun.Spec.PipelineSpec.Params[j].Name - }) - sort.Slice(createdPipelinerun.Spec.PipelineSpec.Tasks, func(i, j int) bool { - return createdPipelinerun.Spec.PipelineSpec.Tasks[i].Name < createdPipelinerun.Spec.PipelineSpec.Tasks[j].Name - }) - for _, t := range createdPipelinerun.Spec.PipelineSpec.Tasks { - sort.Slice(t.Params, func(i, j int) bool { - return t.Params[i].Name < t.Params[j].Name - }) - if t.TaskSpec != nil { - sort.Slice(t.TaskSpec.Params, func(i, j int) bool { - return t.TaskSpec.Params[i].Name < t.TaskSpec.Params[j].Name - }) - } - } - } - } - if len(tc.expectedPipelineruns) > 0 { - if len(createdPipelineruns) == 0 { - t.Errorf("A PipelineRun should have been created but was not") - } else { - pipelineRunsExpectedToBeCreated := make([]*tektonv1.PipelineRun, len(createdPipelineruns)) - i := 0 - for _, pr := range tc.expectedPipelineruns { - if pr.Labels["deleted"] != "True" { - pipelineRunsExpectedToBeCreated[i] = pr - i = i + 1 // skip the pr that were retried. - } - } - - if d := cmp.Diff(pipelineRunsExpectedToBeCreated, createdPipelineruns); d != "" { - t.Errorf("Expected PipelineRun was not created. Diff %s", diff.PrintWantGot(d)) - } - } - } else { - if len(createdPipelineruns) > 0 { - t.Errorf("A PipelineRun was created which was not expected") - } - } - - // Verify expected events were created. - if err := checkEvents(testAssets.Recorder, tc.name, tc.expectedEvents); err != nil { - t.Errorf(err.Error()) - } - }) - } -} diff --git a/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelinelooprun.go b/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelinelooprun.go deleted file mode 100644 index 7e0b6b06f0..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelinelooprun.go +++ /dev/null @@ -1,1248 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package pipelinelooprun - -import ( - "context" - "crypto/md5" - "encoding/json" - "fmt" - "log" - "os" - "reflect" - "strconv" - "strings" - "time" - - "k8s.io/apimachinery/pkg/runtime" - "k8s.io/utils/clock" - - duckv1 "knative.dev/pkg/apis/duck/v1" - - "github.com/hashicorp/go-multierror" - cache "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg" - "github.com/kubeflow/kfp-tekton/tekton-catalog/cache/pkg/model" - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop" - pipelineloopv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - pipelineloopclientset "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/clientset/versioned" - listerspipelineloop "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/listers/pipelineloop/v1alpha1" - "github.com/tektoncd/pipeline/pkg/apis/config" - "github.com/tektoncd/pipeline/pkg/apis/pipeline" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - - tektonv1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - tektonv1beta1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - clientset "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" - customRunReconciler "github.com/tektoncd/pipeline/pkg/client/injection/reconciler/pipeline/v1beta1/customrun" - listers "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1" - listersV1beta1 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1beta1" - "github.com/tektoncd/pipeline/pkg/names" - "github.com/tektoncd/pipeline/pkg/reconciler/events" - tkstatus "github.com/tektoncd/pipeline/pkg/status" - "go.uber.org/zap" - "gomodules.xyz/jsonpatch/v2" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/labels" - "k8s.io/apimachinery/pkg/runtime/schema" - "k8s.io/apimachinery/pkg/types" - "k8s.io/client-go/kubernetes" - "knative.dev/pkg/apis" - "knative.dev/pkg/logging" - pkgreconciler "knative.dev/pkg/reconciler" -) - -const ( - // pipelineLoopLabelKey is the label identifier for a PipelineLoop. This label is added to the Run and its PipelineRuns. - pipelineLoopLabelKey = "/pipelineLoop" - - // pipelineLoopRunLabelKey is the label identifier for a Run. This label is added to the Run's PipelineRuns. - pipelineLoopRunLabelKey = "/run" - - // parentPRKey is the label identifier for the original Pipelnerun who created the Run. This label is added to the Run's PipelineRuns. - parentPRKey = "/parentPipelineRun" - - // originalPRKey is the label identifier for the original Pipelnerun (first Pipelinerun) - originalPRKey = "/originalPipelineRun" - - // pipelineLoopIterationLabelKey is the label identifier for the iteration number. This label is added to the Run's PipelineRuns. - pipelineLoopIterationLabelKey = "/pipelineLoopIteration" - - // pipelineLoopCurrentIterationItemAnnotationKey is the annotation identifier for the iteration items (string array). This annotation is added to the Run's PipelineRuns. 
- pipelineLoopCurrentIterationItemAnnotationKey = "/pipelineLoopCurrentIterationItem" - - // LabelKeyWorkflowRunId is the label identifier a pipelinerun is managed by the Kubeflow Pipeline persistent agent. - LabelKeyWorkflowRunId = "pipeline/runid" - - DefaultNestedStackDepth = 30 - DefaultIterationLimit = 10000 - - MaxNestedStackDepthKey = "maxNestedStackDepth" - IterationLimitEnvKey = "IterationLimit" - - defaultIterationParamStrSeparator = "," -) - -// Reconciler implements controller.Reconciler for Configuration resources. -type Reconciler struct { - KubeClientSet kubernetes.Interface - pipelineClientSet clientset.Interface - pipelineloopClientSet pipelineloopclientset.Interface - customRunLister listersV1beta1.CustomRunLister - pipelineLoopLister listerspipelineloop.PipelineLoopLister - pipelineRunLister listers.PipelineRunLister - cacheStore *cache.TaskCacheStore - clock clock.RealClock -} -type CacheKey struct { - Params []tektonv1.Param `json:"params"` - PipelineLoopSpec *pipelineloopv1alpha1.PipelineLoopSpec `json:"pipelineSpec"` -} - -var ( - // Check that our Reconciler implements runreconciler.Interface - _ customRunReconciler.Interface = (*Reconciler)(nil) - cancelPatchBytes []byte - iterationLimit int = DefaultIterationLimit -) - -func init() { - var err error - patches := []jsonpatch.JsonPatchOperation{{ - Operation: "add", - Path: "/spec/status", - Value: tektonv1.PipelineRunSpecStatusCancelled, - }} - cancelPatchBytes, err = json.Marshal(patches) - if err != nil { - log.Fatalf("failed to marshal patch bytes in order to cancel: %v", err) - } - iterationLimitEnv, ok := os.LookupEnv(IterationLimitEnvKey) - if ok { - iterationLimitNum, err := strconv.Atoi(iterationLimitEnv) - if err == nil { - iterationLimit = iterationLimitNum - } - } -} - -func isCachingEnabled(run *tektonv1beta1.CustomRun) bool { - return run.ObjectMeta.Labels["pipelines.kubeflow.org/cache_enabled"] == "true" -} - -func paramConvertTo(ctx context.Context, p *tektonv1beta1.Param, sink *tektonv1.Param) { - sink.Name = p.Name - newValue := tektonv1.ParamValue{} - if p.Value.Type != "" { - newValue.Type = tektonv1.ParamType(p.Value.Type) - } else { - newValue.Type = tektonv1.ParamType(v1beta1.ParamTypeString) - } - newValue.StringVal = p.Value.StringVal - newValue.ArrayVal = p.Value.ArrayVal - newValue.ObjectVal = p.Value.ObjectVal - sink.Value = newValue -} - -func v1ParamsConversion(ctx context.Context, v1beta1Params tektonv1beta1.Params) []tektonv1.Param { - v1Params := []tektonv1.Param{} - for _, param := range v1beta1Params { - v1Param := tektonv1.Param{} - paramConvertTo(ctx, ¶m, &v1Param) - v1Params = append(v1Params, v1Param) - } - return v1Params -} - -// ReconcileKind compares the actual state with the desired, and attempts to converge the two. -// It then updates the Status block of the CustomRun resource with the current status of the resource. 
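(Reader's aid, not part of the removed file: the cancellation bytes prepared in init() above marshal to a single JSON Patch operation. A minimal stand-alone sketch of that payload follows; the literal "Cancelled" stands in for tektonv1.PipelineRunSpecStatusCancelled and the main wrapper is illustrative only.)

    package main

    import (
        "encoding/json"
        "fmt"

        "gomodules.xyz/jsonpatch/v2"
    )

    func main() {
        // Same shape as the patch built in init() above; "Cancelled" is a
        // stand-in for tektonv1.PipelineRunSpecStatusCancelled.
        patches := []jsonpatch.JsonPatchOperation{{
            Operation: "add",
            Path:      "/spec/status",
            Value:     "Cancelled",
        }}
        b, err := json.Marshal(patches)
        if err != nil {
            panic(err)
        }
        // Prints: [{"op":"add","path":"/spec/status","value":"Cancelled"}]
        // This is the body sent with types.JSONPatchType to cancel a PipelineRun.
        fmt.Println(string(b))
    }
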
-func (c *Reconciler) ReconcileKind(ctx context.Context, customRun *tektonv1beta1.CustomRun) pkgreconciler.Event { - var merr error - logger := logging.FromContext(ctx) - logger.Infof("Reconciling CustomRun %s/%s at %v", customRun.Namespace, customRun.Name, time.Now()) - if customRun.Spec.CustomRef != nil && customRun.Spec.CustomSpec != nil { - logger.Errorf("CustomRun %s/%s can provide one of CustomRun.Spec.CustomRef/CustomRun.Spec.CustomSpec", customRun.Namespace, customRun.Name) - return nil - } - if customRun.Spec.CustomSpec == nil && customRun.Spec.CustomRef == nil { - logger.Errorf("CustomRun %s/%s does not provide a spec or ref.", customRun.Namespace, customRun.Name) - return nil - } - if (customRun.Spec.CustomRef != nil && customRun.Spec.CustomRef.Kind == pipelineloop.BreakTaskName) || - (customRun.Spec.CustomSpec != nil && customRun.Spec.CustomSpec.Kind == pipelineloop.BreakTaskName) { - if !customRun.IsDone() { - customRun.Status.InitializeConditions() - customRun.Status.MarkCustomRunSucceeded(pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded.String(), - "Break task is a dummy task.") - } - logger.Infof("Break task encountered %s", customRun.Name) - return nil - } - // Check that the CustomRun references a PipelineLoop CRD. The logic is controller.go should ensure that only this type of Run - // is reconciled this controller but it never hurts to do some bullet-proofing. - if customRun.Spec.CustomRef != nil && - (customRun.Spec.CustomRef.APIVersion != pipelineloopv1alpha1.SchemeGroupVersion.String() || - customRun.Spec.CustomRef.Kind != pipelineloop.PipelineLoopControllerName) { - logger.Errorf("Received control for a CustomRun %s/%s/%v that does not reference a PipelineLoop custom CRD ref", customRun.Namespace, customRun.Name, customRun.Spec.CustomRef) - return nil - } - - if customRun.Spec.CustomSpec != nil && - (customRun.Spec.CustomSpec.APIVersion != pipelineloopv1alpha1.SchemeGroupVersion.String() || - customRun.Spec.CustomSpec.Kind != pipelineloop.PipelineLoopControllerName) { - logger.Errorf("Received control for a CustomRun %s/%s that does not reference a PipelineLoop custom CRD spec", customRun.Namespace, customRun.Name) - return nil - } - logger.Infof("Received control for a CustomRun %s/%s %-v", customRun.Namespace, customRun.Name, customRun.Spec.CustomSpec) - // If the CustomRun has not started, initialize the Condition and set the start time. - if !customRun.HasStarted() { - logger.Infof("Starting new CustomRun %s/%s", customRun.Namespace, customRun.Name) - customRun.Status.InitializeConditions() - // In case node time was not synchronized, when controller has been scheduled to other nodes. - if customRun.Status.StartTime.Sub(customRun.CreationTimestamp.Time) < 0 { - logger.Warnf("Run %s createTimestamp %s is after the CustomRun started %s", customRun.Name, customRun.CreationTimestamp, customRun.Status.StartTime) - customRun.Status.StartTime = &customRun.CreationTimestamp - } - // Emit events. During the first reconcile the status of the CustomRun may change twice - // from not Started to Started and then to Running, so we need to sent the event here - // and at the end of 'Reconcile' again. 
- // We also want to send the "Started" event as soon as possible for anyone who may be waiting - // on the event to perform user facing initialisations, such has reset a CI check status - afterCondition := customRun.Status.GetCondition(apis.ConditionSucceeded) - events.Emit(ctx, nil, afterCondition, customRun) - } - - // Store the condition before reconcile - beforeCondition := customRun.Status.GetCondition(apis.ConditionSucceeded) - - status := &pipelineloopv1alpha1.PipelineLoopRunStatus{} - if err := customRun.Status.DecodeExtraFields(status); err != nil { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonInternalError.String(), - "Internal error calling DecodeExtraFields: %v", err) - logger.Errorf("DecodeExtraFields error: %v", err.Error()) - } - - if customRun.IsDone() { - if customRun.IsSuccessful() && !c.cacheStore.Disabled && isCachingEnabled(customRun) { - marshal, err := json.Marshal(CacheKey{ - PipelineLoopSpec: status.PipelineLoopSpec, - Params: v1ParamsConversion(ctx, customRun.Spec.Params), - }) - if err == nil { - hashSum := fmt.Sprintf("%x", md5.Sum(marshal)) - resultBytes, err1 := json.Marshal(customRun.Status.Results) - if err1 != nil { - return fmt.Errorf("error while marshalling result to cache for CustomRun: %s, %w", customRun.Name, err) - } - err = c.cacheStore.Put(&model.TaskCache{ - TaskHashKey: hashSum, - TaskOutput: string(resultBytes), - }) - if err != nil { - return fmt.Errorf("error while adding result to cache for CustomRun: %s, %w", customRun.Name, err) - } - logger.Infof("cached the results of successful CustomRun %s, with key: %s", customRun.Name, hashSum) - } - } - logger.Infof("CustomRun %s/%s is done", customRun.Namespace, customRun.Name) - return nil - } - // Reconcile the Run - if err := c.reconcile(ctx, customRun, status); err != nil { - logger.Errorf("Reconcile error: %v", err.Error()) - merr = multierror.Append(merr, err) - } - - if err := c.updateLabelsAndAnnotations(ctx, customRun); err != nil { - logger.Warn("Failed to update CustomRun labels/annotations", zap.Error(err)) - merr = multierror.Append(merr, err) - } - - if err := customRun.Status.EncodeExtraFields(status); err != nil { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonInternalError.String(), - "Internal error calling EncodeExtraFields: %v", err) - logger.Errorf("EncodeExtraFields error: %v", err.Error()) - } - - afterCondition := customRun.Status.GetCondition(apis.ConditionSucceeded) - events.Emit(ctx, beforeCondition, afterCondition, customRun) - return merr -} - -func EnableCustomTaskFeatureFlag(ctx context.Context) context.Context { - defaults, _ := config.NewDefaultsFromMap(map[string]string{}) - featureFlags, _ := config.NewFeatureFlagsFromMap(map[string]string{}) - c := &config.Config{ - Defaults: defaults, - FeatureFlags: featureFlags, - } - return config.ToContext(ctx, c) -} - -// Check if this pipelineLoop starts other pipelineLoop(s) -func isNestedPipelineLoop(pipelineLoopSpec *pipelineloopv1alpha1.PipelineLoopSpec) bool { - if pipelineLoopSpec.PipelineSpec == nil { - return false - } - for _, t := range pipelineLoopSpec.PipelineSpec.Tasks { - if t.TaskSpec != nil { - if t.TaskSpec.Kind == "PipelineLoop" { - return true - } - } else if t.TaskRef != nil { - if t.TaskRef.Kind == "PipelineLoop" { - return true - } - } - } - return false -} - -func getMaxNestedStackDepth(pipelineLoopMeta *metav1.ObjectMeta) (int, error) { - maxNestedStackDepth := pipelineLoopMeta.Annotations[MaxNestedStackDepthKey] - if 
maxNestedStackDepth != "" { - atoi, err := strconv.Atoi(maxNestedStackDepth) - return atoi, err - } - return DefaultNestedStackDepth, nil -} - -func (c *Reconciler) setMaxNestedStackDepth(ctx context.Context, pipelineLoopSpec *pipelineloopv1alpha1.PipelineLoopSpec, customRun *tektonv1beta1.CustomRun, depth int) { - logger := logging.FromContext(ctx) - - if pipelineLoopSpec.PipelineSpec == nil { - return - } - for k, t := range pipelineLoopSpec.PipelineSpec.Tasks { - if t.TaskSpec != nil { - if t.TaskSpec.Kind == "PipelineLoop" { - if len(t.TaskSpec.Metadata.Annotations) == 0 { - t.TaskSpec.Metadata.Annotations = map[string]string{MaxNestedStackDepthKey: fmt.Sprint(depth)} - } else { - t.TaskSpec.Metadata.Annotations[MaxNestedStackDepthKey] = fmt.Sprint(depth) - } - pipelineLoopSpec.PipelineSpec.Tasks[k].TaskSpec.Metadata.Annotations = map[string]string{MaxNestedStackDepthKey: fmt.Sprint(depth)} - } - } else if t.TaskRef != nil { - if t.TaskRef.Kind == "PipelineLoop" { - tl, err := c.pipelineloopClientSet.CustomV1alpha1().PipelineLoops(customRun.Namespace).Get(ctx, t.TaskRef.Name, metav1.GetOptions{}) - if err == nil && tl != nil { - if len(tl.ObjectMeta.Annotations) == 0 { - tl.ObjectMeta.Annotations = map[string]string{MaxNestedStackDepthKey: fmt.Sprint(depth)} - } else { - tl.ObjectMeta.Annotations[MaxNestedStackDepthKey] = fmt.Sprint(depth) - } - _, err := c.pipelineloopClientSet.CustomV1alpha1().PipelineLoops(customRun.Namespace).Update(ctx, tl, metav1.UpdateOptions{}) - if err != nil { - logger.Errorf("Error while updating pipelineloop nested stack depth, %v", err) - } - } else if err != nil { - logger.Warnf("Unable to fetch pipelineLoop wiht name: %s error: %v", t.TaskRef.Name, err) - } - } - } - } -} - -func (c *Reconciler) reconcile(ctx context.Context, customRun *tektonv1beta1.CustomRun, status *pipelineloopv1alpha1.PipelineLoopRunStatus) error { - ctx = EnableCustomTaskFeatureFlag(ctx) - logger := logging.FromContext(ctx) - var hashSum string - // Get the PipelineLoop referenced by the CustomRun - pipelineLoopMeta, pipelineLoopSpec, err := c.getPipelineLoop(ctx, customRun) - if err != nil { - return nil - } - // Store the fetched PipelineLoopSpec on the CustomRun for auditing - storePipelineLoopSpec(status, pipelineLoopSpec) - - // Propagate labels and annotations from PipelineLoop to Run. - propagatePipelineLoopLabelsAndAnnotations(customRun, pipelineLoopMeta) - - pipelineLoopSpec.SetDefaults(ctx) - // Validate PipelineLoop spec - if err := pipelineLoopSpec.Validate(ctx); err != nil { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation.String(), - "PipelineLoop %s/%s can't be CustomRun; it has an invalid spec: %s", - pipelineLoopMeta.Namespace, pipelineLoopMeta.Name, err) - return nil - } - - // Determine how many iterations of the Task will be done. - totalIterations, iterationElements, err := computeIterations(customRun, pipelineLoopSpec) - if err != nil { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation.String(), - "Cannot determine number of iterations: %s", err) - return nil - } - if totalIterations > iterationLimit { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation.String(), - "Total number of iterations exceeds the limit: %d", iterationLimit) - return nil - } - - // Update status of PipelineRuns. Return the PipelineRun representing the highest loop iteration. 
- highestIteration, currentRunningPrs, failedPrs, err := c.updatePipelineRunStatus(ctx, iterationElements, customRun, status) - if err != nil { - return fmt.Errorf("error updating PipelineRun status for CustomRun %s/%s: %w", customRun.Namespace, customRun.Name, err) - } - if !c.cacheStore.Disabled && isCachingEnabled(customRun) { - marshal, err := json.Marshal(CacheKey{ - PipelineLoopSpec: pipelineLoopSpec, - Params: v1ParamsConversion(ctx, customRun.Spec.Params), - }) - if marshal != nil && err == nil { - hashSum = fmt.Sprintf("%x", md5.Sum(marshal)) - taskCache, err := c.cacheStore.Get(hashSum) - if err == nil && taskCache != nil { - logger.Infof("Found a cached entry, for customRun: %s, with key:", customRun.Name, hashSum) - err := json.Unmarshal([]byte(taskCache.TaskOutput), &customRun.Status.Results) - if err != nil { - logger.Errorf("error while unmarshal of task output. %v", err) - } - customRun.Status.MarkCustomRunSucceeded(pipelineloopv1alpha1.PipelineLoopRunReasonCacheHit.String(), - "A cached result of the previous run was found.") - return nil - } - } - if err != nil { - logger.Warnf("failed marshalling the spec, for pipelineloop: %s", pipelineLoopMeta.Name) - } - } - if highestIteration > 0 { - updateRunStatus(customRun, "last-idx", fmt.Sprintf("%d", highestIteration)) - updateRunStatus(customRun, "last-elem", fmt.Sprintf("%s", iterationElements[highestIteration-1])) - } - // CustomRun is cancelled, just cancel all the running instance and return - if customRun.IsCancelled() { - if len(failedPrs) > 0 { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonFailed.String(), - "CustomRun %s/%s was failed", - customRun.Namespace, customRun.Name) - } else { - reason := pipelineloopv1alpha1.PipelineLoopRunReasonCancelled.String() - if customRun.HasTimedOut(c.clock) { // This check is only possible if we are on tekton 0.27.0 + - reason = string(tektonv1beta1.CustomRunReasonTimedOut) - } - customRun.Status.MarkCustomRunFailed(reason, "CustomRun %s/%s was cancelled", customRun.Namespace, customRun.Name) - } - - for _, currentRunningPr := range currentRunningPrs { - logger.Infof("CustomRun %s/%s is cancelled. 
Cancelling PipelineRun %s.", customRun.Namespace, customRun.Name, currentRunningPr.Name) - if _, err := c.pipelineClientSet.TektonV1().PipelineRuns(customRun.Namespace).Patch(ctx, currentRunningPr.Name, types.JSONPatchType, cancelPatchBytes, metav1.PatchOptions{}); err != nil { - customRun.Status.MarkCustomRunRunning(pipelineloopv1alpha1.PipelineLoopRunReasonCouldntCancel.String(), - "Failed to patch PipelineRun `%s` with cancellation: %v", currentRunningPr.Name, err) - return nil - } - } - return nil - } - - // CustomRun may be marked succeeded already by updatePipelineRunStatus - if customRun.IsSuccessful() { - return nil - } - - retriesDone := len(customRun.Status.RetriesStatus) - retries := customRun.Spec.Retries - if retriesDone < retries && failedPrs != nil && len(failedPrs) > 0 { - logger.Infof("RetriesDone: %d, Total Retries: %d", retriesDone, retries) - customRun.Status.RetriesStatus = append(customRun.Status.RetriesStatus, tektonv1beta1.CustomRunStatus{ - Status: duckv1.Status{ - ObservedGeneration: 0, - Conditions: customRun.Status.Conditions.DeepCopy(), - Annotations: nil, - }, - CustomRunStatusFields: tektonv1beta1.CustomRunStatusFields{ - StartTime: customRun.Status.StartTime.DeepCopy(), - CompletionTime: customRun.Status.CompletionTime.DeepCopy(), - Results: customRun.Status.Results, - RetriesStatus: nil, - ExtraFields: runtime.RawExtension{}, - }, - }) - // Without immediately updating here, wrong number of retries are performed. - _, err := c.pipelineClientSet.TektonV1beta1().CustomRuns(customRun.Namespace).UpdateStatus(ctx, customRun, metav1.UpdateOptions{}) - if err != nil { - return err - } - for _, failedPr := range failedPrs { - // PipelineRun do not support a retry, we dispose off old PR and create a fresh one. - // instead of deleting we just label it deleted=True. - deletedLabel := map[string]string{"deleted": "True"} - mergePatch := map[string]interface{}{ - "metadata": map[string]interface{}{ - "labels": deletedLabel, - }, - } - patch, err := json.Marshal(mergePatch) - if err != nil { - return err - } - _, _ = c.pipelineClientSet.TektonV1().PipelineRuns(failedPr.Namespace). - Patch(ctx, failedPr.Name, types.MergePatchType, patch, metav1.PatchOptions{}) - pr, err := c.createPipelineRun(ctx, logger, pipelineLoopSpec, customRun, highestIteration, iterationElements) - if err != nil { - return fmt.Errorf("error creating PipelineRun from CustomRun %s while retrying: %w", customRun.Name, err) - } - status.PipelineRuns[pr.Name] = &pipelineloopv1alpha1.PipelineLoopPipelineRunStatus{ - Iteration: highestIteration, - IterationItem: iterationElements[highestIteration-1], - Status: getPipelineRunStatusWithoutPipelineSpec(&pr.Status), - } - logger.Infof("Retried failed pipelineRun: %s with new pipelineRun: %s", failedPr.Name, pr.Name) - } - return nil - } - - // Check the status of the PipelineRun for the highest iteration. 
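(Reader's aid: the retry path above never reuses a failed PipelineRun. It labels the old run deleted=True through a merge patch, then creates a fresh PipelineRun for the same iteration. A small sketch of the merge-patch body that gets sent with types.MergePatchType:)

    package main

    import (
        "encoding/json"
        "fmt"
    )

    func main() {
        // Same shape as the merge patch used when retrying: the failed
        // PipelineRun is only labelled, not deleted, so its history survives.
        mergePatch := map[string]interface{}{
            "metadata": map[string]interface{}{
                "labels": map[string]string{"deleted": "True"},
            },
        }
        patch, err := json.Marshal(mergePatch)
        if err != nil {
            panic(err)
        }
        // Prints: {"metadata":{"labels":{"deleted":"True"}}}
        fmt.Println(string(patch))
    }
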
- if len(failedPrs) > 0 { - for _, failedPr := range failedPrs { - if status.CurrentRunning == 0 { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonFailed.String(), - "PipelineRun %s has failed", failedPr.Name) - } else { - customRun.Status.MarkCustomRunRunning(pipelineloopv1alpha1.PipelineLoopRunReasonRunning.String(), - "PipelineRun %s has failed", failedPr.Name) - } - } - return nil - } - - // Mark customRun status Running - customRun.Status.MarkCustomRunRunning(pipelineloopv1alpha1.PipelineLoopRunReasonRunning.String(), - "Iterations completed: %d", highestIteration-len(currentRunningPrs)) - - // Move on to the next iteration (or the first iteration if there was no PipelineRun). - // Check if the CustomRun is done. - nextIteration := highestIteration + 1 - if nextIteration > totalIterations { - // Still running which we already marked, just waiting - if len(currentRunningPrs) > 0 { - logger.Infof("Already started all pipelineruns for the loop, totally %d pipelineruns, waiting for complete.", totalIterations) - return nil - } - // All task finished - customRun.Status.MarkCustomRunSucceeded(pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded.String(), - "All PipelineRuns completed successfully") - updateRunStatus(customRun, "condition", "succeeded") - return nil - } - // Before starting up another PipelineRun, check if the customRun was cancelled. - if customRun.IsCancelled() { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonCancelled.String(), - "CustomRun %s/%s was cancelled", - customRun.Namespace, customRun.Name) - return nil - } - actualParallelism := 1 - // if Parallelism is bigger then totalIterations means there's no limit - if pipelineLoopSpec.Parallelism > totalIterations { - actualParallelism = totalIterations - } else if pipelineLoopSpec.Parallelism > 0 { - actualParallelism = pipelineLoopSpec.Parallelism - } - if len(currentRunningPrs) >= actualParallelism { - logger.Infof("Currently %d pipelinerun started, meet parallelism %d, waiting...", len(currentRunningPrs), actualParallelism) - return nil - } - - // Create PipelineRun to customRun this iteration based on parallelism - for i := 0; i < actualParallelism-len(currentRunningPrs); i++ { - if isNestedPipelineLoop(pipelineLoopSpec) { - maxNestedStackDepth, err := getMaxNestedStackDepth(pipelineLoopMeta) - if err != nil { - logger.Errorf("Error parsing max nested stack depth value: %v", err.Error()) - maxNestedStackDepth = DefaultNestedStackDepth - } - if maxNestedStackDepth > 0 { - maxNestedStackDepth = maxNestedStackDepth - 1 - c.setMaxNestedStackDepth(ctx, pipelineLoopSpec, customRun, maxNestedStackDepth) - } else if maxNestedStackDepth <= 0 { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonStackLimitExceeded.String(), "nested stack depth limit reached.") - return nil - } - } - - pr, err := c.createPipelineRun(ctx, logger, pipelineLoopSpec, customRun, nextIteration, iterationElements) - if err != nil { - return fmt.Errorf("error creating PipelineRun from CustomRun %s: %w", customRun.Name, err) - } - status.PipelineRuns[pr.Name] = &pipelineloopv1alpha1.PipelineLoopPipelineRunStatus{ - Iteration: nextIteration, - IterationItem: iterationElements[nextIteration-1], - Status: getPipelineRunStatusWithoutPipelineSpec(&pr.Status), - } - nextIteration++ - if nextIteration > totalIterations { - logger.Infof("Started all pipelineruns for the loop, totally %d pipelineruns.", totalIterations) - return nil - } - } - - return nil -} - -func 
(c *Reconciler) getPipelineLoop(ctx context.Context, customRun *tektonv1beta1.CustomRun) (*metav1.ObjectMeta, *pipelineloopv1alpha1.PipelineLoopSpec, error) { - pipelineLoopMeta := metav1.ObjectMeta{} - pipelineLoopSpec := pipelineloopv1alpha1.PipelineLoopSpec{} - if customRun.Spec.CustomRef != nil && customRun.Spec.CustomRef.Name != "" { - // Use the k8 client to get the PipelineLoop rather than the lister. This avoids a timing issue where - // the PipelineLoop is not yet in the lister cache if it is created at nearly the same time as the Run. - // See https://github.com/tektoncd/pipeline/issues/2740 for discussion on this issue. - // - // tl, err := c.pipelineLoopLister.PipelineLoops(customRun.Namespace).Get(customRun.Spec.Ref.Name) - tl, err := c.pipelineloopClientSet.CustomV1alpha1().PipelineLoops(customRun.Namespace).Get(ctx, customRun.Spec.CustomRef.Name, metav1.GetOptions{}) - if err != nil { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonCouldntGetPipelineLoop.String(), - "Error retrieving PipelineLoop for CustomRun %s/%s: %s", - customRun.Namespace, customRun.Name, err) - return nil, nil, fmt.Errorf("error retrieving PipelineLoop for CustomRun %s: %w", fmt.Sprintf("%s/%s", customRun.Namespace, customRun.Name), err) - } - pipelineLoopMeta = tl.ObjectMeta - pipelineLoopSpec = tl.Spec - } else if customRun.Spec.CustomSpec != nil { - err := json.Unmarshal(customRun.Spec.CustomSpec.Spec.Raw, &pipelineLoopSpec) - if err != nil { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonCouldntGetPipelineLoop.String(), - "Error unmarshal PipelineLoop spec for CustomRun %s/%s: %s", - customRun.Namespace, customRun.Name, err) - return nil, nil, fmt.Errorf("error unmarshal PipelineLoop spec for CustomRun %s: %w", fmt.Sprintf("%s/%s", customRun.Namespace, customRun.Name), err) - } - pipelineLoopMeta = metav1.ObjectMeta{Name: customRun.Name, - Namespace: customRun.Namespace, - OwnerReferences: customRun.OwnerReferences, - Labels: customRun.Spec.CustomSpec.Metadata.Labels, - Annotations: customRun.Spec.CustomSpec.Metadata.Annotations} - } else { - // CustomRun does not require name but for PipelineLoop it does. - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonCouldntGetPipelineLoop.String(), - "Missing spec.customRef.name for CustomRun %s/%s", - customRun.Namespace, customRun.Name) - return nil, nil, fmt.Errorf("missing spec.customRef.name for CustomRun %s", fmt.Sprintf("%s/%s", customRun.Namespace, customRun.Name)) - } - - if pipelineLoopSpec.ServiceAccountName == "" && customRun.Spec.ServiceAccountName != "" && customRun.Spec.ServiceAccountName != "default" { - pipelineLoopSpec.ServiceAccountName = customRun.Spec.ServiceAccountName - } - return &pipelineLoopMeta, &pipelineLoopSpec, nil -} - -func updateRunStatus(customRun *tektonv1beta1.CustomRun, resultName string, resultVal string) bool { - indexResultLastIdx := -1 - // if CustomRun already has resultName, then update it else append. 
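(Reader's aid: the loop that follows performs an update-or-append on customRun.Status.Results. A trimmed-down sketch of the same behaviour; the result type and upsert helper are illustrative names, not part of the removed code.)

    package main

    import "fmt"

    // Trimmed-down stand-in for tektonv1beta1.CustomRunResult.
    type result struct{ Name, Value string }

    // upsert mirrors the update-or-append done below on customRun.Status.Results.
    func upsert(results []result, name, value string) []result {
        for i, r := range results {
            if r.Name == name {
                results[i].Value = value
                return results
            }
        }
        return append(results, result{Name: name, Value: value})
    }

    func main() {
        rs := upsert(nil, "last-idx", "1")
        rs = upsert(rs, "last-idx", "2")
        fmt.Println(rs) // [{last-idx 2}]
    }
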
- for i, res := range customRun.Status.Results { - if res.Name == resultName { - indexResultLastIdx = i - } - } - if indexResultLastIdx >= 0 { - customRun.Status.Results[indexResultLastIdx] = tektonv1beta1.CustomRunResult{ - Name: resultName, - Value: resultVal, - } - } else { - customRun.Status.Results = append(customRun.Status.Results, tektonv1beta1.CustomRunResult{ - Name: resultName, - Value: resultVal, - }) - } - return true -} - -func (c *Reconciler) createPipelineRun(ctx context.Context, logger *zap.SugaredLogger, tls *pipelineloopv1alpha1.PipelineLoopSpec, customRun *tektonv1beta1.CustomRun, iteration int, iterationElements []interface{}) (*tektonv1.PipelineRun, error) { - - // Create name for PipelineRun from CustomRun name plus iteration number. - prName := names.SimpleNameGenerator.RestrictLengthWithRandomSuffix(fmt.Sprintf("%s-%s", customRun.Name, fmt.Sprintf("%05d", iteration))) - pipelineRunAnnotations := getPipelineRunAnnotations(customRun) - currentIndex := iteration - 1 - if currentIndex > len(iterationElements) { - currentIndex = len(iterationElements) - 1 - } - currentIterationItemBytes, _ := json.Marshal(iterationElements[currentIndex]) - pipelineRunAnnotations[pipelineloop.GroupName+pipelineLoopCurrentIterationItemAnnotationKey] = string(currentIterationItemBytes) - pr := &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: prName, - Namespace: customRun.Namespace, - OwnerReferences: []metav1.OwnerReference{*metav1.NewControllerRef(customRun, - schema.GroupVersionKind{Group: "tekton.dev", Version: "v1beta1", Kind: "CustomRun"})}, - Labels: getPipelineRunLabels(customRun, strconv.Itoa(iteration)), - Annotations: pipelineRunAnnotations, - }, - Spec: tektonv1.PipelineRunSpec{ - Params: getParameters(customRun, tls, iteration, string(currentIterationItemBytes)), - Timeouts: nil, - TaskRunTemplate: tektonv1.PipelineTaskRunTemplate{ - ServiceAccountName: tls.ServiceAccountName, - PodTemplate: tls.PodTemplate, - }, - Workspaces: tls.Workspaces, - TaskRunSpecs: tls.TaskRunSpecs, - }} - if tls.Timeout != nil { - pr.Spec.Timeouts = &tektonv1.TimeoutFields{Pipeline: tls.Timeout} - } - if tls.PipelineRef != nil { - pr.Spec.PipelineRef = &tektonv1.PipelineRef{ - Name: tls.PipelineRef.Name, - // Kind: tls.PipelineRef.Kind, - } - } else if tls.PipelineSpec != nil { - pr.Spec.PipelineSpec = tls.PipelineSpec - } - - logger.Infof("Creating a new PipelineRun object %s", prName) - return c.pipelineClientSet.TektonV1().PipelineRuns(customRun.Namespace).Create(ctx, pr, metav1.CreateOptions{}) - -} - -func (c *Reconciler) updateLabelsAndAnnotations(ctx context.Context, customRun *tektonv1beta1.CustomRun) error { - newCustomRun, err := c.customRunLister.CustomRuns(customRun.Namespace).Get(customRun.Name) - if err != nil { - return fmt.Errorf("error getting CustomRun %s when updating labels/annotations: %w", customRun.Name, err) - } - if !reflect.DeepEqual(customRun.ObjectMeta.Labels, newCustomRun.ObjectMeta.Labels) || !reflect.DeepEqual(customRun.ObjectMeta.Annotations, newCustomRun.ObjectMeta.Annotations) { - mergePatch := map[string]interface{}{ - "metadata": map[string]interface{}{ - "labels": customRun.ObjectMeta.Labels, - "annotations": customRun.ObjectMeta.Annotations, - }, - } - patch, err := json.Marshal(mergePatch) - if err != nil { - return err - } - _, err = c.pipelineClientSet.TektonV1beta1().CustomRuns(customRun.Namespace).Patch(ctx, customRun.Name, types.MergePatchType, patch, metav1.PatchOptions{}) - return err - } - return nil -} - -func (c *Reconciler) 
cancelAllPipelineRuns(ctx context.Context, customRun *tektonv1beta1.CustomRun) error { - logger := logging.FromContext(ctx) - pipelineRunLabels := getPipelineRunLabels(customRun, "") - currentRunningPrs, err := c.pipelineRunLister.PipelineRuns(customRun.Namespace).List(labels.SelectorFromSet(pipelineRunLabels)) - if err != nil { - return fmt.Errorf("could not list PipelineRuns %#v", err) - } - for _, currentRunningPr := range currentRunningPrs { - if !currentRunningPr.IsDone() && !currentRunningPr.IsCancelled() { - logger.Infof("Cancelling PipelineRun %s.", currentRunningPr.Name) - if _, err := c.pipelineClientSet.TektonV1().PipelineRuns(customRun.Namespace).Patch(ctx, currentRunningPr.Name, types.JSONPatchType, cancelPatchBytes, metav1.PatchOptions{}); err != nil { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonCouldntCancel.String(), - "Failed to patch PipelineRun `%s` with cancellation: %v", currentRunningPr.Name, err) - return nil - } - } - } - return nil -} - -func (c *Reconciler) updatePipelineRunStatus(ctx context.Context, iterationElements []interface{}, customRun *tektonv1beta1.CustomRun, status *pipelineloopv1alpha1.PipelineLoopRunStatus) (int, []*tektonv1.PipelineRun, []*tektonv1.PipelineRun, error) { - logger := logging.FromContext(ctx) - highestIteration := 0 - var currentRunningPrs []*tektonv1.PipelineRun - var failedPrs []*tektonv1.PipelineRun - if status.PipelineRuns == nil { - status.PipelineRuns = make(map[string]*pipelineloopv1alpha1.PipelineLoopPipelineRunStatus) - } - pipelineRunLabels := getPipelineRunLabels(customRun, "") - pipelineRuns, err := c.pipelineRunLister.PipelineRuns(customRun.Namespace).List(labels.SelectorFromSet(pipelineRunLabels)) - if err != nil { - return 0, nil, nil, fmt.Errorf("could not list PipelineRuns %#v", err) - } - if len(pipelineRuns) == 0 { - return 0, nil, nil, nil - } - status.CurrentRunning = 0 - for _, pr := range pipelineRuns { - lbls := pr.GetLabels() - if lbls["deleted"] == "True" { - // PipelineRun is already retried, skipping... - continue - } - iterationStr := lbls[pipelineloop.GroupName+pipelineLoopIterationLabelKey] - iteration, err := strconv.Atoi(iterationStr) - if err != nil { - customRun.Status.MarkCustomRunFailed(pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation.String(), - "Error converting iteration number in PipelineRun %s: %#v", pr.Name, err) - logger.Errorf("Error converting iteration number in PipelineRun %s: %#v", pr.Name, err) - return 0, nil, nil, nil - } - // when we just create pr in a forloop, the started time may be empty - if !pr.IsDone() { - status.CurrentRunning++ - currentRunningPrs = append(currentRunningPrs, pr) - } - if pr.IsDone() && !pr.Status.GetCondition(apis.ConditionSucceeded).IsTrue() { - failedPrs = append(failedPrs, pr) - } - - // Mark customRun successful if the condition are met. - // if the last loop task is skipped, but the highestIterationPr successed. Mark customRun success. 
- // lastLoopTask := highestIterationPr.ObjectMeta.Annotations["last-loop-task"] - lastLoopTask := "" - for key, val := range customRun.ObjectMeta.Labels { - if key == "last-loop-task" { - lastLoopTask = val - } - } - if lastLoopTask != "" { - skippedTaskList := pr.Status.SkippedTasks - for _, task := range skippedTaskList { - if task.Name == lastLoopTask { - // Mark customRun successful and stop the loop pipelinerun - customRun.Status.MarkCustomRunSucceeded(pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded.String(), - "PipelineRuns completed successfully with the conditions are met") - updateRunStatus(customRun, "condition", "pass") - } - } - } - status.PipelineRuns[pr.Name] = &pipelineloopv1alpha1.PipelineLoopPipelineRunStatus{ - Iteration: iteration, - IterationItem: iterationElements[iteration-1], - Status: getPipelineRunStatusWithoutPipelineSpec(&pr.Status), - } - if iteration > highestIteration { - highestIteration = iteration - } - taskrunstatuses := make(map[string]*tektonv1.PipelineRunTaskRunStatus) - runstatuses := make(map[string]*tektonv1.PipelineRunRunStatus) - if pr.Status.ChildReferences != nil { - //fetch taskruns/runs status specifically for pipelineloop-break-operation first - for _, child := range pr.Status.ChildReferences { - if strings.HasPrefix(child.PipelineTaskName, "pipelineloop-break-operation") { - switch child.Kind { - case "TaskRun": - tr, err := tkstatus.GetTaskRunStatusForPipelineTask(ctx, c.pipelineClientSet, customRun.Namespace, child) - if err != nil { - logger.Errorf("can not get status for TaskRun, %v", err) - return 0, nil, nil, fmt.Errorf("could not get TaskRun %s."+ - " %#v", child.Name, err) - } - taskrunstatuses[child.Name] = &tektonv1.PipelineRunTaskRunStatus{ - PipelineTaskName: child.PipelineTaskName, - WhenExpressions: child.WhenExpressions, - Status: tr.DeepCopy(), - } - case "Run": - run, err := tkstatus.GetCustomRunStatusForPipelineTask(ctx, c.pipelineClientSet, customRun.Namespace, child) - if err != nil { - logger.Errorf("can not get status for Run, %v", err) - return 0, nil, nil, fmt.Errorf("could not get Run %s."+ - " %#v", child.Name, err) - } - runstatuses[child.Name] = &tektonv1.PipelineRunRunStatus{ - PipelineTaskName: child.PipelineTaskName, - WhenExpressions: child.WhenExpressions, - Status: run.DeepCopy(), - } - case "CustomRun": - run, err := tkstatus.GetCustomRunStatusForPipelineTask(ctx, c.pipelineClientSet, customRun.Namespace, child) - if err != nil { - logger.Errorf("can not get status for CustomRun, %v", err) - return 0, nil, nil, fmt.Errorf("could not get CustomRun %s."+ - " %#v", child.Name, err) - } - runstatuses[child.Name] = &tektonv1.PipelineRunRunStatus{ - PipelineTaskName: child.PipelineTaskName, - WhenExpressions: child.WhenExpressions, - Status: run.DeepCopy(), - } - default: - //ignore - } - } - } - } - for _, runStatus := range runstatuses { - if strings.HasPrefix(runStatus.PipelineTaskName, "pipelineloop-break-operation") { - if runStatus.Status != nil && !runStatus.Status.GetCondition(apis.ConditionSucceeded).IsUnknown() { - err = c.cancelAllPipelineRuns(ctx, customRun) - if err != nil { - return 0, nil, nil, fmt.Errorf("could not cancel PipelineRuns belonging to customRun %s."+ - " %#v", customRun.Name, err) - } - // Mark customRun successful and stop the loop pipelinerun - customRun.Status.MarkCustomRunSucceeded(pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded.String(), - "PipelineRuns completed successfully with the conditions are met") - updateRunStatus(customRun, "condition", "pass") - break - } - } - 
} - for _, taskRunStatus := range taskrunstatuses { - if strings.HasPrefix(taskRunStatus.PipelineTaskName, "pipelineloop-break-operation") { - if !taskRunStatus.Status.GetCondition(apis.ConditionSucceeded).IsUnknown() { - err = c.cancelAllPipelineRuns(ctx, customRun) - if err != nil { - return 0, nil, nil, fmt.Errorf("could not cancel PipelineRuns belonging to task customRun %s."+ - " %#v", customRun.Name, err) - } - // Mark customRun successful and stop the loop pipelinerun - customRun.Status.MarkCustomRunSucceeded(pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded.String(), - "PipelineRuns completed successfully with the conditions are met") - updateRunStatus(customRun, "condition", "pass") - break - } - } - } - } - return highestIteration, currentRunningPrs, failedPrs, nil -} - -func getIntegerParamValue(parm v1beta1.Param) (int, error) { - fromStr := strings.TrimSuffix(parm.Value.StringVal, "\n") - fromStr = strings.Trim(fromStr, " ") - retVal, err := strconv.Atoi(fromStr) - if err != nil { - err = fmt.Errorf("input \"%s\" is not a number", parm.Name) - } - return retVal, err -} - -func computeIterations(run *tektonv1beta1.CustomRun, tls *pipelineloopv1alpha1.PipelineLoopSpec) (int, []interface{}, error) { - // Find the iterate parameter. - numberOfIterations := -1 - from := 0 - step := 1 - to := 0 - fromProvided := false - toProvided := false - iterationElements := []interface{}{} - iterationParamStr := "" - iterationParamStrSeparator := "" - var err error - for _, p := range run.Spec.Params { - if p.Name == "from" { - from, err = getIntegerParamValue(p) - if err == nil { - fromProvided = true - } - } - if p.Name == "step" { - step, err = getIntegerParamValue(p) - if err != nil { - return 0, iterationElements, err - } - } - if p.Name == "to" { - to, err = getIntegerParamValue(p) - if err == nil { - toProvided = true - } - } - if p.Name == tls.IterateParam { - if p.Value.Type == v1beta1.ParamTypeString { - iterationParamStr = p.Value.StringVal - } - if p.Value.Type == v1beta1.ParamTypeArray { - numberOfIterations = len(p.Value.ArrayVal) - for _, v := range p.Value.ArrayVal { - iterationElements = append(iterationElements, v) - } - break - } - } - if p.Name == tls.IterateParamSeparator { - iterationParamStrSeparator = p.Value.StringVal - } - - } - if iterationParamStr != "" { - // Transfer p.Value to Array. 
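(Reader's aid: the block that follows probes a string iterate parameter as JSON in several shapes before falling back to a comma split. A stand-alone sketch of the same probe order using only encoding/json; the decode helper and variable names are illustrative.)

    package main

    import (
        "encoding/json"
        "fmt"
        "strings"
    )

    // decode mirrors the probe order used below: []string, []int,
    // []map[string]string, []map[string]int, then a comma split as last resort.
    func decode(raw string) ([]interface{}, error) {
        var out []interface{}
        var strs []string
        var ints []int
        var dictsStr []map[string]string
        var dictsInt []map[string]int
        switch {
        case json.Unmarshal([]byte(raw), &strs) == nil:
            for _, v := range strs {
                out = append(out, v)
            }
        case json.Unmarshal([]byte(raw), &ints) == nil:
            for _, v := range ints {
                out = append(out, v)
            }
        case json.Unmarshal([]byte(raw), &dictsStr) == nil:
            for _, v := range dictsStr {
                out = append(out, v)
            }
        case json.Unmarshal([]byte(raw), &dictsInt) == nil:
            for _, v := range dictsInt {
                out = append(out, v)
            }
        case strings.Contains(raw, ","):
            for _, v := range strings.Split(raw, ",") {
                out = append(out, v)
            }
        default:
            return nil, fmt.Errorf("value %q can not transfer to array", raw)
        }
        return out, nil
    }

    func main() {
        items, _ := decode(`["a","b","c"]`)
        fmt.Println(len(items), items) // 3 [a b c]
    }
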
- err = nil //reset the err - if iterationParamStrSeparator != "" { - stringArr := strings.Split(iterationParamStr, iterationParamStrSeparator) - numberOfIterations = len(stringArr) - for _, v := range stringArr { - iterationElements = append(iterationElements, v) - } - } else { - var stringArr []string - var ints []int - var dictsString []map[string]string - var dictsInt []map[string]int - errString := json.Unmarshal([]byte(iterationParamStr), &stringArr) - errInt := json.Unmarshal([]byte(iterationParamStr), &ints) - errDictString := json.Unmarshal([]byte(iterationParamStr), &dictsString) - errDictInt := json.Unmarshal([]byte(iterationParamStr), &dictsInt) - if errString != nil && errInt != nil && errDictString != nil && errDictInt != nil { - //try the default separator comma (,) in last - if strings.Contains(iterationParamStr, defaultIterationParamStrSeparator) { - stringArr := strings.Split(iterationParamStr, defaultIterationParamStrSeparator) - numberOfIterations = len(stringArr) - for _, v := range stringArr { - iterationElements = append(iterationElements, v) - } - } else { - return 0, iterationElements, fmt.Errorf("the value of the iterate parameter %q can not transfer to array", tls.IterateParam) - } - } - if errString == nil { - numberOfIterations = len(stringArr) - for _, v := range stringArr { - iterationElements = append(iterationElements, v) - } - } else if errInt == nil { - numberOfIterations = len(ints) - for _, v := range ints { - iterationElements = append(iterationElements, v) - } - } else if errDictString == nil { - numberOfIterations = len(dictsString) - for _, v := range dictsString { - iterationElements = append(iterationElements, v) - } - } else if errDictInt == nil { - numberOfIterations = len(dictsInt) - for _, v := range dictsInt { - iterationElements = append(iterationElements, v) - } - } - } - } - if from != to && fromProvided && toProvided { - if step == 0 { - return 0, iterationElements, fmt.Errorf("invalid values step: %d found in runs", step) - } - if (to-from < step && step > 0) || (to-from > step && step < 0) { - // This is a special case, to emulate "python's enumerate" behaviour see issue #935 - numberOfIterations = 1 - iterationElements = append(iterationElements, from) - return numberOfIterations, iterationElements, nil - } - if (from > to && step > 0) || (from < to && step < 0) { - return 0, iterationElements, fmt.Errorf("invalid values for from:%d, to:%d & step: %d found in runs", from, to, step) - } - numberOfIterations = 0 - if step < 0 && from > to { - for i := from; i >= to; i = i + step { - numberOfIterations = numberOfIterations + 1 - iterationElements = append(iterationElements, i) - } - } else { - for i := from; i <= to; i = i + step { - numberOfIterations = numberOfIterations + 1 - iterationElements = append(iterationElements, i) - } - } - } - if from == to && step != 0 && fromProvided && toProvided { - // This is a special case, to emulate "python's enumerate" behaviour see issue #935 - numberOfIterations = 1 - iterationElements = append(iterationElements, from) - } - return numberOfIterations, iterationElements, err -} - -func getParameters(customRun *tektonv1beta1.CustomRun, tls *pipelineloopv1alpha1.PipelineLoopSpec, iteration int, currentIterationItem string) []tektonv1.Param { - var out []tektonv1.Param - if tls.IterateParam != "" { - // IterateParam defined - var iterationParam, iterationParamStrSeparator *v1beta1.Param - var item, separator v1beta1.Param - for i, p := range customRun.Spec.Params { - if p.Name == tls.IterateParam { - 
if p.Value.Type == v1beta1.ParamTypeArray { - out = append(out, tektonv1.Param{ - Name: p.Name, - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: p.Value.ArrayVal[iteration-1]}, - }) - } - if p.Value.Type == v1beta1.ParamTypeString { - item = p - iterationParam = &item - } - } else if p.Name == tls.IterateParamSeparator { - separator = p - iterationParamStrSeparator = &separator - } else { - v1Param := tektonv1.Param{} - ctx := context.Background() - paramConvertTo(ctx, &customRun.Spec.Params[i], &v1Param) - out = append(out, v1Param) - } - } - if iterationParam != nil { - if iterationParamStrSeparator != nil && iterationParamStrSeparator.Value.StringVal != "" { - iterationParamStr := iterationParam.Value.StringVal - stringArr := strings.Split(iterationParamStr, iterationParamStrSeparator.Value.StringVal) - out = append(out, tektonv1.Param{ - Name: iterationParam.Name, - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: stringArr[iteration-1]}, - }) - - } else { - var stringArr []string - var ints []int - var dictsString []map[string]string - var dictsInt []map[string]int - iterationParamStr := iterationParam.Value.StringVal - errString := json.Unmarshal([]byte(iterationParamStr), &stringArr) - errInt := json.Unmarshal([]byte(iterationParamStr), &ints) - errDictString := json.Unmarshal([]byte(iterationParamStr), &dictsString) - errDictInt := json.Unmarshal([]byte(iterationParamStr), &dictsInt) - if errString == nil { - out = append(out, tektonv1.Param{ - Name: iterationParam.Name, - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: stringArr[iteration-1]}, - }) - } else if errInt == nil { - out = append(out, tektonv1.Param{ - Name: iterationParam.Name, - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: strconv.Itoa(ints[iteration-1])}, - }) - } else if errDictString == nil { - for dictParam := range dictsString[iteration-1] { - out = append(out, tektonv1.Param{ - Name: iterationParam.Name + "-subvar-" + dictParam, - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: dictsString[iteration-1][dictParam]}, - }) - } - } else if errDictInt == nil { - for dictParam := range dictsInt[iteration-1] { - out = append(out, tektonv1.Param{ - Name: iterationParam.Name + "-subvar-" + dictParam, - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: strconv.Itoa(dictsInt[iteration-1][dictParam])}, - }) - } - } else { - //try the default separator "," - if strings.Contains(iterationParamStr, defaultIterationParamStrSeparator) { - stringArr := strings.Split(iterationParamStr, defaultIterationParamStrSeparator) - out = append(out, tektonv1.Param{ - Name: iterationParam.Name, - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: stringArr[iteration-1]}, - }) - } - } - } - } - } else { - // IterateNumeric defined - IterateStrings := []string{"from", "step", "to"} - for i, p := range customRun.Spec.Params { - if _, found := Find(IterateStrings, p.Name); !found { - v1Param := tektonv1.Param{} - ctx := context.Background() - paramConvertTo(ctx, &customRun.Spec.Params[i], &v1Param) - out = append(out, v1Param) - } - } - } - if tls.IterationNumberParam != "" { - out = append(out, tektonv1.Param{ - Name: tls.IterationNumberParam, - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: strconv.Itoa(iteration)}, - }) - } - if tls.IterateNumeric != "" { - out = append(out, tektonv1.Param{ - Name: tls.IterateNumeric, - Value: tektonv1.ParamValue{Type: 
tektonv1.ParamTypeString, StringVal: currentIterationItem}, - }) - } - return out -} - -func getPipelineRunAnnotations(customRun *tektonv1beta1.CustomRun) map[string]string { - // Propagate annotations from CustomRun to PipelineRun. - annotations := make(map[string]string, len(customRun.ObjectMeta.Annotations)+1) - for key, val := range customRun.ObjectMeta.Annotations { - annotations[key] = val - } - return annotations -} - -// Find takes a slice and looks for an element in it. If found it will -// return it's key, otherwise it will return -1 and a bool of false. -func Find(slice []string, val string) (int, bool) { - for i, item := range slice { - if item == val { - return i, true - } - } - return -1, false -} - -func getPipelineRunLabels(customRun *tektonv1beta1.CustomRun, iterationStr string) map[string]string { - // Propagate labels from CustomRun to PipelineRun. - labels := make(map[string]string, len(customRun.ObjectMeta.Labels)+1) - ignoreLabelsKey := []string{"tekton.dev/pipelineRun", "tekton.dev/pipelineTask", "tekton.dev/pipeline", "custom.tekton.dev/pipelineLoopIteration"} - for key, val := range customRun.ObjectMeta.Labels { - if _, found := Find(ignoreLabelsKey, key); !found { - labels[key] = val - } - } - // Note: The CustomRun label uses the normal Tekton group name. - labels[pipeline.GroupName+pipelineLoopRunLabelKey] = customRun.Name - if iterationStr != "" { - labels[pipelineloop.GroupName+pipelineLoopIterationLabelKey] = iterationStr - } - labels[pipelineloop.GroupName+parentPRKey] = customRun.ObjectMeta.Labels["tekton.dev/pipelineRun"] - - var prOriginalName string - if _, ok := customRun.ObjectMeta.Labels[pipelineloop.GroupName+originalPRKey]; ok { - prOriginalName = customRun.ObjectMeta.Labels[pipelineloop.GroupName+originalPRKey] - } else { - prOriginalName = customRun.ObjectMeta.Labels["tekton.dev/pipelineRun"] - } - labels[pipelineloop.GroupName+originalPRKey] = prOriginalName - // Empty the RunId reference from the KFP persistent agent because LabelKeyWorkflowRunId should be unique across all pipelineruns - _, ok := labels[LabelKeyWorkflowRunId] - if ok { - delete(labels, LabelKeyWorkflowRunId) - } - return labels -} - -func propagatePipelineLoopLabelsAndAnnotations(customRun *tektonv1beta1.CustomRun, pipelineLoopMeta *metav1.ObjectMeta) { - // Propagate labels from PipelineLoop to customRun. - if customRun.ObjectMeta.Labels == nil { - customRun.ObjectMeta.Labels = make(map[string]string, len(pipelineLoopMeta.Labels)+1) - } - for key, value := range pipelineLoopMeta.Labels { - customRun.ObjectMeta.Labels[key] = value - } - customRun.ObjectMeta.Labels[pipelineloop.GroupName+pipelineLoopLabelKey] = pipelineLoopMeta.Name - - // Propagate annotations from PipelineLoop to Run. - if customRun.ObjectMeta.Annotations == nil { - customRun.ObjectMeta.Annotations = make(map[string]string, len(pipelineLoopMeta.Annotations)) - } - for key, value := range pipelineLoopMeta.Annotations { - customRun.ObjectMeta.Annotations[key] = value - } -} - -func storePipelineLoopSpec(status *pipelineloopv1alpha1.PipelineLoopRunStatus, tls *pipelineloopv1alpha1.PipelineLoopSpec) { - // Only store the PipelineLoopSpec once, if it has never been set before. - if status.PipelineLoopSpec == nil { - status.PipelineLoopSpec = tls - } -} - -// Storing PipelineSpec and TaskSpec in PipelineRunStatus is a source of significant memory consumption and OOM failures. -// Additionally, performance of status update in customRun reconciler is impacted. 
-// PipelineSpec and TaskSpec seems to be redundant in this place. -// See issue: https://github.com/kubeflow/kfp-tekton/issues/962 -func getPipelineRunStatusWithoutPipelineSpec(status *tektonv1.PipelineRunStatus) *tektonv1.PipelineRunStatus { - s := status.DeepCopy() - s.PipelineSpec = nil - return s -} diff --git a/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelinelooprun_test.go b/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelinelooprun_test.go deleted file mode 100644 index f968b7dc69..0000000000 --- a/tekton-catalog/pipeline-loops/pkg/reconciler/pipelinelooprun/pipelinelooprun_test.go +++ /dev/null @@ -1,2360 +0,0 @@ -/* -Copyright 2020 The Knative Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package pipelinelooprun - -import ( - "context" - "encoding/json" - "fmt" - "os" - "sort" - "strings" - "testing" - "time" - - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop" - pipelineloopv1alpha1 "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/apis/pipelineloop/v1alpha1" - fakeclient "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/client/fake" - fakepipelineloopinformer "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/pkg/client/injection/informers/pipelineloop/v1alpha1/pipelineloop/fake" - "github.com/kubeflow/kfp-tekton/tekton-catalog/pipeline-loops/test" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/pod" - tektonv1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - tektonv1beta1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - ttesting "github.com/tektoncd/pipeline/pkg/reconciler/testing" - "github.com/tektoncd/pipeline/test/diff" - "github.com/tektoncd/pipeline/test/names" - corev1 "k8s.io/api/core/v1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/runtime" - "k8s.io/apimachinery/pkg/types" - ktesting "k8s.io/client-go/testing" - "k8s.io/client-go/tools/record" - "knative.dev/pkg/apis" - "knative.dev/pkg/configmap/informer" - "knative.dev/pkg/controller" - "knative.dev/pkg/logging" - "knative.dev/pkg/reconciler" - "knative.dev/pkg/system" - _ "knative.dev/pkg/system/testing" -) - -var ( - namespace = "" - trueB = true -) - -func initCacheParams() { - tmp := os.TempDir() - params.DbDriver = "sqlite" - params.DbName = tmp + "/testing.db" - params.Timeout = 2 * time.Second -} - -func init() { - initCacheParams() -} - -func getCustomRunName(customRun *tektonv1beta1.CustomRun) string { - return strings.Join([]string{customRun.Namespace, customRun.Name}, "/") -} - -func loopRunning(customRun *tektonv1beta1.CustomRun) *tektonv1beta1.CustomRun { - customRunWithStatus := customRun.DeepCopy() - customRunWithStatus.Status.InitializeConditions() - customRunWithStatus.Status.MarkCustomRunRunning(pipelineloopv1alpha1.PipelineLoopRunReasonRunning.String(), "") - return customRunWithStatus -} - -func loopSucceeded(customRun *tektonv1beta1.CustomRun) 
*tektonv1beta1.CustomRun { - customRunWithStatus := customRun.DeepCopy() - customRunWithStatus.Status.InitializeConditions() - customRunWithStatus.Status.MarkCustomRunSucceeded(pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded.String(), "") - return customRunWithStatus -} - -func successful(pr *tektonv1.PipelineRun) *tektonv1.PipelineRun { - prWithStatus := pr.DeepCopy() - prWithStatus.Status.SetCondition(&apis.Condition{ - Type: apis.ConditionSucceeded, - Status: corev1.ConditionTrue, - Reason: tektonv1.PipelineRunReasonSuccessful.String(), - Message: "All Steps have completed executing", - }) - return prWithStatus -} - -func successfulWithSkipedTasks(pr *tektonv1.PipelineRun) *tektonv1.PipelineRun { - prWithStatus := pr.DeepCopy() - prWithStatus.Status.SetCondition(&apis.Condition{ - Type: apis.ConditionSucceeded, - Status: corev1.ConditionTrue, - Reason: tektonv1.PipelineRunReasonSuccessful.String(), - Message: "Tasks Completed: 2 (Failed: 0, Cancelled 0), Skipped: 1", - }) - prWithStatus.Status.SkippedTasks = []tektonv1.SkippedTask{{ - Name: "task-fail", - }} - return prWithStatus -} - -func failed(pr *tektonv1.PipelineRun) *tektonv1.PipelineRun { - prWithStatus := pr.DeepCopy() - prWithStatus.Status.SetCondition(&apis.Condition{ - Type: apis.ConditionSucceeded, - Status: corev1.ConditionFalse, - Reason: tektonv1.PipelineRunReasonFailed.String(), - Message: "Something went wrong", - }) - return prWithStatus -} - -func setRetries(customRun *tektonv1beta1.CustomRun, retries int) *tektonv1beta1.CustomRun { - customRun.Spec.Retries = retries - return customRun -} - -func setDeleted(pr *tektonv1.PipelineRun) *tektonv1.PipelineRun { - pr.Labels["deleted"] = "True" - return pr -} - -// getPipelineLoopController returns an instance of the PipelineLoop controller/reconciler that has been seeded with -// d, where d represents the state of the system (existing resources) needed for the test. -func getPipelineLoopController(t *testing.T, d test.Data, pipelineloops []*pipelineloopv1alpha1.PipelineLoop) (test.Assets, func()) { - ctx, _ := ttesting.SetupFakeContext(t) - ctx, cancel := context.WithCancel(ctx) - c, informers := test.SeedTestData(t, ctx, d) - - client := fakeclient.Get(ctx) - client.PrependReactor("*", "pipelineloops", test.AddToInformer(t, fakepipelineloopinformer.Get(ctx).Informer().GetIndexer())) - for _, tl := range pipelineloops { - tl := tl.DeepCopy() // Avoid assumptions that the informer's copy is modified. 
-		if _, err := client.CustomV1alpha1().PipelineLoops(tl.Namespace).Create(ctx, tl, metav1.CreateOptions{}); err != nil {
-			t.Fatal(err)
-		}
-	}
-
-	configMapWatcher := informer.NewInformedWatcher(c.Kube, system.Namespace())
-	ctl := NewController(namespace)(ctx, configMapWatcher)
-
-	if la, ok := ctl.Reconciler.(reconciler.LeaderAware); ok {
-		la.Promote(reconciler.UniversalBucket(), func(reconciler.Bucket, types.NamespacedName) {})
-	}
-	if err := configMapWatcher.Start(ctx.Done()); err != nil {
-		t.Fatalf("error starting configmap watcher: %v", err)
-	}
-
-	return test.Assets{
-		Logger:     logging.FromContext(ctx),
-		Controller: ctl,
-		Clients:    c,
-		Informers:  informers,
-		Recorder:   controller.GetEventRecorder(ctx).(*record.FakeRecorder),
-	}, cancel
-}
-
-func getCreatedPipelinerun(t *testing.T, clients test.Clients) []*tektonv1.PipelineRun {
-	t.Log("actions", clients.Pipeline.Actions())
-	var createdPr []*tektonv1.PipelineRun
-	for _, a := range clients.Pipeline.Actions() {
-		if a.GetVerb() == "create" {
-			obj := a.(ktesting.CreateAction).GetObject()
-			if pr, ok := obj.(*tektonv1.PipelineRun); ok {
-				createdPr = append(createdPr, pr)
-			}
-		}
-	}
-	return createdPr
-}
-
-func checkEvents(fr *record.FakeRecorder, testName string, wantEvents []string) error {
-	// The fake recorder runs in a goroutine, so the timeout is here to avoid waiting
-	// on the channel forever if fewer than expected events are received.
-	// We only hit the timeout in case of failure of the test, so the actual value
-	// of the timeout is not so relevant. It's only used when tests are going to fail.
-	timer := time.NewTimer(1 * time.Second)
-	foundEvents := []string{}
-	for ii := 0; ii < len(wantEvents)+1; ii++ {
-		// We loop over all the events that we expect. Once they are all received
-		// we exit the loop. If we never receive enough events, the timeout takes us
-		// out of the loop.
-		select {
-		case event := <-fr.Events:
-			foundEvents = append(foundEvents, event)
-			if ii > len(wantEvents)-1 {
-				return fmt.Errorf(`Received extra event "%s" for test "%s"`, event, testName)
-			}
-			wantEvent := wantEvents[ii]
-			if !(strings.HasPrefix(event, wantEvent)) {
-				return fmt.Errorf(`Expected event "%s" but got "%s" instead for test "%s"`, wantEvent, event, testName)
-			}
-		case <-timer.C:
-			if len(foundEvents) > len(wantEvents) {
-				return fmt.Errorf(`Received %d events but %d expected for test "%s".
Found events: %#v`, len(foundEvents), len(wantEvents), testName, foundEvents)
-			}
-		}
-	}
-	return nil
-}
-
-func checkRunCondition(t *testing.T, customRun *tektonv1beta1.CustomRun, expectedStatus corev1.ConditionStatus, expectedReason pipelineloopv1alpha1.PipelineLoopRunReason) {
-	condition := customRun.Status.GetCondition(apis.ConditionSucceeded)
-	if condition == nil {
-		t.Error("Condition missing in CustomRun")
-	} else {
-		if condition.Status != expectedStatus {
-			t.Errorf("Expected CustomRun status to be %v but was %v", expectedStatus, condition)
-		}
-		if condition.Reason != expectedReason.String() {
-			t.Errorf("Expected reason to be %q but was %q", expectedReason.String(), condition.Reason)
-		}
-	}
-	if customRun.Status.StartTime == nil {
-		t.Errorf("Expected CustomRun start time to be set but it wasn't")
-	}
-	if expectedStatus == corev1.ConditionUnknown {
-		if customRun.Status.CompletionTime != nil {
-			t.Errorf("Expected CustomRun completion time to not be set but it was")
-		}
-	} else if customRun.Status.CompletionTime == nil {
-		t.Errorf("Expected CustomRun completion time to be set but it wasn't")
-	}
-}
-
-func checkRunStatus(t *testing.T, customRun *tektonv1beta1.CustomRun, expectedStatus map[string]pipelineloopv1alpha1.PipelineLoopPipelineRunStatus) {
-	status := &pipelineloopv1alpha1.PipelineLoopRunStatus{}
-	if err := customRun.Status.DecodeExtraFields(status); err != nil {
-		t.Errorf("DecodeExtraFields error: %v", err.Error())
-	}
-	t.Log("pipelineruns", status.PipelineRuns)
-	if len(status.PipelineRuns) != len(expectedStatus) {
-		t.Errorf("Expected CustomRun status to include %d PipelineRuns but found %d: %v", len(expectedStatus), len(status.PipelineRuns), status.PipelineRuns)
-		return
-	}
-	for expectedPipelineRunName, expectedPipelineRunStatus := range expectedStatus {
-		actualPipelineRunStatus, exists := status.PipelineRuns[expectedPipelineRunName]
-		if !exists {
-			t.Errorf("Expected CustomRun status to include PipelineRun status for PipelineRun %s", expectedPipelineRunName)
-			continue
-		}
-		if actualPipelineRunStatus.Iteration != expectedPipelineRunStatus.Iteration {
-			t.Errorf("CustomRun status for PipelineRun %s has iteration number %d instead of %d",
-				expectedPipelineRunName, actualPipelineRunStatus.Iteration, expectedPipelineRunStatus.Iteration)
-		}
-		actualIterationItem, err := json.Marshal(actualPipelineRunStatus.IterationItem)
-		expectedIterationItem, _ := json.Marshal(expectedPipelineRunStatus.IterationItem)
-		if err != nil || string(actualIterationItem) != string(expectedIterationItem) {
-			t.Errorf("CustomRun status for PipelineRun %s has iteration item %v instead of %v",
-				expectedPipelineRunName, actualPipelineRunStatus.IterationItem, expectedPipelineRunStatus.IterationItem)
-		}
-		if d := cmp.Diff(expectedPipelineRunStatus.Status, actualPipelineRunStatus.Status, cmpopts.IgnoreTypes(apis.Condition{}.LastTransitionTime.Inner.Time)); d != "" {
-			t.Errorf("CustomRun status for PipelineRun %s is incorrect.
Diff %s", expectedPipelineRunName, diff.PrintWantGot(d)) - } - } -} - -var aPipeline = &tektonv1.Pipeline{ - ObjectMeta: metav1.ObjectMeta{Name: "a-pipeline", Namespace: "foo"}, - Spec: tektonv1.PipelineSpec{ - Params: []tektonv1.ParamSpec{{ - Name: "current-item", - Type: tektonv1.ParamTypeString, - }, { - Name: "additional-parameter", - Type: tektonv1.ParamTypeString, - }}, - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Steps: []tektonv1.Step{{ - Name: "foo", Image: "bar", - }}, - }, - }, - }}, - }, -} - -var aPipelineLoop = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "a-pipelineloop", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - IterateParam: "current-item", - IterateParamSeparator: "separator", - }, -} - -var aPipelineLoop2 = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "a-pipelineloop2", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - IterateParam: "current-item", - IterateParamSeparator: "separator", - IterationNumberParam: "additional-parameter", - }, -} - -var wsPipelineLoop = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "ws-pipelineloop", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - IterateParam: "current-item", - Workspaces: []tektonv1.WorkspaceBinding{{ - Name: "test", - ConfigMap: &corev1.ConfigMapVolumeSource{ - LocalObjectReference: corev1.LocalObjectReference{Name: "test"}, - Items: []corev1.KeyToPath{}, - }, - }}, - }, -} - -var newPipelineLoop = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "new-pipelineloop", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - IterateParam: "current-item", - ServiceAccountName: "default", - PodTemplate: &pod.PodTemplate{ - HostAliases: []corev1.HostAlias{{ - IP: "0.0.0.0", - Hostnames: []string{"localhost"}, - }}, - HostNetwork: true, - }, - TaskRunSpecs: []tektonv1.PipelineTaskRunSpec{{ - PipelineTaskName: "test-task", - ServiceAccountName: "test", - PodTemplate: &pod.PodTemplate{ - HostAliases: []corev1.HostAlias{{ - IP: "0.0.0.0", - Hostnames: []string{"localhost"}, - }}, - HostNetwork: true, - }, - }}, - }, -} - -var nPipeline = &tektonv1.Pipeline{ - ObjectMeta: metav1.ObjectMeta{Name: "n-pipeline", Namespace: "foo"}, - Spec: tektonv1.PipelineSpec{ - Params: []tektonv1.ParamSpec{{ - Name: "iteration", - Type: tektonv1.ParamTypeString, - }, { - Name: "additional-parameter", - Type: tektonv1.ParamTypeString, - }}, - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Steps: []tektonv1.Step{{ - Name: "foo", Image: "bar", - }}, - }, - }, - }}, - }, -} - -var paraPipeline = &tektonv1.Pipeline{ - ObjectMeta: metav1.ObjectMeta{Name: "para-pipeline", Namespace: "foo"}, - Spec: tektonv1.PipelineSpec{ - Params: []tektonv1.ParamSpec{{ - Name: "current-item", - Type: tektonv1.ParamTypeString, - }, { - Name: "additional-parameter", - Type: tektonv1.ParamTypeString, - }}, - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Steps: []tektonv1.Step{{ - Name: "foo", Image: "bar", - }}, - }, - }, - }}, - }, -} - -func 
getInnerLoopByte(pipelineLoopSpec pipelineloopv1alpha1.PipelineLoopSpec) []byte { - innerLoop, err := json.Marshal(pipelineLoopSpec) - if err != nil { - fmt.Println(fmt.Errorf("error while marshalling pipelineLoop %s", err.Error()).Error()) - panic(err) - } - return innerLoop -} - -var ePipelineLoop = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "e-pipelineloop", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineSpec: ¶Pipeline.Spec, - IterateParam: "current-item", - }, -} - -var nestedPipeline = &tektonv1.Pipeline{ - ObjectMeta: metav1.ObjectMeta{Name: "nestedPipeline", Namespace: "foo"}, - Spec: tektonv1.PipelineSpec{ - Params: []tektonv1.ParamSpec{{ - Name: "additional-parameter", - Type: tektonv1.ParamTypeString, - }, { - Name: "iteration", - Type: tektonv1.ParamTypeString, - }}, - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TypeMeta: runtime.TypeMeta{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - }, - Spec: runtime.RawExtension{ - Raw: getInnerLoopByte(ePipelineLoop.Spec), - }, - }, - }}, - }, -} - -func setPipelineNestedStackDepth(pipeline *tektonv1.Pipeline, depth int) *tektonv1.Pipeline { - pl := pipeline.DeepCopy() - pl.Spec.Tasks[0].TaskSpec.Metadata.Annotations = map[string]string{MaxNestedStackDepthKey: fmt.Sprint(depth)} - return pl -} - -var paraPipelineLoop = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "para-pipelineloop", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "para-pipeline"}, - IterateParam: "current-item", - Parallelism: 2, - }, -} - -var nPipelineLoop = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "n-pipelineloop", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "n-pipeline"}, - IterateNumeric: "iteration", - }, -} - -var nestedPipelineLoop = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "nested-pipelineloop", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineSpec: &nestedPipeline.Spec, - IterateParam: "current-item", - }, -} - -func setPipelineLoopNestedStackDepth(pl *pipelineloopv1alpha1.PipelineLoop, depth int) *pipelineloopv1alpha1.PipelineLoop { - plCopy := pl.DeepCopy() - plCopy.Spec.PipelineSpec = &setPipelineNestedStackDepth(nestedPipeline, depth).Spec - return plCopy -} - -var aPipelineLoopWithInlineTask = &pipelineloopv1alpha1.PipelineLoop{ - ObjectMeta: metav1.ObjectMeta{Name: "a-pipelineloop-with-inline-task", Namespace: "foo"}, - Spec: pipelineloopv1alpha1.PipelineLoopSpec{ - PipelineSpec: &tektonv1.PipelineSpec{ - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Params: []tektonv1.ParamSpec{{ - Name: "current-item", - Type: tektonv1.ParamTypeString, - }, { - Name: "additional-parameter", - Type: tektonv1.ParamTypeString, - }}, - Steps: []tektonv1.Step{{ - Name: "foo", Image: "bar", - }}, - }, - }, - }}, - }, - IterateParam: "current-item", - Timeout: &metav1.Duration{Duration: 5 * time.Minute}, - }, -} - -var runWsPipelineLoop = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-ws-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "ws-pipelineloop", - "tekton.dev/pipeline": 
"pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeArray, ArrayVal: []string{"item1", "item2"}}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "ws-pipelineloop", - }, - }, -} - -var runNewPipelineLoop = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-new-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "new-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeArray, ArrayVal: []string{"item1", "item2"}}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "new-pipelineloop", - }, - }, -} - -var runPipelineLoop = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeArray, ArrayVal: []string{"item1", "item2"}}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var runPipelineLoop2 = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop2", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeArray, ArrayVal: []string{"item1", "item2"}}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop2", - }, - }, -} - -var runNestedPipelineLoop = 
&tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "nested-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation12": "myTestAnnotationValue12", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeArray, ArrayVal: []string{"item1"}}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomSpec: &tektonv1beta1.EmbeddedCustomRunSpec{ - TypeMeta: runtime.TypeMeta{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - }, - Spec: runtime.RawExtension{ - Raw: getInnerLoopByte(nestedPipelineLoop.Spec), - }, - }, - }, -} - -func setRunNestedStackDepth(run *tektonv1beta1.CustomRun, depth int) *tektonv1beta1.CustomRun { - r := run.DeepCopy() - r.Spec.CustomSpec.Metadata.Annotations = map[string]string{MaxNestedStackDepthKey: fmt.Sprint(depth)} - return r -} - -var paraRunPipelineLoop = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeArray, ArrayVal: []string{"item1", "item2"}}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "para-pipelineloop", - }, - }, -} - -var runPipelineLoopWithInDictParams = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: `[{"a":1,"b":2}, {"a":2,"b":1}]`}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var runPipelineLoopWithInStringParams = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - 
Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: `["item1", "item2"]`}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var runPipelineLoopWithInStringSeparatorParams = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "item1|item2"}, - }, { - Name: "separator", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "|"}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var runPipelineLoopWithSpaceSeparatorParams = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "item1 item2"}, - }, { - Name: "separator", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: " "}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var runPipelineLoopWithSpaceParam = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: 
tektonv1beta1.ParamTypeString, StringVal: " "}, - }, { - Name: "separator", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: ","}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var runPipelineLoopWithDefaultSeparatorParams = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "item1,item2"}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -func specifyLoopRange(from, to, step string, r *tektonv1beta1.CustomRun) *tektonv1beta1.CustomRun { - t := r.DeepCopy() - for n, i := range r.Spec.Params { - if i.Name == "from" { - t.Spec.Params[n].Value = tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: from} - } - if i.Name == "to" { - t.Spec.Params[n].Value = tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: to} - } - if i.Name == "step" { - t.Spec.Params[n].Value = tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: step} - } - } - return t -} - -var runPipelineLoopWithIterateNumeric = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "n-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "from", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: `1`}, - }, { - Name: "step", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: `1`}, - }, { - Name: "to", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: `3`}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "n-pipelineloop", - }, - }, -} - -var runPipelineLoopWithInlineTask = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-with-inline-task", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - }, - 
Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeArray, ArrayVal: []string{"item1", "item2"}}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop-with-inline-task", - }, - }, -} - -var runWithMissingPipelineLoopName = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "bad-run-pipelineloop-missing", - Namespace: "foo", - }, - Spec: tektonv1beta1.CustomRunSpec{ - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - // missing Name - }, - }, -} - -var runWithNonexistentPipelineLoop = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "bad-run-pipelineloop-not-found", - Namespace: "foo", - }, - Spec: tektonv1beta1.CustomRunSpec{ - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "no-such-pipelineloop", - }, - }, -} - -var runWithInvalidRange = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-invalid-range", - Namespace: "foo", - }, - Spec: tektonv1beta1.CustomRunSpec{ - // current-item, which is the iterate parameter, is missing from parameters - Params: []tektonv1beta1.Param{{ - Name: "from", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: `-11`}, - }, { - Name: "step", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: `1`}, - }, { - Name: "to", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: `-13`}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var runWithIterateParamNotAnArray = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "bad-run-iterate-param-not-an-array", - Namespace: "foo", - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - // Value of iteration parameter must be an array so this is an error. 
- Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "item1"}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var expectedPipelineRunIterationDict = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": "{\"a\":1,\"b\":2}", - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item-subvar-a", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "1"}, - }, { - Name: "current-item-subvar-b", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "2"}, - }}, - }, -} - -var expectedParaPipelineRun = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "", - "custom.tekton.dev/parentPipelineRun": "", - "custom.tekton.dev/pipelineLoop": "para-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "para-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - }, -} - -var expectedParaPipelineRun1 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00002-mz4c7", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "", - "custom.tekton.dev/parentPipelineRun": "", - "custom.tekton.dev/pipelineLoop": "para-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "2", 
- "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item2"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "para-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item2"}, - }}, - }, -} - -var expectedPipelineRunIteration1 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - }, -} - -var expectedPipelineRunIterationEmptySpace = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `" item1 "`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: " item1 "}, - }}, - }, -} - -var expectedPipelineRunIterationWithWhiteSpace = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - 
"tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `" "`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: " "}, - }}, - }, -} - -var expectedPipelineRunFailed = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-failed", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - }, -} - -var expectedPipelineRunRetry = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - }, -} - -var expectedPipelineRunIterateNumeric1 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - 
"custom.tekton.dev/pipelineLoop": "n-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": "1", - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "n-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "iteration", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "1"}, - }}, - }, -} - -var expectedPipelineRunIterateNumeric2 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "n-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": "-10", - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "n-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "iteration", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "-10"}, - }}, - }, -} - -var expectedPipelineRunIterateNumericParam = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop2", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "1"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - }, -} - -// Note: The pipelinerun for the second iteration has the same random suffix as the first due to the resetting of the seed on each test. 
-var expectedPipelineRunIteration2 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00002-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "2", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item2"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item2"}, - }, { - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }}, - }, -} - -var expectedPipelineRunWithWorkSpace = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-ws-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-ws-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "ws-pipelineloop", - "tekton.dev/run": "run-ws-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{ - Name: "a-pipeline", - }, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - Workspaces: []tektonv1.WorkspaceBinding{{ - Name: "test", - ConfigMap: &corev1.ConfigMapVolumeSource{ - LocalObjectReference: corev1.LocalObjectReference{Name: "test"}, - Items: []corev1.KeyToPath{}, - }, - }}, - }, -} - -var expectedPipelineRunWithPodTemplate = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-new-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-new-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "pr-loop-example", - "custom.tekton.dev/parentPipelineRun": "pr-loop-example", - "custom.tekton.dev/pipelineLoop": "new-pipelineloop", - "tekton.dev/run": "run-new-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: 
&tektonv1.PipelineRef{ - Name: "a-pipeline", - }, - Params: []tektonv1.Param{{ - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - TaskRunTemplate: tektonv1.PipelineTaskRunTemplate{ - ServiceAccountName: "default", - PodTemplate: &pod.PodTemplate{ - HostAliases: []corev1.HostAlias{{ - IP: "0.0.0.0", - Hostnames: []string{"localhost"}, - }}, - HostNetwork: true, - }, - }, - TaskRunSpecs: []tektonv1.PipelineTaskRunSpec{{ - PipelineTaskName: "test-task", - ServiceAccountName: "test", - PodTemplate: &pod.PodTemplate{ - HostAliases: []corev1.HostAlias{{ - IP: "0.0.0.0", - Hostnames: []string{"localhost"}, - }}, - HostNetwork: true, - }, - }}, - }, -} - -var expectedPipelineRunWithInlineTaskIteration1 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-with-inline-task-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop-with-inline-task", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "", - "custom.tekton.dev/parentPipelineRun": "", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop-with-inline-task", - "tekton.dev/run": "run-pipelineloop-with-inline-task", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineSpec: &tektonv1.PipelineSpec{ - Tasks: []tektonv1.PipelineTask{{ - Name: "mytask", - TaskSpec: &tektonv1.EmbeddedTask{ - TaskSpec: tektonv1.TaskSpec{ - Params: []tektonv1.ParamSpec{{ - Name: "additional-parameter", - Type: tektonv1.ParamTypeString, - }, { - Name: "current-item", - Type: tektonv1.ParamTypeString, - }}, - Steps: []tektonv1.Step{{ - Name: "foo", Image: "bar", - }}, - }, - }, - }}, - }, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - Timeouts: &tektonv1.TimeoutFields{Pipeline: &metav1.Duration{Duration: 5 * time.Minute}}, - }, -} -var expectedNestedPipelineRun = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "nested-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "nested-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "", - "custom.tekton.dev/parentPipelineRun": "", - "custom.tekton.dev/pipelineLoop": "nested-pipelineloop", - "tekton.dev/run": "nested-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - }, - Annotations: map[string]string{ - "myTestAnnotation12": "myTestAnnotationValue12", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineSpec: &setPipelineNestedStackDepth(nestedPipeline, 29).Spec, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: 
tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - }, -} - -var conditionRunPipelineLoop = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "last-loop-task": "task-fail", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeArray, ArrayVal: []string{"item1", "item2"}}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -var expectedConditionPipelineRunIteration1 = &tektonv1.PipelineRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop-00001-9l9zj", - Namespace: "foo", - OwnerReferences: []metav1.OwnerReference{{ - APIVersion: "tekton.dev/v1beta1", - Kind: "CustomRun", - Name: "run-pipelineloop", - Controller: &trueB, - BlockOwnerDeletion: &trueB, - }}, - Labels: map[string]string{ - "custom.tekton.dev/originalPipelineRun": "", - "custom.tekton.dev/parentPipelineRun": "", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/run": "run-pipelineloop", - "custom.tekton.dev/pipelineLoopIteration": "1", - "myTestLabel": "myTestLabelValue", - "last-loop-task": "task-fail", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - "custom.tekton.dev/pipelineLoopCurrentIterationItem": `"item1"`, - }, - }, - Spec: tektonv1.PipelineRunSpec{ - PipelineRef: &tektonv1.PipelineRef{Name: "a-pipeline"}, - Params: []tektonv1.Param{{ - Name: "additional-parameter", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "stuff"}, - }, { - Name: "current-item", - Value: tektonv1.ParamValue{Type: tektonv1.ParamTypeString, StringVal: "item1"}, - }}, - }, -} - -var runPipelineLoopWithInStringSeparatorEmptySpaceParams = &tektonv1beta1.CustomRun{ - ObjectMeta: metav1.ObjectMeta{ - Name: "run-pipelineloop", - Namespace: "foo", - Labels: map[string]string{ - "myTestLabel": "myTestLabelValue", - "custom.tekton.dev/pipelineLoop": "a-pipelineloop", - "tekton.dev/pipeline": "pr-loop-example", - "tekton.dev/pipelineRun": "pr-loop-example", - "tekton.dev/pipelineTask": "loop-task", - }, - Annotations: map[string]string{ - "myTestAnnotation": "myTestAnnotationValue", - }, - }, - Spec: tektonv1beta1.CustomRunSpec{ - Params: []tektonv1beta1.Param{{ - Name: "current-item", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: " item1 | item2 "}, - }, { - Name: "separator", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "|"}, - }, { - Name: "additional-parameter", - Value: tektonv1beta1.ParamValue{Type: tektonv1beta1.ParamTypeString, StringVal: "stuff"}, - }}, - CustomRef: &tektonv1beta1.TaskRef{ - APIVersion: pipelineloopv1alpha1.SchemeGroupVersion.String(), - Kind: pipelineloop.PipelineLoopControllerName, - Name: "a-pipelineloop", - }, - }, -} - -func TestReconcilePipelineLoopRun(t *testing.T) { - - testcases := []struct { - name string - pipeline *tektonv1.Pipeline - pipelineloop *pipelineloopv1alpha1.PipelineLoop - run *tektonv1beta1.CustomRun - pipelineruns []*tektonv1.PipelineRun - 
expectedStatus corev1.ConditionStatus - expectedReason pipelineloopv1alpha1.PipelineLoopRunReason - expectedPipelineruns []*tektonv1.PipelineRun - expectedEvents []string - }{{ - name: "Reconcile a new run with a pipelineloop that references a pipeline", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: runPipelineLoop, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIteration1}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop and a dict params", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: runPipelineLoopWithInDictParams, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIterationDict}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop and a string params", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: runPipelineLoopWithInStringParams, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIteration1}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop and a string params with separator", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: runPipelineLoopWithInStringSeparatorParams, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIteration1}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop and an empty space string params with separator", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: runPipelineLoopWithInStringSeparatorEmptySpaceParams, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIterationEmptySpace}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop and a string params with whitespace separator", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: runPipelineLoopWithSpaceSeparatorParams, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIteration1}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with iterateNumeric defined", - pipeline: nPipeline, - pipelineloop: nPipelineLoop, - run: runPipelineLoopWithIterateNumeric, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: 
pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIterateNumeric1}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with -ve numeric range defined", - pipeline: nPipeline, - pipelineloop: nPipelineLoop, - run: specifyLoopRange("-10", "-15", "-1", runPipelineLoopWithIterateNumeric), - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIterateNumeric2}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with iterationNumberParam defined", - pipeline: aPipeline, - pipelineloop: aPipelineLoop2, - run: runPipelineLoop2, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIterateNumericParam}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop that contains an inline task", - pipelineloop: aPipelineLoopWithInlineTask, - run: runPipelineLoopWithInlineTask, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithInlineTaskIteration1}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop that contains a workspace", - pipeline: aPipeline, - pipelineloop: wsPipelineLoop, - run: runWsPipelineLoop, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithWorkSpace}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a run after all PipelineRuns have succeeded", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: loopRunning(runPipelineLoop), - pipelineruns: []*tektonv1.PipelineRun{successful(expectedPipelineRunIteration1), successful(expectedPipelineRunIteration2)}, - expectedStatus: corev1.ConditionTrue, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded, - expectedPipelineruns: []*tektonv1.PipelineRun{successful(expectedPipelineRunIteration1), successful(expectedPipelineRunIteration2)}, - expectedEvents: []string{"Normal Succeeded All PipelineRuns completed successfully"}, - }, { - name: "Reconcile a run after the first PipelineRun has failed", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: loopRunning(runPipelineLoop), - pipelineruns: []*tektonv1.PipelineRun{failed(expectedPipelineRunIteration1)}, - expectedStatus: corev1.ConditionFalse, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonFailed, - expectedPipelineruns: []*tektonv1.PipelineRun{failed(expectedPipelineRunIteration1)}, - expectedEvents: []string{"Warning Failed PipelineRun " + expectedPipelineRunIteration1.Name + " has failed"}, - }, { - name: "Reconcile a run with retries after the first PipelineRun has failed", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, 
- run: loopRunning(setRetries(runPipelineLoop, 1)), - pipelineruns: []*tektonv1.PipelineRun{failed(expectedPipelineRunFailed)}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{setDeleted(failed(expectedPipelineRunFailed)), expectedPipelineRunRetry}, - }, { - name: "Reconcile a new run with a pipelineloop with Parallelism specified", - pipeline: paraPipeline, - pipelineloop: paraPipelineLoop, - run: paraRunPipelineLoop, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedParaPipelineRun, expectedParaPipelineRun1}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a nested pipelineloop", - pipeline: nestedPipeline, - pipelineloop: nestedPipelineLoop, - run: runNestedPipelineLoop, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedNestedPipelineRun}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a recursive pipelineloop with max nested stack depth 0", - pipeline: setPipelineNestedStackDepth(nestedPipeline, 0), - pipelineloop: setPipelineLoopNestedStackDepth(nestedPipelineLoop, 0), - run: setRunNestedStackDepth(runNestedPipelineLoop, 0), - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionFalse, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonStackLimitExceeded, - expectedPipelineruns: []*tektonv1.PipelineRun{}, - expectedEvents: []string{"Normal Started ", "Warning Failed nested stack depth limit reached."}, - }, { - name: "Reconcile a run with condition pipelinerun, and the first PipelineRun condition check failed", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: loopRunning(conditionRunPipelineLoop), - pipelineruns: []*tektonv1.PipelineRun{successfulWithSkipedTasks(expectedConditionPipelineRunIteration1)}, - expectedStatus: corev1.ConditionTrue, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded, - expectedPipelineruns: []*tektonv1.PipelineRun{successfulWithSkipedTasks(expectedConditionPipelineRunIteration1)}, - expectedEvents: []string{"Normal Succeeded PipelineRuns completed successfully with the conditions are met"}, - }, { - name: "Reconcile a new run with a pipelineloop that contains a PodTemplate, ServiceAccountName, TaskRunSpecs", - pipeline: aPipeline, - pipelineloop: newPipelineLoop, - run: runNewPipelineLoop, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunWithPodTemplate}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop and a string params without separator", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: runPipelineLoopWithDefaultSeparatorParams, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: 
[]*tektonv1.PipelineRun{expectedPipelineRunIteration1}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, { - name: "Reconcile a new run with a pipelineloop and a string params without separator", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: runPipelineLoopWithSpaceParam, - pipelineruns: []*tektonv1.PipelineRun{}, - expectedStatus: corev1.ConditionUnknown, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonRunning, - expectedPipelineruns: []*tektonv1.PipelineRun{expectedPipelineRunIterationWithWhiteSpace}, - expectedEvents: []string{"Normal Started", "Normal Running Iterations completed: 0"}, - }, - } - - for _, tc := range testcases { - t.Run(tc.name, func(t *testing.T) { - ctx := context.Background() - names.TestingSeed() - - optionalPipeline := []*tektonv1.Pipeline{tc.pipeline} - if tc.pipeline == nil { - optionalPipeline = nil - } - - d := test.Data{ - CustomRuns: []*tektonv1beta1.CustomRun{tc.run}, - Pipelines: optionalPipeline, - PipelineRuns: tc.pipelineruns, - } - - testAssets, _ := getPipelineLoopController(t, d, []*pipelineloopv1alpha1.PipelineLoop{tc.pipelineloop}) - c := testAssets.Controller - clients := testAssets.Clients - - if err := c.Reconciler.Reconcile(ctx, getCustomRunName(tc.run)); err != nil { - t.Fatalf("Error reconciling: %s", err) - } - - // Fetch the updated Run - reconciledRun, err := clients.Pipeline.TektonV1beta1().CustomRuns(tc.run.Namespace).Get(ctx, tc.run.Name, metav1.GetOptions{}) - if err != nil { - t.Fatalf("Error getting reconciled run from fake client: %s", err) - } - - // Verify that the Run has the expected status and reason. - checkRunCondition(t, reconciledRun, tc.expectedStatus, tc.expectedReason) - - // Verify that a PipelineRun was or was not created depending on the test. - // If the number of expected PipelineRuns is greater than the original number of PipelineRuns - // then the test expects a new PipelineRun to be created. The new PipelineRun must be the - // last one in the list of expected PipelineRuns. - createdPipelineruns := getCreatedPipelinerun(t, clients) - // All the arrays and sub arrays are sorted to ensure there are no sporadic failures - // resulting from mismatch due to different ordering of items. 
- sort.Slice(createdPipelineruns, func(i, j int) bool {
- return createdPipelineruns[i].Name < createdPipelineruns[j].Name
- })
- for _, createdPipelinerun := range createdPipelineruns {
- sort.Slice(createdPipelinerun.Spec.Params, func(i, j int) bool {
- return createdPipelinerun.Spec.Params[i].Name < createdPipelinerun.Spec.Params[j].Name
- })
- if createdPipelinerun.Spec.PipelineSpec != nil {
- sort.Slice(createdPipelinerun.Spec.PipelineSpec.Params, func(i, j int) bool {
- return createdPipelinerun.Spec.PipelineSpec.Params[i].Name < createdPipelinerun.Spec.PipelineSpec.Params[j].Name
- })
- sort.Slice(createdPipelinerun.Spec.PipelineSpec.Tasks, func(i, j int) bool {
- return createdPipelinerun.Spec.PipelineSpec.Tasks[i].Name < createdPipelinerun.Spec.PipelineSpec.Tasks[j].Name
- })
- for _, t := range createdPipelinerun.Spec.PipelineSpec.Tasks {
- sort.Slice(t.Params, func(i, j int) bool {
- return t.Params[i].Name < t.Params[j].Name
- })
- if t.TaskSpec != nil {
- sort.Slice(t.TaskSpec.Params, func(i, j int) bool {
- return t.TaskSpec.Params[i].Name < t.TaskSpec.Params[j].Name
- })
- }
- }
- }
- }
- if len(tc.expectedPipelineruns) > len(tc.pipelineruns) {
- if len(createdPipelineruns) == 0 {
- t.Errorf("A PipelineRun should have been created but was not")
- } else {
- pipelineRunsExpectedToBeCreated := make([]*tektonv1.PipelineRun, len(createdPipelineruns))
- i := 0
- for _, pr := range tc.expectedPipelineruns {
- if pr.Labels["deleted"] != "True" {
- pipelineRunsExpectedToBeCreated[i] = pr
- i = i + 1 // skip the prs that were retried.
- }
- }
-
- if d := cmp.Diff(pipelineRunsExpectedToBeCreated, createdPipelineruns); d != "" {
- t.Errorf("Expected PipelineRun was not created. Diff %s", diff.PrintWantGot(d))
- }
- }
- } else {
- if len(createdPipelineruns) > 0 {
- t.Errorf("A PipelineRun was created which was not expected")
- }
- }
-
- // Verify Run status contains status for all PipelineRuns.
- _, iterationElements, _ := computeIterations(tc.run, &tc.pipelineloop.Spec)
- expectedPipelineRuns := map[string]pipelineloopv1alpha1.PipelineLoopPipelineRunStatus{}
- i := 1
- for _, pr := range tc.expectedPipelineruns {
- expectedPipelineRuns[pr.Name] = pipelineloopv1alpha1.PipelineLoopPipelineRunStatus{Iteration: i, IterationItem: iterationElements[i-1], Status: &pr.Status}
- if pr.Labels["deleted"] != "True" {
- i = i + 1 // the iteration stays the same in case the previous pr was a retry.
- }
- }
- checkRunStatus(t, reconciledRun, expectedPipelineRuns)
-
- // Verify expected events were created.
- if err := checkEvents(testAssets.Recorder, tc.name, tc.expectedEvents); err != nil { - t.Errorf(err.Error()) - } - }) - } -} - -func TestReconcilePipelineLoopRunFailures(t *testing.T) { - testcases := []struct { - name string - pipelineloop *pipelineloopv1alpha1.PipelineLoop - customRun *tektonv1beta1.CustomRun - reason pipelineloopv1alpha1.PipelineLoopRunReason - wantEvents []string - }{{ - name: "missing PipelineLoop name", - customRun: runWithMissingPipelineLoopName, - reason: pipelineloopv1alpha1.PipelineLoopRunReasonCouldntGetPipelineLoop, - wantEvents: []string{ - "Normal Started ", - "Warning Failed Missing spec.ref.name for Run", - }, - }, { - name: "nonexistent PipelineLoop", - customRun: runWithNonexistentPipelineLoop, - reason: pipelineloopv1alpha1.PipelineLoopRunReasonCouldntGetPipelineLoop, - wantEvents: []string{ - "Normal Started ", - "Warning Failed Error retrieving PipelineLoop", - }, - }, { - name: "invalid range", - pipelineloop: aPipelineLoop, - customRun: runWithInvalidRange, - reason: pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation, - wantEvents: []string{ - "Normal Started ", - `Warning Failed Cannot determine number of iterations: invalid values for from:-11, to:-13 & step: 1 found in runs`, - }, - }, { - name: "invalid range 2", - pipelineloop: aPipelineLoop, - customRun: specifyLoopRange("10", "12", "-1", runWithInvalidRange), - reason: pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation, - wantEvents: []string{ - "Normal Started ", - `Warning Failed Cannot determine number of iterations: invalid values for from:10, to:12 & step: -1 found in runs`, - }, - }, { - name: "iterate parameter not an array", - pipelineloop: aPipelineLoop, - customRun: runWithIterateParamNotAnArray, - reason: pipelineloopv1alpha1.PipelineLoopRunReasonFailedValidation, - wantEvents: []string{ - "Normal Started ", - `Warning Failed Cannot determine number of iterations: the value of the iterate parameter "current-item" can not transfer to array`, - }, - }} - testcases = testcases[len(testcases)-1:] - for _, tc := range testcases { - t.Run(tc.name, func(t *testing.T) { - ctx := context.Background() - - d := test.Data{ - CustomRuns: []*tektonv1beta1.CustomRun{tc.customRun}, - } - - optionalPipelineLoop := []*pipelineloopv1alpha1.PipelineLoop{tc.pipelineloop} - if tc.pipelineloop == nil { - optionalPipelineLoop = nil - } - - testAssets, _ := getPipelineLoopController(t, d, optionalPipelineLoop) - c := testAssets.Controller - clients := testAssets.Clients - - if err := c.Reconciler.Reconcile(ctx, getCustomRunName(tc.customRun)); err != nil { - t.Fatalf("Error reconciling: %s", err) - } - - // Fetch the updated Run - reconciledRun, err := clients.Pipeline.TektonV1beta1().CustomRuns(tc.customRun.Namespace).Get(ctx, tc.customRun.Name, metav1.GetOptions{}) - if err != nil { - t.Fatalf("Error getting reconciled run from fake client: %s", err) - } - - // Verify that the Run is in Failed status and both the start time and the completion time are set. 
- checkRunCondition(t, reconciledRun, corev1.ConditionFalse, tc.reason) - if reconciledRun.Status.StartTime == nil { - t.Fatalf("Expected Run start time to be set but it wasn't") - } - if reconciledRun.Status.CompletionTime == nil { - t.Fatalf("Expected Run completion time to be set but it wasn't") - } - - if err := checkEvents(testAssets.Recorder, tc.name, tc.wantEvents); err != nil { - t.Errorf(err.Error()) - } - }) - } -} - -func enableCacheForRun(run *tektonv1beta1.CustomRun) *tektonv1beta1.CustomRun { - run.ObjectMeta.Labels["pipelines.kubeflow.org/cache_enabled"] = "true" - return run -} - -func disableCacheForRun(run *tektonv1beta1.CustomRun) *tektonv1beta1.CustomRun { - run.ObjectMeta.Labels["pipelines.kubeflow.org/cache_enabled"] = "false" - return run -} - -func enableCacheForPr(pr *tektonv1.PipelineRun) *tektonv1.PipelineRun { - pr.ObjectMeta.Labels["pipelines.kubeflow.org/cache_enabled"] = "true" - return pr -} - -func disableCacheForPr(pr *tektonv1.PipelineRun) *tektonv1.PipelineRun { - pr.ObjectMeta.Labels["pipelines.kubeflow.org/cache_enabled"] = "false" - return pr -} - -func TestReconcilePipelineLoopRunCachedRun(t *testing.T) { - testcases := []struct { - name string - pipeline *tektonv1.Pipeline - pipelineloop *pipelineloopv1alpha1.PipelineLoop - run *tektonv1beta1.CustomRun - pipelineruns []*tektonv1.PipelineRun - expectedStatus corev1.ConditionStatus - expectedReason pipelineloopv1alpha1.PipelineLoopRunReason - expectedEvents []string - }{{ - name: "Reconcile a run successfully", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: enableCacheForRun(loopSucceeded(runPipelineLoop)), - pipelineruns: []*tektonv1.PipelineRun{successful(enableCacheForPr(expectedPipelineRunIteration1)), successful(enableCacheForPr(expectedPipelineRunIteration2))}, - expectedStatus: corev1.ConditionTrue, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonSucceeded, - expectedEvents: []string{}, - }, { - name: "Test fetch from cache for previously successful Run.", - pipeline: aPipeline, - pipelineloop: aPipelineLoop, - run: enableCacheForRun(runPipelineLoop), - expectedStatus: corev1.ConditionTrue, - expectedReason: pipelineloopv1alpha1.PipelineLoopRunReasonCacheHit, - expectedEvents: []string{"Normal Started ", "Normal Succeeded A cached result of the previous run was found."}, - }} - for _, tc := range testcases { - t.Run(tc.name, func(t *testing.T) { - ctx := context.Background() - names.TestingSeed() - optionalPipeline := []*tektonv1.Pipeline{tc.pipeline} - status := &pipelineloopv1alpha1.PipelineLoopRunStatus{} - tc.pipelineloop.Spec.SetDefaults(ctx) - status.PipelineLoopSpec = &tc.pipelineloop.Spec - err := tc.run.Status.EncodeExtraFields(status) - if err != nil { - t.Fatal("Failed to encode spec in the pipelineSpec:", err) - } - if tc.pipeline == nil { - optionalPipeline = nil - } - cm := corev1.ConfigMap{ - ObjectMeta: metav1.ObjectMeta{ - Name: "cache-config", - Namespace: system.Namespace(), - }, - Data: map[string]string{"driver": "sqlite", "dbName": "/tmp/testing2.db", "timeout": "2s"}, - } - d := test.Data{ - CustomRuns: []*tektonv1beta1.CustomRun{tc.run}, - Pipelines: optionalPipeline, - PipelineRuns: tc.pipelineruns, - ConfigMaps: []*corev1.ConfigMap{&cm}, - } - - testAssets, _ := getPipelineLoopController(t, d, []*pipelineloopv1alpha1.PipelineLoop{tc.pipelineloop}) - c := testAssets.Controller - clients := testAssets.Clients - - if err := c.Reconciler.Reconcile(ctx, getCustomRunName(tc.run)); err != nil { - t.Fatalf("Error reconciling: %s", err) - } - // Fetch 
the updated Run
- reconciledRun, err := clients.Pipeline.TektonV1beta1().CustomRuns(tc.run.Namespace).Get(ctx, tc.run.Name, metav1.GetOptions{})
- if err != nil {
- t.Fatalf("Error getting reconciled run from fake client: %s", err)
- }
- // Verify that the Run has the expected status and reason.
- checkRunCondition(t, reconciledRun, tc.expectedStatus, tc.expectedReason)
- // Verify expected events were created.
- if err := checkEvents(testAssets.Recorder, tc.name, tc.expectedEvents); err != nil {
- t.Errorf(err.Error())
- }
- })
- }
-}
-func checkRunResult(t *testing.T, run *tektonv1beta1.CustomRun, expectedResult []tektonv1beta1.CustomRunResult) {
- if len(run.Status.Results) != len(expectedResult) {
- t.Errorf("Expected Run results to include %d results but found %d: %v", len(expectedResult), len(run.Status.Results), run.Status.Results)
- //return
- }
-
- if d := cmp.Diff(expectedResult, run.Status.Results); d != "" {
- t.Errorf("Run result is incorrect. Diff %s", diff.PrintWantGot(d))
- }
-}
-
-func TestReconcilePipelineLoopRunLastElemResult(t *testing.T) {
- testcases := []struct {
- name string
- pipeline *tektonv1.Pipeline
- pipelineloop *pipelineloopv1alpha1.PipelineLoop
- customRun *tektonv1beta1.CustomRun
- pipelineruns []*tektonv1.PipelineRun
- expectedResult []tektonv1beta1.CustomRunResult
- }{{
- name: "Reconcile a new run with a pipelineloop that references a pipeline",
- pipeline: aPipeline,
- pipelineloop: aPipelineLoop,
- customRun: disableCacheForRun(runPipelineLoop),
- pipelineruns: []*tektonv1.PipelineRun{disableCacheForPr(successful(expectedPipelineRunIteration1)), disableCacheForPr(successful(expectedPipelineRunIteration2))},
- expectedResult: []tektonv1beta1.CustomRunResult{{Name: "last-idx", Value: "2"}, {Name: "last-elem", Value: "item2"}, {Name: "condition", Value: "succeeded"}},
- }}
- for _, tc := range testcases {
- t.Run(tc.name, func(t *testing.T) {
- ctx := context.Background()
- names.TestingSeed()
- optionalPipeline := []*tektonv1.Pipeline{tc.pipeline}
- status := &pipelineloopv1alpha1.PipelineLoopRunStatus{}
- tc.pipelineloop.Spec.SetDefaults(ctx)
- status.PipelineLoopSpec = &tc.pipelineloop.Spec
- err := tc.customRun.Status.EncodeExtraFields(status)
- if err != nil {
- t.Fatal("Failed to encode spec in the pipelineSpec:", err)
- }
- if tc.pipeline == nil {
- optionalPipeline = nil
- }
-
- d := test.Data{
- CustomRuns: []*tektonv1beta1.CustomRun{tc.customRun},
- Pipelines: optionalPipeline,
- PipelineRuns: tc.pipelineruns,
- }
-
- testAssets, _ := getPipelineLoopController(t, d, []*pipelineloopv1alpha1.PipelineLoop{tc.pipelineloop})
- c := testAssets.Controller
- clients := testAssets.Clients
-
- if err := c.Reconciler.Reconcile(ctx, getCustomRunName(tc.customRun)); err != nil {
- t.Fatalf("Error reconciling: %s", err)
- }
- // Fetch the updated Run
- reconciledRun, err := clients.Pipeline.TektonV1beta1().CustomRuns(tc.customRun.Namespace).Get(ctx, tc.customRun.Name, metav1.GetOptions{})
- if err != nil {
- t.Fatalf("Error getting reconciled run from fake client: %s", err)
- }
- checkRunResult(t, reconciledRun, tc.expectedResult)
- })
- }
-}
diff --git a/tekton-catalog/pipeline-loops/test/controller.go b/tekton-catalog/pipeline-loops/test/controller.go
deleted file mode 100644
index 7642c8aab0..0000000000
--- a/tekton-catalog/pipeline-loops/test/controller.go
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
-Copyright 2019 The Knative Authors
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the
License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package test - -import ( - "context" - "fmt" - "sync/atomic" - "testing" - - // Link in the fakes so they get injected into injection.Fake - tektonv1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" - fakepipelineclientset "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/fake" - informers "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1" - informersv1beta1 "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1beta1" - fakepipelineclient "github.com/tektoncd/pipeline/pkg/client/injection/client/fake" - fakepipelineinformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipeline/fake" - fakepipelineruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun/fake" - faketaskinformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/task/fake" - faketaskruninformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/taskrun/fake" - fakeCustomRunInformer "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1beta1/customrun/fake" - fakeresourceclientset "github.com/tektoncd/pipeline/pkg/client/resource/clientset/versioned/fake" - fakeresourceclient "github.com/tektoncd/pipeline/pkg/client/resource/injection/client/fake" - cloudeventclient "github.com/tektoncd/pipeline/pkg/reconciler/events/cloudevent" - "go.uber.org/zap" - corev1 "k8s.io/api/core/v1" - apierrs "k8s.io/apimachinery/pkg/api/errors" - "k8s.io/apimachinery/pkg/api/meta" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/runtime" - coreinformers "k8s.io/client-go/informers/core/v1" - fakekubeclientset "k8s.io/client-go/kubernetes/fake" - ktesting "k8s.io/client-go/testing" - "k8s.io/client-go/tools/cache" - "k8s.io/client-go/tools/record" - fakekubeclient "knative.dev/pkg/client/injection/kube/client/fake" - fakeconfigmapinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap/fake" - fakepodinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/pod/fake" - fakeserviceaccountinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount/fake" - "knative.dev/pkg/controller" -) - -// Data represents the desired state of the system (i.e. existing resources) to seed controllers -// with. -type Data struct { - PipelineRuns []*tektonv1.PipelineRun - Pipelines []*tektonv1.Pipeline - TaskRuns []*tektonv1.TaskRun - Tasks []*tektonv1.Task - CustomRuns []*v1beta1.CustomRun - Pods []*corev1.Pod - Namespaces []*corev1.Namespace - ConfigMaps []*corev1.ConfigMap - ServiceAccounts []*corev1.ServiceAccount -} - -// Clients holds references to clients which are useful for reconciler tests. -type Clients struct { - Pipeline *fakepipelineclientset.Clientset - Resource *fakeresourceclientset.Clientset - Kube *fakekubeclientset.Clientset - CloudEvents cloudeventclient.CEClient -} - -// Informers holds references to informers which are useful for reconciler tests. 
-type Informers struct { - PipelineRun informers.PipelineRunInformer - Pipeline informers.PipelineInformer - TaskRun informers.TaskRunInformer - CustomRun informersv1beta1.CustomRunInformer - Task informers.TaskInformer - Pod coreinformers.PodInformer - ConfigMap coreinformers.ConfigMapInformer - ServiceAccount coreinformers.ServiceAccountInformer -} - -// Assets holds references to the controller, logs, clients, and informers. -type Assets struct { - Logger *zap.SugaredLogger - Controller *controller.Impl - Clients Clients - Informers Informers - Recorder *record.FakeRecorder - Ctx context.Context -} - -func AddToInformer(t *testing.T, store cache.Store) func(ktesting.Action) (bool, runtime.Object, error) { - return func(action ktesting.Action) (bool, runtime.Object, error) { - switch a := action.(type) { - case ktesting.CreateActionImpl: - if err := store.Add(a.GetObject()); err != nil { - t.Fatal(err) - } - - case ktesting.UpdateActionImpl: - objMeta, err := meta.Accessor(a.GetObject()) - if err != nil { - return true, nil, err - } - - // Look up the old copy of this resource and perform the optimistic concurrency check. - old, exists, err := store.GetByKey(objMeta.GetNamespace() + "/" + objMeta.GetName()) - if err != nil { - return true, nil, err - } else if !exists { - // Let the client return the error. - return false, nil, nil - } - oldMeta, err := meta.Accessor(old) - if err != nil { - return true, nil, err - } - // If the resource version is mismatched, then fail with a conflict. - if oldMeta.GetResourceVersion() != objMeta.GetResourceVersion() { - return true, nil, apierrs.NewConflict( - a.Resource.GroupResource(), objMeta.GetName(), - fmt.Errorf("resourceVersion mismatch, got: %v, wanted: %v", - objMeta.GetResourceVersion(), oldMeta.GetResourceVersion())) - } - - // Update the store with the new object when it's fine. - if err := store.Update(a.GetObject()); err != nil { - t.Fatal(err) - } - } - return false, nil, nil - } -} - -// SeedTestData returns Clients and Informers populated with the -// given Data. -// nolint: golint -func SeedTestData(t *testing.T, ctx context.Context, d Data) (Clients, Informers) { - c := Clients{ - Kube: fakekubeclient.Get(ctx), - Pipeline: fakepipelineclient.Get(ctx), - Resource: fakeresourceclient.Get(ctx), - CloudEvents: cloudeventclient.Get(ctx), - } - // Every time a resource is modified, change the metadata.resourceVersion. - PrependResourceVersionReactor(&c.Pipeline.Fake) - - i := Informers{ - PipelineRun: fakepipelineruninformer.Get(ctx), - Pipeline: fakepipelineinformer.Get(ctx), - TaskRun: faketaskruninformer.Get(ctx), - CustomRun: fakeCustomRunInformer.Get(ctx), - Task: faketaskinformer.Get(ctx), - Pod: fakepodinformer.Get(ctx), - ConfigMap: fakeconfigmapinformer.Get(ctx), - ServiceAccount: fakeserviceaccountinformer.Get(ctx), - } - - // Attach reactors that add resource mutations to the appropriate - // informer index, and simulate optimistic concurrency failures when - // the resource version is mismatched. - c.Pipeline.PrependReactor("*", "pipelineruns", AddToInformer(t, i.PipelineRun.Informer().GetIndexer())) - for _, pr := range d.PipelineRuns { - pr := pr.DeepCopy() // Avoid assumptions that the informer's copy is modified. 
- if _, err := c.Pipeline.TektonV1().PipelineRuns(pr.Namespace).Create(ctx, pr, metav1.CreateOptions{}); err != nil { - t.Fatal(err) - } - } - c.Pipeline.PrependReactor("*", "pipelines", AddToInformer(t, i.Pipeline.Informer().GetIndexer())) - for _, p := range d.Pipelines { - p := p.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.Pipeline.TektonV1().Pipelines(p.Namespace).Create(ctx, p, metav1.CreateOptions{}); err != nil { - t.Fatal(err) - } - } - c.Pipeline.PrependReactor("*", "taskruns", AddToInformer(t, i.TaskRun.Informer().GetIndexer())) - for _, tr := range d.TaskRuns { - tr := tr.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.Pipeline.TektonV1().TaskRuns(tr.Namespace).Create(ctx, tr, metav1.CreateOptions{}); err != nil { - t.Fatal(err) - } - } - c.Pipeline.PrependReactor("*", "tasks", AddToInformer(t, i.Task.Informer().GetIndexer())) - for _, ta := range d.Tasks { - ta := ta.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.Pipeline.TektonV1().Tasks(ta.Namespace).Create(ctx, ta, metav1.CreateOptions{}); err != nil { - t.Fatal(err) - } - } - c.Pipeline.PrependReactor("*", "customruns", AddToInformer(t, i.CustomRun.Informer().GetIndexer())) - for _, run := range d.CustomRuns { - run := run.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.Pipeline.TektonV1beta1().CustomRuns(run.Namespace).Create(ctx, run, metav1.CreateOptions{}); err != nil { - t.Fatal(err) - } - } - c.Kube.PrependReactor("*", "pods", AddToInformer(t, i.Pod.Informer().GetIndexer())) - for _, p := range d.Pods { - p := p.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.Kube.CoreV1().Pods(p.Namespace).Create(ctx, p, metav1.CreateOptions{}); err != nil { - t.Fatal(err) - } - } - for _, n := range d.Namespaces { - n := n.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.Kube.CoreV1().Namespaces().Create(ctx, n, metav1.CreateOptions{}); err != nil { - t.Fatal(err) - } - } - c.Kube.PrependReactor("*", "configmaps", AddToInformer(t, i.ConfigMap.Informer().GetIndexer())) - for _, cm := range d.ConfigMaps { - cm := cm.DeepCopy() // Avoid assumptions that the informer's copy is modified. - if _, err := c.Kube.CoreV1().ConfigMaps(cm.Namespace).Create(ctx, cm, metav1.CreateOptions{}); err != nil { - t.Fatal(err) - } - } - c.Kube.PrependReactor("*", "serviceaccounts", AddToInformer(t, i.ServiceAccount.Informer().GetIndexer())) - for _, sa := range d.ServiceAccounts { - sa := sa.DeepCopy() // Avoid assumptions that the informer's copy is modified. 
- if _, err := c.Kube.CoreV1().ServiceAccounts(sa.Namespace).Create(ctx, sa, metav1.CreateOptions{}); err != nil {
- t.Fatal(err)
- }
- }
- c.Pipeline.ClearActions()
- c.Kube.ClearActions()
- return c, i
-}
-
-type ResourceVersionReactor struct {
- count int64
-}
-
-func (r *ResourceVersionReactor) Handles(action ktesting.Action) bool {
- body := func(o runtime.Object) bool {
- objMeta, err := meta.Accessor(o)
- if err != nil {
- return false
- }
- val := atomic.AddInt64(&r.count, 1)
- objMeta.SetResourceVersion(fmt.Sprintf("%05d", val))
- return false
- }
-
- switch o := action.(type) {
- case ktesting.CreateActionImpl:
- return body(o.GetObject())
- case ktesting.UpdateActionImpl:
- return body(o.GetObject())
- default:
- return false
- }
-}
-
-// React is a no-op function.
-func (r *ResourceVersionReactor) React(action ktesting.Action) (handled bool, ret runtime.Object, err error) {
- return false, nil, nil
-}
-
-var _ ktesting.Reactor = (*ResourceVersionReactor)(nil)
-
-// PrependResourceVersionReactor will instrument a client-go testing Fake
-// with a reactor that simulates resourceVersion changes on mutations.
-// This does not work with patches.
-func PrependResourceVersionReactor(f *ktesting.Fake) {
- f.ReactionChain = append([]ktesting.Reactor{&ResourceVersionReactor{}}, f.ReactionChain...)
-}