diff --git a/README.md b/README.md index 81b0d08a133..d0c98a59086 100644 --- a/README.md +++ b/README.md @@ -30,6 +30,7 @@ The CRDs involved are: * [PipelineParams](#pipelineparams) * [TaskRun](#taskrun) * [PipelineRun](#pipelinerun) +* [Resources](#resources) High level details of this design: @@ -41,6 +42,7 @@ High level details of this design: easily is powerful (e.g. see failures easily, dig into logs, e.g. like [the Jenkins test analyzer plugin](https://wiki.jenkins.io/display/JENKINS/Test+Results+Analyzer+Plugin)) * [Tasks](#tasks) can depend on artifacts, output and parameters created by other tasks. +* [Resources](#resources) are the artifacts used as inputs and outputs of TaskRuns. ## Task @@ -57,9 +59,7 @@ with additional input types and clearly defined outputs. `Pipeline` describes a graph of [Tasks](#task) to execute. It defines the DAG and expresses how all inputs (including [PipelineParams](#pipelineparams) and outputs -from previous `Tasks`) feed into each `Task`. It allows for fan in and fan out, and -ordering can be expressed explicitly using `prev` and `next`, or it can be inferred -from a `Task’s` inputs. +from previous `Tasks`) feed into each `Task`. Dependencies between parameters or inputs/outputs are expressed as references to k8s objects. @@ -70,9 +70,7 @@ can be invoked with many different instances of `PipelineParams`, which can allo for scenarios such as running against PRs and against a user’s personal setup. `PipelineParams` can control: -* What **sources** the `Pipeline` runs against * Which **serviceAccount** to use (provided to all tasks) -* What **artifact** stores are used (e.g. Docker registries) * Where **results** are stored (e.g. in GCS) ## TaskRun @@ -129,3 +127,16 @@ completes (or fails). When the `PipelineRun` has completed, the `taskRuns` field will contain references to all `TaskRuns` which were executed and their next and previous `TaskRuns`. + +## Resources + +`Resources` in a pipeline are the set of objects that are used +as inputs and outputs of a `TaskRun`. + +* A `Resource` is created directly in a pipeline configuration and bound +to a `TaskRun` as an input and/or output source. +* The (optional) `passedConstraints` key on an input source defines a set of previous task names. +* When the `passedConstraints` key is specified on an input source, only the version of +the resource that passed through the listed tasks is used. +* The `passedConstraints` key allows `Tasks` to fan in and fan out, and ordering can be expressed explicitly +using this key, since a task that needs a resource from another task has to run after it; see the sketch below for an example.
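For illustration, here is a minimal sketch of how a `Resource` could be declared and then bound as a task input with `passedConstraints`. It is pieced together from the samples in this change (`config/samples/pipeline_v1beta1_resources.yaml` and `config/samples/pipeline_v1beta1_pipeline.yaml`); the `wizzbang` names come from those samples and are only illustrative.

```yaml
# A Resource definition: a git repo the pipeline can use as an input.
apiVersion: pipeline.knative.dev/v1beta1
kind: Resource
metadata:
  name: wizzbang-resources
  namespace: default
spec:
  resources:
  - name: wizzbang
    type: git
    params:
    - name: url
      value: github.com/wizzbangcorp/wizzbang
    - name: revision
      value: HEAD
```

```yaml
# Excerpt of a Pipeline spec binding that resource to a task. The
# passedConstraints entry names the previous task ("test") that the
# resource version must have passed through before this task runs.
tasks:
- name: buildPush
  taskRef:
    name: build-push-task
  inputSourceBindings:
  - name: workspace
    sourceKey: wizzbang
    passedConstraints: [test]
resources:
- name: wizzbang
  resourceRef:
    name: wizzbang
```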
\ No newline at end of file diff --git a/config/crds/pipeline_v1beta1_pipeline.yaml b/config/crds/pipeline_v1beta1_pipeline.yaml index b721a22a986..d1dab89896a 100644 --- a/config/crds/pipeline_v1beta1_pipeline.yaml +++ b/config/crds/pipeline_v1beta1_pipeline.yaml @@ -22,40 +22,60 @@ spec: type: object spec: properties: + resources: + items: + properties: + name: + type: string + resourceRef: + properties: + apiVersion: + type: string + name: + type: string + required: + - name + type: object + required: + - name + - resourceRef + type: object + type: array tasks: items: properties: - artifactStoreBindings: + inputSourceBindings: items: properties: - storeKey: + name: type: string - storeName: + passedConstraints: + items: + type: string + type: array + sourceKey: type: string required: - - storeName - - storeKey + - name + - sourceKey type: object type: array name: type: string - nextTasks: - items: - type: string - type: array - paramBindings: + outputSourceBindings: items: properties: - inputName: - type: string - taskName: + name: type: string - taskOutputName: + passedConstraints: + items: + type: string + type: array + sourceKey: type: string required: - - inputName - - taskName - - taskOutputName + - name + - sourceKey type: object type: array params: @@ -70,22 +90,6 @@ spec: - value type: object type: array - prevTasks: - items: - type: string - type: array - sourceBindings: - items: - properties: - inputName: - type: string - sourceKey: - type: string - required: - - inputName - - sourceKey - type: object - type: array taskRef: properties: apiVersion: @@ -102,6 +106,7 @@ spec: type: array required: - tasks + - resources type: object status: type: object diff --git a/config/crds/pipeline_v1beta1_pipelineparams.yaml b/config/crds/pipeline_v1beta1_pipelineparams.yaml index c60d6f71f80..ff3917e74d5 100644 --- a/config/crds/pipeline_v1beta1_pipelineparams.yaml +++ b/config/crds/pipeline_v1beta1_pipelineparams.yaml @@ -22,21 +22,6 @@ spec: type: object spec: properties: - artifactStores: - items: - properties: - name: - type: string - type: - type: string - url: - type: string - required: - - name - - type - - url - type: object - type: array results: properties: logs: @@ -84,32 +69,8 @@ spec: type: object serviceAccount: type: string - sources: - items: - properties: - branch: - type: string - commit: - type: string - name: - type: string - serviceAccount: - type: string - type: - type: string - url: - type: string - required: - - name - - type - - url - - branch - type: object - type: array required: - serviceAccount - - sources - - artifactStores - results type: object status: diff --git a/config/crds/pipeline_v1beta1_pipelinerun.yaml b/config/crds/pipeline_v1beta1_pipelinerun.yaml index c8beb0556ff..8863ec0dd9c 100644 --- a/config/crds/pipeline_v1beta1_pipelinerun.yaml +++ b/config/crds/pipeline_v1beta1_pipelinerun.yaml @@ -76,6 +76,25 @@ spec: - lastTransitionTime type: object type: array + resourceVersion: + items: + properties: + resourceRef: + properties: + apiVersion: + type: string + name: + type: string + required: + - name + type: object + version: + type: string + required: + - resourceRef + - version + type: object + type: array taskRuns: items: properties: diff --git a/config/crds/pipeline_v1beta1_resource.yaml b/config/crds/pipeline_v1beta1_resource.yaml new file mode 100644 index 00000000000..d59f2116a65 --- /dev/null +++ b/config/crds/pipeline_v1beta1_resource.yaml @@ -0,0 +1,61 @@ +apiVersion: apiextensions.k8s.io/v1beta1 +kind: CustomResourceDefinition +metadata: + 
creationTimestamp: null + labels: + controller-tools.k8s.io: "1.0" + name: resources.pipeline.knative.dev +spec: + group: pipeline.knative.dev + names: + kind: Resource + plural: resources + scope: Namespaced + validation: + openAPIV3Schema: + properties: + apiVersion: + type: string + kind: + type: string + metadata: + type: object + spec: + properties: + resources: + items: + properties: + name: + type: string + params: + items: + properties: + name: + type: string + value: + type: string + required: + - name + - value + type: object + type: array + type: + type: string + required: + - name + - type + - params + type: object + type: array + required: + - resources + type: object + status: + type: object + type: object + version: v1beta1 +status: + acceptedNames: + kind: "" + plural: "" + conditions: null diff --git a/config/crds/pipeline_v1beta1_task.yaml b/config/crds/pipeline_v1beta1_task.yaml index 62b9f2fad53..c95b7063b03 100644 --- a/config/crds/pipeline_v1beta1_task.yaml +++ b/config/crds/pipeline_v1beta1_task.yaml @@ -59,38 +59,52 @@ spec: properties: name: type: string - type: + value: type: string required: - name - - type + - value type: object type: array - sources: + resources: items: properties: name: type: string + resourceRef: + properties: + apiVersion: + type: string + name: + type: string + required: + - name + type: object required: - name + - resourceRef type: object type: array type: object outputs: properties: - artifacts: + resources: items: properties: name: type: string - storeKey: - type: string - type: - type: string + resourceRef: + properties: + apiVersion: + type: string + name: + type: string + required: + - name + type: object required: - name - - type - - storeKey + - resourceRef type: object type: array results: diff --git a/config/crds/pipeline_v1beta1_taskrun.yaml b/config/crds/pipeline_v1beta1_taskrun.yaml index 4cbedfb670b..3fb403ff5d4 100644 --- a/config/crds/pipeline_v1beta1_taskrun.yaml +++ b/config/crds/pipeline_v1beta1_taskrun.yaml @@ -29,53 +29,54 @@ spec: properties: name: type: string - type: + value: type: string required: - name - - type + - value type: object type: array - sources: + resourcesVersion: items: properties: - branch: - type: string - commit: - type: string - name: - type: string - serviceAccount: - type: string - type: - type: string - url: + resourceRef: + properties: + apiVersion: + type: string + name: + type: string + required: + - name + type: object + version: type: string required: - - name - - type - - url - - branch + - resourceRef + - version type: object type: array required: - - sources + - resourcesVersion type: object outputs: properties: - artifacts: + resources: items: properties: name: type: string - storeKey: - type: string - type: - type: string + resourceRef: + properties: + apiVersion: + type: string + name: + type: string + required: + - name + type: object required: - name - - type - - storeKey + - resourceRef type: object type: array results: @@ -150,14 +151,6 @@ spec: type: object trigger: properties: - nextTasks: - items: - type: string - type: array - prevTasks: - items: - type: string - type: array triggerRef: properties: name: diff --git a/config/samples/pipeline_v1beta1_pipeline.yaml b/config/samples/pipeline_v1beta1_pipeline.yaml index 8e0666d6b0c..7936d8f83e4 100644 --- a/config/samples/pipeline_v1beta1_pipeline.yaml +++ b/config/samples/pipeline_v1beta1_pipeline.yaml @@ -5,48 +5,56 @@ metadata: namespace: default spec: tasks: - - name: 'test' - taskRef: - name: test-wizzbang-task - 
sourceBindings: - - inputName: 'repoUnderTest' - sourceKey: 'wizzbang' - nextTasks: ['buildPush'] - - name: 'buildPush' - taskRef: - name: build-push-task - sourceBindings: - - inputName: 'workspace' - sourceKey: 'wizzbang' - artifactStoreBindings: - - storeName: 'registry' - storeKey: 'stagingRegistry' - params: - - name: 'pathToDockerfile' - value: 'build/Dockerfile' - - name: 'integrationTest' - taskRef: - name: integration-test-wizzbang-task - sourceBindings: - - inputName: 'repoUnderTest' - sourceKey: 'wizzbang' - paramBindings: - - inputName: testImage - taskName: buildPush - taskOutputName: builtImage - - name: 'deploy' - taskRef: - name: deploy-with-helm - params: - - name: pathToHelmCharts - value: deploy/helm - sourceBindings: - - inputName: 'repoToDeploy' - sourceKey: 'wizzbang' - clusterBindings: - - clusterName: prod - paramBindings: - - inputName: image - taskName: buildPush - taskOutputName: builtImage - prevTasks: ['integrationTest'] + - name: 'test' + taskRef: + name: test-wizzbang-task + inputSourceBindings: + - name: repoUnderTest + sourceKey: wizzbang + - name: 'buildPush' + taskRef: + name: build-push-task + inputSourceBindings: + - name: 'workspace' + sourceKey: 'wizzbang' + passedConstraints: [test-wizzbang-task] + outputSourceBindings: + - name: registry + sourceKey: wizzbangStagingImage + params: + - name: pathToDockerfile + value: build/Dockerfile + - name: integrationTest + taskRef: + name: integration-test-wizzbang-task + inputSourceBindings: + - name: repoUnderTest + sourceKey: wizzbang + passedConstraints: [test] + outputSourceBindings: + - name: registry + sourceKey: wizzbangStagingImage + passedConstraints: [build-push-task] + - name: deploy + taskRef: + name: deploy-with-helm + params: + - name: pathToHelmCharts + value: deploy/helm + inputSourceBindings: + - name: repoToDeploy + sourceKey: wizzbang + passedConstraints: [integration-test-wizzbang-task] + clusterBindings: + - clusterName: prod + outputSourceBindings: + - name: registry + sourceKey: wizzbangStagingImage + passedConstraints: [build-push-task] + resources: + - name: wizzbang + resourceRef: + name: wizzbang + - name: wizzbangStagingImage + resourceRef: + name: wizzbangStagingImage \ No newline at end of file diff --git a/config/samples/pipeline_v1beta1_pipelineparams.yaml b/config/samples/pipeline_v1beta1_pipelineparams.yaml index 0b3cd85913e..790aa3ab4b5 100644 --- a/config/samples/pipeline_v1beta1_pipelineparams.yaml +++ b/config/samples/pipeline_v1beta1_pipelineparams.yaml @@ -5,19 +5,6 @@ metadata: namespace: default spec: serviceAccount: 'wizzbangPipelineServiceAccount' - sources: - - name: 'wizzbang' - type: 'github' - url: 'github.com/wizzbangcorp/wizzbang' - branch: 'master' - commit: 'HEAD' - artifactStores: - - name: 'prodRegistry' - type: 'imageRegistry' - url: 'gcr.io/wizzbang' - - name: 'stagingRegistry' - type: 'imageRegistry' - url: 'gcr.io/wizzbang-staging' results: runs: name: 'runsBucket' diff --git a/config/samples/pipeline_v1beta1_pipelinerun.yaml b/config/samples/pipeline_v1beta1_pipelinerun.yaml index 27f05697d06..fdf032624f9 100644 --- a/config/samples/pipeline_v1beta1_pipelinerun.yaml +++ b/config/samples/pipeline_v1beta1_pipelinerun.yaml @@ -14,16 +14,8 @@ status: taskRuns: - taskRef: name: test-wizzbang-task-run-sd8f8dfasdfasdfas - nextTasks: - - taskRef: - name: build-push-task-run-sd8f8dfasdfasdfas - prevTasks: [] - taskRef: name: build-push-task-run-sd8f8dfasdfasdfas - nextTasks: [] - prevTasks: - - taskRef: - name: test-wizzbang-task-run-sd8f8dfasdfasdfas conditions: -
type: Started status: "True" diff --git a/config/samples/pipeline_v1beta1_resources.yaml b/config/samples/pipeline_v1beta1_resources.yaml new file mode 100644 index 00000000000..ab7a1e0f9dd --- /dev/null +++ b/config/samples/pipeline_v1beta1_resources.yaml @@ -0,0 +1,19 @@ +apiVersion: pipeline.knative.dev/v1beta1 +kind: Resource +metadata: + name: wizzbang-resources + namespace: default +spec: + resources: + - name: wizzbang + type: git + params: + - name: url + value: github.com/wizzbangcorp/wizzbang + - name: revision + value: HEAD + - name: wizzbangStagingImage + type: image + params: + - name: url + value: gcr.io/wizzbang-staging \ No newline at end of file diff --git a/config/samples/pipeline_v1beta1_task.yaml b/config/samples/pipeline_v1beta1_task.yaml index fc55b23ce51..a6e246568ca 100644 --- a/config/samples/pipeline_v1beta1_task.yaml +++ b/config/samples/pipeline_v1beta1_task.yaml @@ -5,16 +5,18 @@ metadata: namespace: default spec: inputs: - sources: - - name: workspace + resources: + - resourceRef: + name: resource-name + name: workspace params: - - name: PATH_TO_DOCKERFILE - type: string + - name: PATH_TO_DOCKERFILE + value: string outputs: - artifacts: - - name: builtImage - type: image - storeKey: registry # registy is the name of the ArtifactStore + resources: + - resourceRef: + name: registry + name: builtImage # registy is the name of the ArtifactStore buildSpec: template: name: kaniko diff --git a/config/samples/pipeline_v1beta1_taskrun.yaml b/config/samples/pipeline_v1beta1_taskrun.yaml index 60cdc8f483b..273290621c7 100644 --- a/config/samples/pipeline_v1beta1_taskrun.yaml +++ b/config/samples/pipeline_v1beta1_taskrun.yaml @@ -10,24 +10,20 @@ spec: triggerRef: type: PipelineRun name: wizzbangPipeline-sd8f8dfasdfasdfas - prevTasks: [""] - nextTasks: ["build-push-sd8f8dfasdfasdfas"] inputs: - sources: - - name: 'wizzbang' - type: 'github' - url: 'github.com/wizzbangcorp/wizzbang' - branch: 'master' - commit: 'HEAD' + resourcesVersion: + - resourceRef: + name: wizzbang + version: HEAD params: - - name: 'image' - type: 'string' - value: 'gcr.io/wizzbang-staging/wizzbang@234324k32j432k32l4k2a' + - name: 'image' + type: 'string' + value: 'gcr.io/wizzbang-staging/wizzbang@234324k32j432k32l4k2a' outputs: artifacts: - - name: builtImage - type: image - storeKey: registry # registy is the name of the ArtifactStore + - name: builtImage + type: image + storeKey: registry # registy is the name of the ArtifactStore results: runs: name: 'runsBucket' diff --git a/crds.png b/crds.png index f37eaffb72f..46ca008216a 100644 Binary files a/crds.png and b/crds.png differ diff --git a/examples/build_task.yaml b/examples/build_task.yaml index 8a88a3da4b6..8bf9549d357 100644 --- a/examples/build_task.yaml +++ b/examples/build_task.yaml @@ -5,16 +5,18 @@ metadata: namespace: default spec: inputs: - sources: - - name: workspace + resources: + - resourceRef: + name: resource-name + name: workspace params: - name: pathToDockerFile - type: string + value: string outputs: - artifacts: - - name: builtImage - type: image - storeKey: registry + resources: + - resourceRef: + name: registry + name: builtImage buildSpec: template: name: kaniko diff --git a/examples/deploy_tasks.yaml b/examples/deploy_tasks.yaml index c0261b20a08..0b11ba519ab 100644 --- a/examples/deploy_tasks.yaml +++ b/examples/deploy_tasks.yaml @@ -5,15 +5,17 @@ metadata: namespace: default spec: inputs: - sources: - - name: workspace + resources: + - resourceRef: + name: resource-name + name: workspace params: - name: pathToHelmCharts 
- type: string + value: string - name: helmArgs - type: string + value: string - name: image - type: string + value: string cluster: - name: clusterName buildSpec: @@ -30,14 +32,15 @@ metadata: namespace: default spec: inputs: - sources: - - name: workspace - type: string + resources: + - resourceRef: + name: resource-name + name: workspace params: - name: kubectlArgs - type: string + value: string - name: pathToFiles - type: string + value: string cluster: - name: targetCluster buildSpec: diff --git a/examples/pipelineparams.yaml b/examples/pipelineparams.yaml index a9cc5b7ce0c..6493b890cd4 100644 --- a/examples/pipelineparams.yaml +++ b/examples/pipelineparams.yaml @@ -5,31 +5,6 @@ metadata: namespace: default spec: serviceAccount: 'demoServiceAccount' - sources: - - name: 'guestbook' - type: 'github' - url: 'github.com/kubernetes/examples' - serviceAccount: 'githubServiceAccount' - branch: 'master' - commit: 'HEAD' - - name: 'redis-docker' - type: 'github' - url: 'github.com/GoogleCloudPlatform/redis-docker/blob/master/4/debian9/4.0/Dockerfile' - serviceAccount: 'githubServiceAccount' - branch: 'master' - commit: 'HEAD' - - name: 'kritis' - type: 'github' - url: 'github.com/grafeas/kritis' - branch: 'master' - commit: 'HEAD' - artifactStores: - - name: 'prodRegistry' - type: 'imageRegistry' - url: 'gcr.io/demo' - - name: 'stagingRegistry' - type: 'imageRegistry' - url: 'gcr.io/demo-staging' clusters: - name: 'testCluster' type: 'gke' diff --git a/examples/pipelines/guestbook-resources.yaml b/examples/pipelines/guestbook-resources.yaml new file mode 100644 index 00000000000..93e61f1a79c --- /dev/null +++ b/examples/pipelines/guestbook-resources.yaml @@ -0,0 +1,30 @@ +apiVersion: pipeline.knative.dev/v1beta1 +kind: Resource +metadata: + name: guestbook-resources-sample + namespace: default +spec: + resources: + - name: guestbook + type: git + params: + - name: url + value: github.com/kubernetes/examples + - name: revision + value: HEAD + - name: serviceAccount + value: githubServiceAccount + - name: redis-docker + type: git + params: + - name: url + value: github.com/GoogleCloudPlatform/redis-docker/blob/master/4/debian9/4.0/Dockerfile + - name: serviceAccount + value: githubServiceAccount + - name: revision + value: HEAD + - name: stagingRegistry + type: image + params: + - name: url + value: gcr.io/demo-staging \ No newline at end of file diff --git a/examples/pipelines/guestbook.yaml b/examples/pipelines/guestbook.yaml index 2a3b5087375..087b311b2e2 100644 --- a/examples/pipelines/guestbook.yaml +++ b/examples/pipelines/guestbook.yaml @@ -8,76 +8,88 @@ spec: - name: build-guestbook # 1.a Build guestbook go sample code. taskRef: name: build-push - sourceBindings: - - inputName: workspace + inputSourceBindings: + - name: workspace sourceKey: guestbook - artifactStoreBindings: - - storeName: registry - storeKey: stagingRegistry - builtImage: gb-frontend # TODO Add Commit SHA + outputSourceBindings: + - name: registry + sourceKey: stagingRegistry params: - name: pathToDockerfile value: guestbook-go/Dockerfile - name: build-redis # 1.b Build and push redis docker image. taskRef: name: build-push - sourceBindings: - - inputName: workspace + inputSourceBindings: + - name: workspace sourceKey: redis-docker # TODO Add Commit SHA - artifactStoreBindings: - - storeName: registry - storeKey: stagingRegistry - builtImage: redis4 + outputSourceBindings: + - name: registry + sourceKey: stagingRegistry params: - name: pathToDockerfile value: 4/debian9/4.0/Dockerfile - name: deploy-bundle-test # 2. 
Deploy GuestBook and Redis to test cluster taskRef: name: deploy-with-kubectl - sourceBindings: - - inputName: workspace - sourceKey: guestbook + inputSourceBindings: + - name: workspace + sourceKey: guestbook + passedConstraints: + - build-guestbook + - build-redis + - name: workspace + sourceKey: redis-docker + passedConstraints: + - build-push params: - name: pathToFiles value: guestbook/all-in-one/guestbook-all-in-one.yaml clusterBindings: - clusterName: test - prevTasks: - - build-redis4 - - build-guestbook - name: int-test-osx # 3.a Run Integration tests for osx taskRef: name: integrationTestInDocker - sourceBindings: - - inputName: workspace + inputSourceBindings: + - name: workspace sourceKey: guestbook + passedConstraints: + - deploy-bundle-test params: - name: dockerBuildFile value: guestbook-int/Dockerfile - prevTasks: - - deploy-test - name: int-test-linux # 3.b Run Integration tests for linux taskRef: name: integration-test-in-docker - sourceBindings: - - inputName: workspace + inputSourceBindings: + - name: workspace sourceKey: guestbook + passedConstraints: + - deploy-bundle-test params: - name: dockerBuildFile value: guestbook-int/Dockerfile - prevTasks: - - deploy-test - name: deploy-bundle-staging # 4. Deploy GuestBook and Redis to staging cluster taskRef: name: deploy-with-kubectl - sourceBindings: - - inputName: workspace + inputSourceBindings: + - name: workspace sourceKey: guestbook + passedConstraints: + - int-test-osx + - int-test-linux params: - name: pathToFiles value: guestbook/all-in-one/guestbook-all-in-one.yaml clusterBindings: - clusterName: staging - prevTasks: - - int-test-osx - - int-test-linux + resources: + - name: guestbook + resourceRef: + name: guestbook + - name: redis-docker + resourceRef: + name: redis-docker + - name: stagingRegistry + resourceRef: + name: stagingRegistry \ No newline at end of file diff --git a/examples/pipelines/kritis-pipeline.png b/examples/pipelines/kritis-pipeline.png index b85359496c1..f1bf790fe5a 100644 Binary files a/examples/pipelines/kritis-pipeline.png and b/examples/pipelines/kritis-pipeline.png differ diff --git a/examples/pipelines/kritis-resources.yaml b/examples/pipelines/kritis-resources.yaml new file mode 100644 index 00000000000..ecd46baddb0 --- /dev/null +++ b/examples/pipelines/kritis-resources.yaml @@ -0,0 +1,26 @@ +apiVersion: pipeline.knative.dev/v1beta1 +kind: Resource +metadata: + name: kritis-resources-sample + namespace: default +spec: + resources: + - name: kritis-app-github-resource + type: git + params: + - name: url + value: https://github.com/grafeas/kritis + - name: revision + value: master + - name: kritis-test-github-resource + type: git + params: + - name: revision + value: master + - name: url + value: https://github.com/grafeas/kritis-test + - name: stagingRegistry-resource + type: image + params: + - name: url + value: gcr.io/staging-image \ No newline at end of file diff --git a/examples/pipelines/kritis.yaml b/examples/pipelines/kritis.yaml index dbed6c603c5..84804076619 100644 --- a/examples/pipelines/kritis.yaml +++ b/examples/pipelines/kritis.yaml @@ -5,51 +5,58 @@ metadata: namespace: default spec: tasks: - - name: unit-test-kritis # 1. Run unit Tests - taskRef: - name: make - sourceBindings: - - inputName: workspace - sourceKey: kritis - params: - - name: makeTarget - value: test - - name: push-kritis # 2.
Build And Push Tests - taskRef: - name: build-push - sourceBindings: - - inputName: workspace - sourceKey: kritis - artifactStoreBindings: - - storeName: registry - storeKey: stagingRegistry - builtImage: kritis # TODO Add Commit SHA - params: - - name: pathToDockerfile - value: deploy/Dockerfile - prevTasks: ['unit-test-kritis'] - - name: deploy-test-env # 3. Finally Deploy to Test environment - taskRef: - name: deploy-with-helm - sourceBindings: - - inputName: workspace - sourceKey: kritis - paramBindings: # Implicit dependency on buildPush task. - - inputName: testImage - taskName: buildPush - taskOutputName: builtImage - params: - - name: pathToHelmCharts - value: kritis-charts - clusterBindings: - - clusterName: test - nextTasks: ['integration-test'] - - name: integration-test # 4. Run Integration Tests in test cluster - taskRef: - name: integration-test-in-docker - sourceBindings: - - inputName: workspace - sourceKey: kritis - params: - - name: testArgs - value: "-e REMOTE_INTEGRATION=true" + - name: unit-test-kritis # 1. Run unit Tests + taskRef: + name: make + inputSourceBindings: + - name: workspace + sourceKey: kritis-app-github + params: + - name: makeTarget + value: test + - name: push-kritis # 2. Build And Push Tests + taskRef: + name: build-push + inputSourceBindings: + - name: workspace + sourceKey: kritis-app-github + passedConstraints: + - make + outputSourceBindings: + - name: registry + sourceKey: stagingRegistry + params: + - name: pathToDockerfile + value: deploy/Dockerfile + - name: deploy-test-env # 3. Finally Deploy to Test environment + taskRef: + name: deploy-with-helm + inputSourceBindings: + - name: registry + sourceKey: stagingRegistry + passedConstraints: [build-push] + params: + - name: pathToHelmCharts + value: kritis-charts + clusterBindings: + - clusterName: test + - name: integration-test # 4. Run Integration Tests in test cluster + taskRef: + name: integration-test-in-docker + inputSourceBindings: + - name: workspace + sourceKey: kritis-test-github + passedConstraints: [deploy-with-helm] + params: + - name: testArgs + value: "-e REMOTE_INTEGRATION=true" + resources: + - name: kritis-app-github + resourceRef: + name: kritis-app-github-resource + - name: kritis-test-github + resourceRef: + name: kritis-test-github-resource + - name: stagingRegistry + resourceRef: + name: stagingRegistry-resource diff --git a/examples/test_tasks.yaml b/examples/test_tasks.yaml index 93d834c9f63..62082a3372d 100644 --- a/examples/test_tasks.yaml +++ b/examples/test_tasks.yaml @@ -5,11 +5,13 @@ metadata: namespace: default spec: inputs: - sources: - - name: workspace + resources: + - resourceRef: + name: resource-name + name: workspace params: - - name: makeTarget - type: string + - name: makeTarget + value: string outputs: results: - name: testResults @@ -29,13 +31,15 @@ metadata: namespace: default spec: inputs: - sources: - - name: workspace + resources: + - resourceRef: + name: resource-name + name: workspace params: - name: testImage - type: string + value: string - name: testArgs - type: string + value: string outputs: results: - name: testResults diff --git a/pkg/apis/pipeline/v1beta1/git_source.go b/pkg/apis/pipeline/v1beta1/git_source.go new file mode 100644 index 00000000000..1735c31c491 --- /dev/null +++ b/pkg/apis/pipeline/v1beta1/git_source.go @@ -0,0 +1,44 @@ +/* +Copyright 2018 The Knative Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1beta1 + +// GitResource is an endpoint from which to get data which is required +// by a Build/Task for context (e.g. a repo from which to build an image). +type GitResource struct { + Name string `json:"name"` + Type string `json:"type"` + URL string `json:"url"` + // Git revision (branch, tag, commit SHA or ref) to clone. See + // https://git-scm.com/docs/gitrevisions#_specifying_revisions for more + // information. + Revision string `json:"revision"` + ServiceAccount string `json:"serviceAccount,omitempty"` +} + +func (s GitResource) getName() string { + return s.Name +} + +func (s GitResource) getType() ResourceType { + return ResourceTypeGit +} + +func (s GitResource) getVersion() string { + return s.Revision +} + +func (s GitResource) getParams() []Param { return []Param{} } diff --git a/pkg/apis/pipeline/v1beta1/image_source.go b/pkg/apis/pipeline/v1beta1/image_source.go new file mode 100644 index 00000000000..1880f146b7b --- /dev/null +++ b/pkg/apis/pipeline/v1beta1/image_source.go @@ -0,0 +1,40 @@ +/* +Copyright 2018 The Knative Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1beta1 + +// ImageResource defines an endpoint where artifacts can be stored, such as images. +type ImageResource struct { + Name string `json:"name"` + // TODO: maybe an enum, with values like 'registry', GCS bucket + Type string `json:"type"` + URL string `json:"url"` + Digest string `json:"digest"` +} + +func (s ImageResource) getName() string { + return s.Name +} + +func (s ImageResource) getType() ResourceType { + return ResourceTypeImage +} + +func (s ImageResource) getVersion() string { + return s.Digest +} + +func (s ImageResource) getParams() []Param { return []Param{} } diff --git a/pkg/apis/pipeline/v1beta1/pipeline_types.go b/pkg/apis/pipeline/v1beta1/pipeline_types.go index 5ac1dda10f9..a92e58bd67c 100644 --- a/pkg/apis/pipeline/v1beta1/pipeline_types.go +++ b/pkg/apis/pipeline/v1beta1/pipeline_types.go @@ -22,7 +22,8 @@ import ( // PipelineSpec defines the desired state of PipeLine. type PipelineSpec struct { - Tasks []PipelineTask `json:"tasks"` + Tasks []PipelineTask `json:"tasks"` + Sources []PipelineResource `json:"resources"` } // PipelineStatus defines the observed state of Pipeline @@ -50,22 +51,11 @@ type Pipeline struct { // PipelineTask defines a task in a Pipeline, passing inputs from both // PipelineParams and from the output of previous tasks. 
type PipelineTask struct { - Name string `json:"name"` - TaskRef TaskRef `json:"taskRef"` - SourceBindings []SourceBinding `json:"sourceBindings,omitempty"` - ArtifactStoreBindings []ArtifactStoreBinding `json:"artifactStoreBindings,omitempty"` - Params []PipelineTaskParam `json:"params,omitempty"` - ParamBindings []PipelineTaskParamBinding `json:"paramBindings,omitempty"` - - NextTasks []string `json:"nextTasks,omitempty"` - PrevTasks []string `json:"prevTasks,omitempty"` -} - -// PipelineTaskParamBinding is used to bind the outputs of a Task to the inputs of another Task. -type PipelineTaskParamBinding struct { - InputName string `json:"inputName"` - TaskName string `json:"taskName"` - TaskOutputName string `json:"taskOutputName"` + Name string `json:"name"` + TaskRef TaskRef `json:"taskRef"` + InputSourceBindings []SourceBinding `json:"inputSourceBindings,omitempty"` + OutputSourceBindings []SourceBinding `json:"outputSourceBindings,omitempty"` + Params []Param `json:"params,omitempty"` } // PipelineTaskParam is used to provide arbitrary string parameters to a Task. @@ -77,19 +67,13 @@ type PipelineTaskParam struct { // SourceBinding is used to bind a Source from a PipelineParams to a source required // as an input for a task. type SourceBinding struct { - // InputName is the string the Task will use to identify this source in its inputs. - InputName string `json:"inputName"` + // InputName is the string the Task will use to identify this resource in its inputs. + Name string `json:"name"` // SourceKey is the string that the PipelineParams will use to identify this source. SourceKey string `json:"sourceKey"` -} - -// ArtifactStoreBinding is used to bind an ArtifactStore from a PipelineParams to -// artifacts that will be produced as output by a task. -type ArtifactStoreBinding struct { - // InputName is the string the Task will use to identify this source in its outputs. - StoreName string `json:"storeName"` - // StoreKey is the string that the PipelineParams will use to identify this artifact store. - StoreKey string `json:"storeKey"` + // TODO: validate the passedConstraints values match previous Task names + // PassedConstraints is the list of Task names that the resource has to pass through. + PassedConstraints []string `json:"passedConstraints,omitempty"` } // TaskRef can be used to refer to a specific instance of a task. @@ -101,6 +85,20 @@ type TaskRef struct { APIVersion string `json:"apiVersion,omitempty"` } +// ResourceRef can be used to refer to a specific instance of a Resource +type ResourceRef struct { + // Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names + Name string `json:"name"` + // API version of the referent + APIVersion string `json:"apiVersion,omitempty"` +} + +// PipelineResource defines set of resources required by all Tasks in the pipeline. 
+type PipelineResource struct { + Name string `json:"name"` + ResourceRef ResourceRef `json:"resourceRef"` +} + // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object // PipelineList contains a list of Pipeline diff --git a/pkg/apis/pipeline/v1beta1/pipelineparams_types.go b/pkg/apis/pipeline/v1beta1/pipelineparams_types.go index 0cb66b6079d..7fb7b6008cf 100644 --- a/pkg/apis/pipeline/v1beta1/pipelineparams_types.go +++ b/pkg/apis/pipeline/v1beta1/pipelineparams_types.go @@ -22,34 +22,8 @@ import ( // PipelineParamsSpec is the spec for a Pipeline resource type PipelineParamsSpec struct { - ServiceAccount string `json:"serviceAccount"` - Sources []Source `json:"sources"` - ArtifactStores []ArtifactStore `json:"artifactStores"` - Results Results `json:"results"` -} - -// SourceType represents the type of endpoint the Source is, so that the -// controller will know this Source should be fetched and optionally what -// additional metatdata should be provided for it. -type SourceType string - -const ( - // SourceTypeGitHub indicates that this source is a GitHub repo. - SourceTypeGitHub SourceType = "github" - - // SourceTypeGCS indicates that this source is a GCS bucket. - SourceTypeGCS SourceType = "gcs" -) - -// Source is an endpoint from which to get data which is required -// by a Build/Task for context (e.g. a repo from which to build an image). -type Source struct { - Name string `json:"name"` - Type SourceType `json:"type"` - URL string `json:"url"` - Branch string `json:"branch"` - Commit string `json:"commit,omitempty"` - ServiceAccount string `json:"serviceAccount,omitempty"` + ServiceAccount string `json:"serviceAccount"` + Results Results `json:"results"` } // PipelineParamsStatus defines the observed state of PipelineParams @@ -71,14 +45,6 @@ type PipelineParams struct { Status PipelineParamsStatus `json:"status,omitempty"` } -// ArtifactStore defines an endpoint where artifacts can be stored, such as images. -type ArtifactStore struct { - Name string `json:"name"` - // TODO: maybe an enum, with values like 'registry', GCS bucket - Type string `json:"type"` - URL string `json:"url"` -} - // Results tells a pipeline where to persist the results of runnign the pipeline. type Results struct { // Runs is used to store the yaml/json of TaskRuns and PipelineRuns. diff --git a/pkg/apis/pipeline/v1beta1/pipelinerun_types.go b/pkg/apis/pipeline/v1beta1/pipelinerun_types.go index fe8cbe171b4..5185b6464b5 100644 --- a/pkg/apis/pipeline/v1beta1/pipelinerun_types.go +++ b/pkg/apis/pipeline/v1beta1/pipelinerun_types.go @@ -63,8 +63,9 @@ type PipelineTriggerRef struct { // PipelineRunStatus defines the observed state of PipelineRun type PipelineRunStatus struct { - TaskRuns []PipelineTaskRun `json:"taskRuns,omitempty"` - Conditions []PipelineRunCondition `json:"conditions"` + TaskRuns []PipelineTaskRun `json:"taskRuns,omitempty"` + ResourceVersion []ResourceVersion `json:"resourceVersion,omitempty"` + Conditions []PipelineRunCondition `json:"conditions"` } // +genclient @@ -96,23 +97,6 @@ type PipelineTaskRun struct { Name string `json:"name"` } -// PipelineTaskRunRef refers to a TaskRun and also indicates which TaskRuns -// executed before and after it. -type PipelineTaskRunRef struct { - TaskRunRef - NextTasks []TaskRunRef `json:"nextTasks"` - PrevTasks []TaskRunRef `json:"prevTasks"` -} - -// TaskRunRef can be used to refer to a specific instance of a TaskRun. 
-// Copied from CrossVersionObjectReference: https://github.com/kubernetes/kubernetes/blob/169df7434155cbbc22f1532cba8e0a9588e29ad8/pkg/apis/autoscaling/types.go#L64 -type TaskRunRef struct { - // Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names - Name string `json:"name"` - // API version of the referent - APIVersion string `json:"apiVersion,omitempty"` -} - // PipelineRunConditionType indicates the status of the execution of the PipelineRun. type PipelineRunConditionType string diff --git a/pkg/apis/pipeline/v1beta1/resource_types.go b/pkg/apis/pipeline/v1beta1/resource_types.go new file mode 100644 index 00000000000..8cd16e3857c --- /dev/null +++ b/pkg/apis/pipeline/v1beta1/resource_types.go @@ -0,0 +1,95 @@ +/* +Copyright 2018 The Knative Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1beta1 + +import ( + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// ResourceType represents the type of endpoint the resource is, so that the +// controller will know this resource should be fetched and optionally what +// additional metatdata should be provided for it. +type ResourceType string + +const ( + // ResourceTypeGit indicates that this source is a GitHub repo. + ResourceTypeGit ResourceType = "git" + + // ResourceTypeGCS indicates that this source is a GCS bucket. + ResourceTypeGCS ResourceType = "gcs" + + // ResourceTypeImage indicates that this source is a docker Image. + ResourceTypeImage ResourceType = "image" +) + +// ResourceSuper interface to be implemented by different resource types +type ResourceSuper interface { + getName() string + getType() ResourceType + getParams() []Param + getVersion() string +} + +// ResourceStatus should implment status for resource +type ResourceStatus struct { + // INSERT ADDITIONAL STATUS FIELD - define observed state of cluster + // Important: Run "make" to regenerate code after modifying this file +} + +// ResourceSpec defines set of resources required by all Tasks in the pipeline. 
+type ResourceSpec struct { + Resources []ResourceItem `json:"resources"` +} + +// ResourceItem is an individual resource object definition +type ResourceItem struct { + Name string `json:"name"` + Type ResourceType `json:"type"` + Params []Param `json:"params"` +} + +// +genclient +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object + +// Resource is the Schema for the resources API +// +k8s:openapi-gen=true +type Resource struct { + metav1.TypeMeta `json:",inline"` + metav1.ObjectMeta `json:"metadata,omitempty"` + + Spec ResourceSpec `json:"spec,omitempty"` + Status ResourceStatus `json:"status,omitempty"` +} + +// ResourceVersion defines the desired state of version of the resource +type ResourceVersion struct { + ResourceRef ResourceRef `json:"resourceRef"` + Version string `json:"version"` +} + +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object + +// ResourceList contains a list of Resources +type ResourceList struct { + metav1.TypeMeta `json:",inline"` + metav1.ListMeta `json:"metadata,omitempty"` + Items []Resource `json:"items"` +} + +func init() { + SchemeBuilder.Register(&Resource{}, &ResourceList{}) +} diff --git a/pkg/apis/pipeline/v1beta1/task_types.go b/pkg/apis/pipeline/v1beta1/task_types.go index 37bebfec517..ebd1024c3df 100644 --- a/pkg/apis/pipeline/v1beta1/task_types.go +++ b/pkg/apis/pipeline/v1beta1/task_types.go @@ -50,39 +50,32 @@ type Task struct { // Inputs are the requirements that a task needs to run a Build. type Inputs struct { - Sources []SourceInput `json:"sources,omitempty"` - Params []Param `json:"params,omitempty"` + Sources []Source `json:"resources,omitempty"` + Params []Param `json:"params,omitempty"` } -// SourceInput is data which is required by a Build/Task for context +// Source is data which is required by a Build/Task for context // (e.g. a repo from which to build an image). The name of the input will be // used as the name of the volume containing this context which will be mounted -// into the container executed by the Build/Task, e.g. a SourceInput with the +// into the container executed by the Build/Task, e.g. a Source with the // name "workspace" would be mounted into "/workspace". -type SourceInput struct { - Name string `json:"name"` +type Source struct { + Name string `json:"name"` + ResouceRef ResourceRef `json:"resourceRef"` } -// ParamType represents the type of the parameter. -type ParamType string - -const ( - // ParamTypeString indicates this parameter is just a string. - ParamTypeString ParamType = "string" -) - // Param defines arbitrary parameters needed by a task beyond typed inputs -// such as Sources. +// such as resources. type Param struct { - Name string `json:"name"` - Type ParamType `json:"type"` + Name string `json:"name"` + Value string `json:"value"` } // Outputs allow a task to declare what data the Build/Task will be producing, // i.e. results such as logs and artifacts such as images. type Outputs struct { - Results []TestResult `json:"results,omitempty"` - Artifacts []Artifact `json:"artifacts,omitempty"` + Results []TestResult `json:"results,omitempty"` + Sources []Source `json:"resources,omitempty"` } // TestResult allows a task to specify the location where test logs @@ -94,23 +87,6 @@ type TestResult struct { Path string `json:"path"` } -// ArtifactType indicates what type of artifact store this is, so the controller -// will know how to publish artifacts from it. 
-type ArtifactType string - -const ( - // ArtifactTypeImage indicates that this artifact is a container image. - ArtifactTypeImage ArtifactType = "image" -) - -// Artifact allows a Task to describe what artifacts it will be producing -// and specify where they will be stored. -type Artifact struct { - Name string `json:"name"` - Type ArtifactType `json:"type"` - StoreKey string `json:"storeKey"` -} - // BuildSpec describes how to create a Build for this Task. // A BuildSpec will contain either a Template or a series of Steps. type BuildSpec struct { diff --git a/pkg/apis/pipeline/v1beta1/taskrun_types.go b/pkg/apis/pipeline/v1beta1/taskrun_types.go index 2267e9713ed..924e9f0961d 100644 --- a/pkg/apis/pipeline/v1beta1/taskrun_types.go +++ b/pkg/apis/pipeline/v1beta1/taskrun_types.go @@ -32,15 +32,13 @@ type TaskRunSpec struct { // TaskRunInputs holds the input values that this task was invoked with. type TaskRunInputs struct { - Sources []Source `json:"sources"` - Params []Param `json:"params,omitempty"` + Resources []ResourceVersion `json:"resourcesVersion"` + Params []Param `json:"params,omitempty"` } // TaskTrigger defines a webhook style trigger to start a TaskRun type TaskTrigger struct { TriggerRef TaskTriggerRef `json:"triggerRef"` - PrevTasks []string `json:"prevTasks,omitempty"` - NextTasks []string `json:"nextTasks,omitempty"` } // TaskTriggerType indicates the mechanism by which this TaskRun was created. diff --git a/pkg/apis/pipeline/v1beta1/zz_generated.deepcopy.go b/pkg/apis/pipeline/v1beta1/zz_generated.deepcopy.go index b19d4ad340c..c9c85441818 100644 --- a/pkg/apis/pipeline/v1beta1/zz_generated.deepcopy.go +++ b/pkg/apis/pipeline/v1beta1/zz_generated.deepcopy.go @@ -25,73 +25,57 @@ import ( ) // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Artifact) DeepCopyInto(out *Artifact) { +func (in *BuildSpec) DeepCopyInto(out *BuildSpec) { *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Artifact. -func (in *Artifact) DeepCopy() *Artifact { - if in == nil { - return nil + if in.Steps != nil { + in, out := &in.Steps, &out.Steps + *out = make([]v1.Container, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } } - out := new(Artifact) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ArtifactStore) DeepCopyInto(out *ArtifactStore) { - *out = *in + in.Template.DeepCopyInto(&out.Template) return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactStore. -func (in *ArtifactStore) DeepCopy() *ArtifactStore { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BuildSpec. +func (in *BuildSpec) DeepCopy() *BuildSpec { if in == nil { return nil } - out := new(ArtifactStore) + out := new(BuildSpec) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ArtifactStoreBinding) DeepCopyInto(out *ArtifactStoreBinding) { +func (in *GitResource) DeepCopyInto(out *GitResource) { *out = *in return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArtifactStoreBinding. 
-func (in *ArtifactStoreBinding) DeepCopy() *ArtifactStoreBinding { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GitResource. +func (in *GitResource) DeepCopy() *GitResource { if in == nil { return nil } - out := new(ArtifactStoreBinding) + out := new(GitResource) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BuildSpec) DeepCopyInto(out *BuildSpec) { +func (in *ImageResource) DeepCopyInto(out *ImageResource) { *out = *in - if in.Steps != nil { - in, out := &in.Steps, &out.Steps - *out = make([]v1.Container, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - in.Template.DeepCopyInto(&out.Template) return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BuildSpec. -func (in *BuildSpec) DeepCopy() *BuildSpec { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ImageResource. +func (in *ImageResource) DeepCopy() *ImageResource { if in == nil { return nil } - out := new(BuildSpec) + out := new(ImageResource) in.DeepCopyInto(out) return out } @@ -101,7 +85,7 @@ func (in *Inputs) DeepCopyInto(out *Inputs) { *out = *in if in.Sources != nil { in, out := &in.Sources, &out.Sources - *out = make([]SourceInput, len(*in)) + *out = make([]Source, len(*in)) copy(*out, *in) } if in.Params != nil { @@ -130,9 +114,9 @@ func (in *Outputs) DeepCopyInto(out *Outputs) { *out = make([]TestResult, len(*in)) copy(*out, *in) } - if in.Artifacts != nil { - in, out := &in.Artifacts, &out.Artifacts - *out = make([]Artifact, len(*in)) + if in.Sources != nil { + in, out := &in.Sources, &out.Sources + *out = make([]Source, len(*in)) copy(*out, *in) } return @@ -230,7 +214,7 @@ func (in *PipelineParams) DeepCopyInto(out *PipelineParams) { *out = *in out.TypeMeta = in.TypeMeta in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) - in.Spec.DeepCopyInto(&out.Spec) + out.Spec = in.Spec out.Status = in.Status return } @@ -305,16 +289,6 @@ func (in *PipelineParamsRef) DeepCopy() *PipelineParamsRef { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *PipelineParamsSpec) DeepCopyInto(out *PipelineParamsSpec) { *out = *in - if in.Sources != nil { - in, out := &in.Sources, &out.Sources - *out = make([]Source, len(*in)) - copy(*out, *in) - } - if in.ArtifactStores != nil { - in, out := &in.ArtifactStores, &out.ArtifactStores - *out = make([]ArtifactStore, len(*in)) - copy(*out, *in) - } out.Results = in.Results return } @@ -361,6 +335,23 @@ func (in *PipelineRef) DeepCopy() *PipelineRef { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineResource) DeepCopyInto(out *PipelineResource) { + *out = *in + out.ResourceRef = in.ResourceRef + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineResource. +func (in *PipelineResource) DeepCopy() *PipelineResource { + if in == nil { + return nil + } + out := new(PipelineResource) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *PipelineRun) DeepCopyInto(out *PipelineRun) { *out = *in @@ -466,6 +457,11 @@ func (in *PipelineRunStatus) DeepCopyInto(out *PipelineRunStatus) { *out = make([]PipelineTaskRun, len(*in)) copy(*out, *in) } + if in.ResourceVersion != nil { + in, out := &in.ResourceVersion, &out.ResourceVersion + *out = make([]ResourceVersion, len(*in)) + copy(*out, *in) + } if in.Conditions != nil { in, out := &in.Conditions, &out.Conditions *out = make([]PipelineRunCondition, len(*in)) @@ -496,6 +492,11 @@ func (in *PipelineSpec) DeepCopyInto(out *PipelineSpec) { (*in)[i].DeepCopyInto(&(*out)[i]) } } + if in.Sources != nil { + in, out := &in.Sources, &out.Sources + *out = make([]PipelineResource, len(*in)) + copy(*out, *in) + } return } @@ -529,34 +530,23 @@ func (in *PipelineStatus) DeepCopy() *PipelineStatus { func (in *PipelineTask) DeepCopyInto(out *PipelineTask) { *out = *in out.TaskRef = in.TaskRef - if in.SourceBindings != nil { - in, out := &in.SourceBindings, &out.SourceBindings + if in.InputSourceBindings != nil { + in, out := &in.InputSourceBindings, &out.InputSourceBindings *out = make([]SourceBinding, len(*in)) - copy(*out, *in) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } } - if in.ArtifactStoreBindings != nil { - in, out := &in.ArtifactStoreBindings, &out.ArtifactStoreBindings - *out = make([]ArtifactStoreBinding, len(*in)) - copy(*out, *in) + if in.OutputSourceBindings != nil { + in, out := &in.OutputSourceBindings, &out.OutputSourceBindings + *out = make([]SourceBinding, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } } if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]PipelineTaskParam, len(*in)) - copy(*out, *in) - } - if in.ParamBindings != nil { - in, out := &in.ParamBindings, &out.ParamBindings - *out = make([]PipelineTaskParamBinding, len(*in)) - copy(*out, *in) - } - if in.NextTasks != nil { - in, out := &in.NextTasks, &out.NextTasks - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.PrevTasks != nil { - in, out := &in.PrevTasks, &out.PrevTasks - *out = make([]string, len(*in)) + *out = make([]Param, len(*in)) copy(*out, *in) } return @@ -589,76 +579,187 @@ func (in *PipelineTaskParam) DeepCopy() *PipelineTaskParam { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PipelineTaskParamBinding) DeepCopyInto(out *PipelineTaskParamBinding) { +func (in *PipelineTaskRun) DeepCopyInto(out *PipelineTaskRun) { *out = *in return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskParamBinding. -func (in *PipelineTaskParamBinding) DeepCopy() *PipelineTaskParamBinding { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskRun. +func (in *PipelineTaskRun) DeepCopy() *PipelineTaskRun { if in == nil { return nil } - out := new(PipelineTaskParamBinding) + out := new(PipelineTaskRun) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PipelineTaskRun) DeepCopyInto(out *PipelineTaskRun) { +func (in *PipelineTriggerRef) DeepCopyInto(out *PipelineTriggerRef) { *out = *in return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskRun. 
-func (in *PipelineTaskRun) DeepCopy() *PipelineTaskRun { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTriggerRef. +func (in *PipelineTriggerRef) DeepCopy() *PipelineTriggerRef { if in == nil { return nil } - out := new(PipelineTaskRun) + out := new(PipelineTriggerRef) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PipelineTaskRunRef) DeepCopyInto(out *PipelineTaskRunRef) { +func (in *Resource) DeepCopyInto(out *Resource) { *out = *in - out.TaskRunRef = in.TaskRunRef - if in.NextTasks != nil { - in, out := &in.NextTasks, &out.NextTasks - *out = make([]TaskRunRef, len(*in)) - copy(*out, *in) + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + out.Status = in.Status + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Resource. +func (in *Resource) DeepCopy() *Resource { + if in == nil { + return nil + } + out := new(Resource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *Resource) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c } - if in.PrevTasks != nil { - in, out := &in.PrevTasks, &out.PrevTasks - *out = make([]TaskRunRef, len(*in)) + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ResourceItem) DeepCopyInto(out *ResourceItem) { + *out = *in + if in.Params != nil { + in, out := &in.Params, &out.Params + *out = make([]Param, len(*in)) copy(*out, *in) } return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskRunRef. -func (in *PipelineTaskRunRef) DeepCopy() *PipelineTaskRunRef { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceItem. +func (in *ResourceItem) DeepCopy() *ResourceItem { if in == nil { return nil } - out := new(PipelineTaskRunRef) + out := new(ResourceItem) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PipelineTriggerRef) DeepCopyInto(out *PipelineTriggerRef) { +func (in *ResourceList) DeepCopyInto(out *ResourceList) { *out = *in + out.TypeMeta = in.TypeMeta + out.ListMeta = in.ListMeta + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]Resource, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } return } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTriggerRef. -func (in *PipelineTriggerRef) DeepCopy() *PipelineTriggerRef { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceList. +func (in *ResourceList) DeepCopy() *ResourceList { if in == nil { return nil } - out := new(PipelineTriggerRef) + out := new(ResourceList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *ResourceList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ResourceRef) DeepCopyInto(out *ResourceRef) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceRef. +func (in *ResourceRef) DeepCopy() *ResourceRef { + if in == nil { + return nil + } + out := new(ResourceRef) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ResourceSpec) DeepCopyInto(out *ResourceSpec) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = make([]ResourceItem, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceSpec. +func (in *ResourceSpec) DeepCopy() *ResourceSpec { + if in == nil { + return nil + } + out := new(ResourceSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ResourceStatus) DeepCopyInto(out *ResourceStatus) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceStatus. +func (in *ResourceStatus) DeepCopy() *ResourceStatus { + if in == nil { + return nil + } + out := new(ResourceStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ResourceVersion) DeepCopyInto(out *ResourceVersion) { + *out = *in + out.ResourceRef = in.ResourceRef + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceVersion. +func (in *ResourceVersion) DeepCopy() *ResourceVersion { + if in == nil { + return nil + } + out := new(ResourceVersion) in.DeepCopyInto(out) return out } @@ -701,6 +802,7 @@ func (in *Results) DeepCopy() *Results { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Source) DeepCopyInto(out *Source) { *out = *in + out.ResouceRef = in.ResouceRef return } @@ -717,6 +819,11 @@ func (in *Source) DeepCopy() *Source { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *SourceBinding) DeepCopyInto(out *SourceBinding) { *out = *in + if in.PassedConstraints != nil { + in, out := &in.PassedConstraints, &out.PassedConstraints + *out = make([]string, len(*in)) + copy(*out, *in) + } return } @@ -730,22 +837,6 @@ func (in *SourceBinding) DeepCopy() *SourceBinding { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SourceInput) DeepCopyInto(out *SourceInput) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceInput. -func (in *SourceInput) DeepCopy() *SourceInput { - if in == nil { - return nil - } - out := new(SourceInput) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *StepRun) DeepCopyInto(out *StepRun) { *out = *in @@ -887,9 +978,9 @@ func (in *TaskRunCondition) DeepCopy() *TaskRunCondition { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *TaskRunInputs) DeepCopyInto(out *TaskRunInputs) { *out = *in - if in.Sources != nil { - in, out := &in.Sources, &out.Sources - *out = make([]Source, len(*in)) + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = make([]ResourceVersion, len(*in)) copy(*out, *in) } if in.Params != nil { @@ -943,27 +1034,11 @@ func (in *TaskRunList) DeepCopyObject() runtime.Object { return nil } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TaskRunRef) DeepCopyInto(out *TaskRunRef) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TaskRunRef. -func (in *TaskRunRef) DeepCopy() *TaskRunRef { - if in == nil { - return nil - } - out := new(TaskRunRef) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *TaskRunSpec) DeepCopyInto(out *TaskRunSpec) { *out = *in out.TaskRef = in.TaskRef - in.Trigger.DeepCopyInto(&out.Trigger) + out.Trigger = in.Trigger in.Inputs.DeepCopyInto(&out.Inputs) in.Outputs.DeepCopyInto(&out.Outputs) out.Results = in.Results @@ -1047,16 +1122,6 @@ func (in *TaskStatus) DeepCopy() *TaskStatus { func (in *TaskTrigger) DeepCopyInto(out *TaskTrigger) { *out = *in out.TriggerRef = in.TriggerRef - if in.PrevTasks != nil { - in, out := &in.PrevTasks, &out.PrevTasks - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.NextTasks != nil { - in, out := &in.NextTasks, &out.NextTasks - *out = make([]string, len(*in)) - copy(*out, *in) - } return }