diff --git a/Gopkg.lock b/Gopkg.lock index a8e7111b83f..5788a63544a 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -1356,10 +1356,8 @@ "github.com/google/go-containerregistry/pkg/v1", "github.com/google/go-containerregistry/pkg/v1/empty", "github.com/google/go-containerregistry/pkg/v1/layout", - "github.com/google/go-containerregistry/pkg/v1/partial", "github.com/google/go-containerregistry/pkg/v1/random", "github.com/google/go-containerregistry/pkg/v1/remote", - "github.com/google/go-containerregistry/pkg/v1/types", "github.com/hashicorp/go-multierror", "github.com/hashicorp/golang-lru", "github.com/jenkins-x/go-scm/scm", diff --git a/pkg/apis/pipeline/v1alpha1/merge.go b/pkg/apis/pipeline/v1alpha1/merge.go index 8566d84aa5a..7a1638facbe 100644 --- a/pkg/apis/pipeline/v1alpha1/merge.go +++ b/pkg/apis/pipeline/v1alpha1/merge.go @@ -17,74 +17,14 @@ limitations under the License. package v1alpha1 import ( - "encoding/json" - + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" v1 "k8s.io/api/core/v1" - "k8s.io/apimachinery/pkg/util/strategicpatch" ) // MergeStepsWithStepTemplate takes a possibly nil container template and a // list of steps, merging each of the steps with the container template, if // it's not nil, and returning the resulting list. +// Deprecated func MergeStepsWithStepTemplate(template *v1.Container, steps []Step) ([]Step, error) { - if template == nil { - return steps, nil - } - - // We need JSON bytes to generate a patch to merge the step containers - // onto the template container, so marshal the template. - templateAsJSON, err := json.Marshal(template) - if err != nil { - return nil, err - } - // We need to do a three-way merge to actually merge the template and - // step containers, so we need an empty container as the "original" - emptyAsJSON, err := json.Marshal(&v1.Container{}) - if err != nil { - return nil, err - } - - for i, s := range steps { - // Marshal the step's to JSON - stepAsJSON, err := json.Marshal(s.Container) - if err != nil { - return nil, err - } - - // Get the patch meta for Container, which is needed for generating and applying the merge patch. - patchSchema, err := strategicpatch.NewPatchMetaFromStruct(template) - - if err != nil { - return nil, err - } - - // Create a merge patch, with the empty JSON as the original, the step JSON as the modified, and the template - // JSON as the current - this lets us do a deep merge of the template and step containers, with awareness of - // the "patchMerge" tags. - patch, err := strategicpatch.CreateThreeWayMergePatch(emptyAsJSON, stepAsJSON, templateAsJSON, patchSchema, true) - if err != nil { - return nil, err - } - - // Actually apply the merge patch to the template JSON. - mergedAsJSON, err := strategicpatch.StrategicMergePatchUsingLookupPatchMeta(templateAsJSON, patch, patchSchema) - if err != nil { - return nil, err - } - - // Unmarshal the merged JSON to a Container pointer, and return it. 
- merged := &v1.Container{} - err = json.Unmarshal(mergedAsJSON, merged) - if err != nil { - return nil, err - } - - // If the container's args is nil, reset it to empty instead - if merged.Args == nil && s.Args != nil { - merged.Args = []string{} - } - - steps[i] = Step{Container: *merged} - } - return steps, nil + return v1alpha2.MergeStepsWithStepTemplate(template, steps) } diff --git a/pkg/apis/pipeline/v1alpha1/param_types.go b/pkg/apis/pipeline/v1alpha1/param_types.go index 2c6d35bcfe1..c901c0a2fef 100644 --- a/pkg/apis/pipeline/v1alpha1/param_types.go +++ b/pkg/apis/pipeline/v1alpha1/param_types.go @@ -17,113 +17,37 @@ limitations under the License. package v1alpha1 import ( - "context" - "encoding/json" - "fmt" - - "github.com/tektoncd/pipeline/pkg/substitution" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" ) // ParamSpec defines arbitrary parameters needed beyond typed inputs (such as // resources). Parameter values are provided by users as inputs on a TaskRun // or PipelineRun. -type ParamSpec struct { - // Name declares the name by which a parameter is referenced. - Name string `json:"name"` - // Type is the user-specified type of the parameter. The possible types - // are currently "string" and "array", and "string" is the default. - // +optional - Type ParamType `json:"type,omitempty"` - // Description is a user-facing description of the parameter that may be - // used to populate a UI. - // +optional - Description string `json:"description,omitempty"` - // Default is the value a parameter takes if no input value is supplied. If - // default is set, a Task may be executed without a supplied value for the - // parameter. - // +optional - Default *ArrayOrString `json:"default,omitempty"` -} - -func (pp *ParamSpec) SetDefaults(ctx context.Context) { - if pp != nil && pp.Type == "" { - if pp.Default != nil { - // propagate the parsed ArrayOrString's type to the parent ParamSpec's type - pp.Type = pp.Default.Type - } else { - // ParamTypeString is the default value (when no type can be inferred from the default value) - pp.Type = ParamTypeString - } - } -} +type ParamSpec = v1alpha2.ParamSpec // ResourceParam declares a string value to use for the parameter called Name, and is used in // the specific context of PipelineResources. -type ResourceParam struct { - Name string `json:"name"` - Value string `json:"value"` -} +type ResourceParam = v1alpha2.ResourceParam // Param declares an ArrayOrString to use for the parameter called name. -type Param struct { - Name string `json:"name"` - Value ArrayOrString `json:"value"` -} +type Param = v1alpha2.Param // ParamType indicates the type of an input parameter; // Used to distinguish between a single string and an array of strings. -type ParamType string +type ParamType = v1alpha2.ParamType // Valid ParamTypes: const ( - ParamTypeString ParamType = "string" - ParamTypeArray ParamType = "array" + ParamTypeString ParamType = v1alpha2.ParamTypeString + ParamTypeArray ParamType = v1alpha2.ParamTypeArray ) // AllParamTypes can be used for ParamType validation. -var AllParamTypes = []ParamType{ParamTypeString, ParamTypeArray} +var AllParamTypes = v1alpha2.AllParamTypes // ArrayOrString is modeled after IntOrString in kubernetes/apimachinery: // ArrayOrString is a type that can hold a single string or string array. // Used in JSON unmarshalling so that a single JSON field can accept // either an individual string or an array of strings. -type ArrayOrString struct { - Type ParamType // Represents the stored type of ArrayOrString. 
- StringVal string - ArrayVal []string -} - -// UnmarshalJSON implements the json.Unmarshaller interface. -func (arrayOrString *ArrayOrString) UnmarshalJSON(value []byte) error { - if value[0] == '"' { - arrayOrString.Type = ParamTypeString - return json.Unmarshal(value, &arrayOrString.StringVal) - } - arrayOrString.Type = ParamTypeArray - return json.Unmarshal(value, &arrayOrString.ArrayVal) -} - -// MarshalJSON implements the json.Marshaller interface. -func (arrayOrString ArrayOrString) MarshalJSON() ([]byte, error) { - switch arrayOrString.Type { - case ParamTypeString: - return json.Marshal(arrayOrString.StringVal) - case ParamTypeArray: - return json.Marshal(arrayOrString.ArrayVal) - default: - return []byte{}, fmt.Errorf("impossible ArrayOrString.Type: %q", arrayOrString.Type) - } -} - -func (arrayOrString *ArrayOrString) ApplyReplacements(stringReplacements map[string]string, arrayReplacements map[string][]string) { - if arrayOrString.Type == ParamTypeString { - arrayOrString.StringVal = substitution.ApplyReplacements(arrayOrString.StringVal, stringReplacements) - } else { - var newArrayVal []string - for _, v := range arrayOrString.ArrayVal { - newArrayVal = append(newArrayVal, substitution.ApplyArrayReplacements(v, stringReplacements, arrayReplacements)...) - } - arrayOrString.ArrayVal = newArrayVal - } -} +type ArrayOrString = v1alpha2.ArrayOrString diff --git a/pkg/apis/pipeline/v1alpha1/resource_types.go b/pkg/apis/pipeline/v1alpha1/resource_types.go index 0659365fc22..2260426b16d 100644 --- a/pkg/apis/pipeline/v1alpha1/resource_types.go +++ b/pkg/apis/pipeline/v1alpha1/resource_types.go @@ -19,45 +19,42 @@ package v1alpha1 import ( "github.com/google/go-cmp/cmp" "github.com/tektoncd/pipeline/pkg/apis/pipeline" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" "golang.org/x/xerrors" - v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // PipelineResourceType represents the type of endpoint the pipelineResource is, so that the // controller will know this pipelineResource should be fetched and optionally what // additional metatdata should be provided for it. -type PipelineResourceType string +type PipelineResourceType = v1alpha2.PipelineResourceType var ( - AllowedOutputResources = map[PipelineResourceType]bool{ - PipelineResourceTypeStorage: true, - PipelineResourceTypeGit: true, - } + AllowedOutputResources = v1alpha2.AllowedOutputResources ) const ( // PipelineResourceTypeGit indicates that this source is a GitHub repo. - PipelineResourceTypeGit PipelineResourceType = "git" + PipelineResourceTypeGit PipelineResourceType = v1alpha2.PipelineResourceTypeGit // PipelineResourceTypeStorage indicates that this source is a storage blob resource. - PipelineResourceTypeStorage PipelineResourceType = "storage" + PipelineResourceTypeStorage PipelineResourceType = v1alpha2.PipelineResourceTypeStorage // PipelineResourceTypeImage indicates that this source is a docker Image. - PipelineResourceTypeImage PipelineResourceType = "image" + PipelineResourceTypeImage PipelineResourceType = v1alpha2.PipelineResourceTypeImage // PipelineResourceTypeCluster indicates that this source is a k8s cluster Image. - PipelineResourceTypeCluster PipelineResourceType = "cluster" + PipelineResourceTypeCluster PipelineResourceType = v1alpha2.PipelineResourceTypeCluster // PipelineResourceTypePullRequest indicates that this source is a SCM Pull Request. 
- PipelineResourceTypePullRequest PipelineResourceType = "pullRequest" + PipelineResourceTypePullRequest PipelineResourceType = v1alpha2.PipelineResourceTypePullRequest // PipelineResourceTypeCloudEvent indicates that this source is a cloud event URI - PipelineResourceTypeCloudEvent PipelineResourceType = "cloudEvent" + PipelineResourceTypeCloudEvent PipelineResourceType = v1alpha2.PipelineResourceTypeCloudEvent ) // AllResourceTypes can be used for validation to check if a provided Resource type is one of the known types. -var AllResourceTypes = []PipelineResourceType{PipelineResourceTypeGit, PipelineResourceTypeStorage, PipelineResourceTypeImage, PipelineResourceTypeCluster, PipelineResourceTypePullRequest, PipelineResourceTypeCloudEvent} +var AllResourceTypes = v1alpha2.AllResourceTypes // PipelineResourceInterface interface to be implemented by different PipelineResource types type PipelineResourceInterface interface { @@ -77,33 +74,10 @@ type PipelineResourceInterface interface { } // TaskModifier is an interface to be implemented by different PipelineResources -type TaskModifier interface { - GetStepsToPrepend() []Step - GetStepsToAppend() []Step - GetVolumes() []v1.Volume -} +type TaskModifier = v1alpha2.TaskModifier // InternalTaskModifier implements TaskModifier for resources that are built-in to Tekton Pipelines. -type InternalTaskModifier struct { - StepsToPrepend []Step - StepsToAppend []Step - Volumes []v1.Volume -} - -// GetStepsToPrepend returns a set of Steps to prepend to the Task. -func (tm *InternalTaskModifier) GetStepsToPrepend() []Step { - return tm.StepsToPrepend -} - -// GetStepsToAppend returns a set of Steps to append to the Task. -func (tm *InternalTaskModifier) GetStepsToAppend() []Step { - return tm.StepsToAppend -} - -// GetVolumes returns a set of Volumes to prepend to the Task pod. -func (tm *InternalTaskModifier) GetVolumes() []v1.Volume { - return tm.Volumes -} +type InternalTaskModifier = v1alpha2.InternalTaskModifier func checkStepNotAlreadyAdded(s Step, steps []Step) error { for _, step := range steps { @@ -118,6 +92,7 @@ func checkStepNotAlreadyAdded(s Step, steps []Step) error { // If steps with the same name exist in ts an error will be returned. If identical Volumes have // been added, they will not be added again. If Volumes with the same name but different contents // have been added, an error will be returned. +// FIXME(vdemeester) de-duplicate this func ApplyTaskModifier(ts *TaskSpec, tm TaskModifier) error { steps := tm.GetStepsToPrepend() for _, step := range steps { @@ -238,18 +213,7 @@ type PipelineResourceList struct { // PipelineResources within the type's definition, and when provided as an Input, the Name will be the // path to the volume mounted containing this PipelineResource as an input (e.g. // an input Resource named `workspace` will be mounted at `/workspace`). -type ResourceDeclaration struct { - // Name declares the name by which a resource is referenced in the - // definition. Resources may be referenced by name in the definition of a - // Task's steps. - Name string `json:"name"` - // Type is the type of this resource; - Type PipelineResourceType `json:"type"` - // TargetPath is the path in workspace directory where the resource - // will be copied. 
- // +optional - TargetPath string `json:"targetPath,omitempty"` -} +type ResourceDeclaration = v1alpha2.ResourceDeclaration // ResourceFromType returns an instance of the correct PipelineResource object type which can be // used to add input and ouput containers as well as volumes to a TaskRun's pod in order to realize diff --git a/pkg/apis/pipeline/v1alpha1/task_types.go b/pkg/apis/pipeline/v1alpha1/task_types.go index b0975c4369e..5c8f5b20d7f 100644 --- a/pkg/apis/pipeline/v1alpha1/task_types.go +++ b/pkg/apis/pipeline/v1alpha1/task_types.go @@ -19,6 +19,8 @@ package v1alpha1 import ( corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" ) func (t *Task) TaskSpec() TaskSpec { @@ -63,14 +65,7 @@ type TaskSpec struct { // Step embeds the Container type, which allows it to include fields not // provided by Container. -type Step struct { - corev1.Container - - // Script is the contents of an executable file to execute. - // - // If Script is not empty, the Step cannot have an Command or Args. - Script string `json:"script,omitempty"` -} +type Step = v1alpha2.Step // +genclient // +genclient:noStatus @@ -111,9 +106,7 @@ type Inputs struct { // the Task definition, and when provided as an Input, the Name will be the // path to the volume mounted containing this Resource as an input (e.g. // an input Resource named `workspace` will be mounted at `/workspace`). -type TaskResource struct { - ResourceDeclaration `json:",inline"` -} +type TaskResource = v1alpha2.TaskResource // Outputs allow a task to declare what data the Build/Task will be producing, // i.e. results such as logs and artifacts such as images. diff --git a/pkg/apis/pipeline/v1alpha1/zz_generated.deepcopy.go b/pkg/apis/pipeline/v1alpha1/zz_generated.deepcopy.go index 2d57103828e..2709b144c13 100644 --- a/pkg/apis/pipeline/v1alpha1/zz_generated.deepcopy.go +++ b/pkg/apis/pipeline/v1alpha1/zz_generated.deepcopy.go @@ -21,32 +21,12 @@ limitations under the License. package v1alpha1 import ( + v1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" ) -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ArrayOrString) DeepCopyInto(out *ArrayOrString) { - *out = *in - if in.ArrayVal != nil { - in, out := &in.ArrayVal, &out.ArrayVal - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArrayOrString. -func (in *ArrayOrString) DeepCopy() *ArrayOrString { - if in == nil { - return nil - } - out := new(ArrayOrString) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *ArtifactBucket) DeepCopyInto(out *ArtifactBucket) { *out = *in @@ -356,14 +336,14 @@ func (in *ConditionSpec) DeepCopyInto(out *ConditionSpec) { in.Check.DeepCopyInto(&out.Check) if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]ParamSpec, len(*in)) + *out = make([]v1alpha2.ParamSpec, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } } if in.Resources != nil { in, out := &in.Resources, &out.Resources - *out = make([]ResourceDeclaration, len(*in)) + *out = make([]v1alpha2.ResourceDeclaration, len(*in)) copy(*out, *in) } return @@ -437,12 +417,12 @@ func (in *Inputs) DeepCopyInto(out *Inputs) { *out = *in if in.Resources != nil { in, out := &in.Resources, &out.Resources - *out = make([]TaskResource, len(*in)) + *out = make([]v1alpha2.TaskResource, len(*in)) copy(*out, *in) } if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]ParamSpec, len(*in)) + *out = make([]v1alpha2.ParamSpec, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -460,43 +440,6 @@ func (in *Inputs) DeepCopy() *Inputs { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InternalTaskModifier) DeepCopyInto(out *InternalTaskModifier) { - *out = *in - if in.StepsToPrepend != nil { - in, out := &in.StepsToPrepend, &out.StepsToPrepend - *out = make([]Step, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.StepsToAppend != nil { - in, out := &in.StepsToAppend, &out.StepsToAppend - *out = make([]Step, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Volumes != nil { - in, out := &in.Volumes, &out.Volumes - *out = make([]v1.Volume, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InternalTaskModifier. -func (in *InternalTaskModifier) DeepCopy() *InternalTaskModifier { - if in == nil { - return nil - } - out := new(InternalTaskModifier) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Outputs) DeepCopyInto(out *Outputs) { *out = *in @@ -507,7 +450,7 @@ func (in *Outputs) DeepCopyInto(out *Outputs) { } if in.Resources != nil { in, out := &in.Resources, &out.Resources - *out = make([]TaskResource, len(*in)) + *out = make([]v1alpha2.TaskResource, len(*in)) copy(*out, *in) } return @@ -523,44 +466,6 @@ func (in *Outputs) DeepCopy() *Outputs { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Param) DeepCopyInto(out *Param) { - *out = *in - in.Value.DeepCopyInto(&out.Value) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Param. -func (in *Param) DeepCopy() *Param { - if in == nil { - return nil - } - out := new(Param) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParamSpec) DeepCopyInto(out *ParamSpec) { - *out = *in - if in.Default != nil { - in, out := &in.Default, &out.Default - *out = new(ArrayOrString) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParamSpec. 
-func (in *ParamSpec) DeepCopy() *ParamSpec { - if in == nil { - return nil - } - out := new(ParamSpec) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Pipeline) DeepCopyInto(out *Pipeline) { *out = *in @@ -795,7 +700,7 @@ func (in *PipelineResourceSpec) DeepCopyInto(out *PipelineResourceSpec) { *out = *in if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]ResourceParam, len(*in)) + *out = make([]v1alpha2.ResourceParam, len(*in)) copy(*out, *in) } if in.SecretParams != nil { @@ -936,7 +841,7 @@ func (in *PipelineRunSpec) DeepCopyInto(out *PipelineRunSpec) { } if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]Param, len(*in)) + *out = make([]v1alpha2.Param, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -1074,7 +979,7 @@ func (in *PipelineSpec) DeepCopyInto(out *PipelineSpec) { } if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]ParamSpec, len(*in)) + *out = make([]v1alpha2.ParamSpec, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -1131,7 +1036,7 @@ func (in *PipelineTask) DeepCopyInto(out *PipelineTask) { } if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]Param, len(*in)) + *out = make([]v1alpha2.Param, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -1154,7 +1059,7 @@ func (in *PipelineTaskCondition) DeepCopyInto(out *PipelineTaskCondition) { *out = *in if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]Param, len(*in)) + *out = make([]v1alpha2.Param, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -1369,38 +1274,6 @@ func (in *PullRequestResource) DeepCopy() *PullRequestResource { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ResourceDeclaration) DeepCopyInto(out *ResourceDeclaration) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceDeclaration. -func (in *ResourceDeclaration) DeepCopy() *ResourceDeclaration { - if in == nil { - return nil - } - out := new(ResourceDeclaration) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ResourceParam) DeepCopyInto(out *ResourceParam) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceParam. -func (in *ResourceParam) DeepCopy() *ResourceParam { - if in == nil { - return nil - } - out := new(ResourceParam) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *SecretParam) DeepCopyInto(out *SecretParam) { *out = *in @@ -1433,23 +1306,6 @@ func (in *SidecarState) DeepCopy() *SidecarState { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Step) DeepCopyInto(out *Step) { - *out = *in - in.Container.DeepCopyInto(&out.Container) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Step. 
-func (in *Step) DeepCopy() *Step { - if in == nil { - return nil - } - out := new(Step) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *StepState) DeepCopyInto(out *StepState) { *out = *in @@ -1543,23 +1399,6 @@ func (in *TaskRef) DeepCopy() *TaskRef { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TaskResource) DeepCopyInto(out *TaskResource) { - *out = *in - out.ResourceDeclaration = in.ResourceDeclaration - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TaskResource. -func (in *TaskResource) DeepCopy() *TaskResource { - if in == nil { - return nil - } - out := new(TaskResource) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *TaskResourceBinding) DeepCopyInto(out *TaskResourceBinding) { *out = *in @@ -1622,7 +1461,7 @@ func (in *TaskRunInputs) DeepCopyInto(out *TaskRunInputs) { } if in.Params != nil { in, out := &in.Params, &out.Params - *out = make([]Param, len(*in)) + *out = make([]v1alpha2.Param, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } @@ -1801,7 +1640,7 @@ func (in *TaskSpec) DeepCopyInto(out *TaskSpec) { } if in.Steps != nil { in, out := &in.Steps, &out.Steps - *out = make([]Step, len(*in)) + *out = make([]v1alpha2.Step, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } diff --git a/pkg/apis/pipeline/v1alpha2/doc.go b/pkg/apis/pipeline/v1alpha2/doc.go new file mode 100644 index 00000000000..6627a64a5ce --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/doc.go @@ -0,0 +1,23 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Package v1alpha1 contains API Schema definitions for the pipeline v1alpha1 API group +// +k8s:openapi-gen=true +// +k8s:deepcopy-gen=package,register +// +k8s:conversion-gen=github.com/tektoncd/pipeline/pkg/apis/pipeline +// +k8s:defaulter-gen=TypeMeta +// +groupName=tekton.dev +package v1alpha2 diff --git a/pkg/apis/pipeline/v1alpha2/merge.go b/pkg/apis/pipeline/v1alpha2/merge.go new file mode 100644 index 00000000000..a3333fc3836 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/merge.go @@ -0,0 +1,90 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package v1alpha2 + +import ( + "encoding/json" + + v1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/util/strategicpatch" +) + +// MergeStepsWithStepTemplate takes a possibly nil container template and a +// list of steps, merging each of the steps with the container template, if +// it's not nil, and returning the resulting list. +func MergeStepsWithStepTemplate(template *v1.Container, steps []Step) ([]Step, error) { + if template == nil { + return steps, nil + } + + // We need JSON bytes to generate a patch to merge the step containers + // onto the template container, so marshal the template. + templateAsJSON, err := json.Marshal(template) + if err != nil { + return nil, err + } + // We need to do a three-way merge to actually merge the template and + // step containers, so we need an empty container as the "original" + emptyAsJSON, err := json.Marshal(&v1.Container{}) + if err != nil { + return nil, err + } + + for i, s := range steps { + // Marshal the step's to JSON + stepAsJSON, err := json.Marshal(s.Container) + if err != nil { + return nil, err + } + + // Get the patch meta for Container, which is needed for generating and applying the merge patch. + patchSchema, err := strategicpatch.NewPatchMetaFromStruct(template) + + if err != nil { + return nil, err + } + + // Create a merge patch, with the empty JSON as the original, the step JSON as the modified, and the template + // JSON as the current - this lets us do a deep merge of the template and step containers, with awareness of + // the "patchMerge" tags. + patch, err := strategicpatch.CreateThreeWayMergePatch(emptyAsJSON, stepAsJSON, templateAsJSON, patchSchema, true) + if err != nil { + return nil, err + } + + // Actually apply the merge patch to the template JSON. + mergedAsJSON, err := strategicpatch.StrategicMergePatchUsingLookupPatchMeta(templateAsJSON, patch, patchSchema) + if err != nil { + return nil, err + } + + // Unmarshal the merged JSON to a Container pointer, and return it. + merged := &v1.Container{} + err = json.Unmarshal(mergedAsJSON, merged) + if err != nil { + return nil, err + } + + // If the container's args is nil, reset it to empty instead + if merged.Args == nil && s.Args != nil { + merged.Args = []string{} + } + + steps[i] = Step{Container: *merged} + } + return steps, nil +} diff --git a/pkg/apis/pipeline/v1alpha2/merge_test.go b/pkg/apis/pipeline/v1alpha2/merge_test.go new file mode 100644 index 00000000000..1f8ea6fbd4f --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/merge_test.go @@ -0,0 +1,115 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package v1alpha2 + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + corev1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/resource" +) + +func TestMergeStepsWithStepTemplate(t *testing.T) { + resourceQuantityCmp := cmp.Comparer(func(x, y resource.Quantity) bool { + return x.Cmp(y) == 0 + }) + + for _, tc := range []struct { + name string + template *corev1.Container + steps []Step + expected []Step + }{{ + name: "nil-template", + template: nil, + steps: []Step{{Container: corev1.Container{ + Image: "some-image", + }}}, + expected: []Step{{Container: corev1.Container{ + Image: "some-image", + }}}, + }, { + name: "not-overlapping", + template: &corev1.Container{ + Command: []string{"/somecmd"}, + }, + steps: []Step{{Container: corev1.Container{ + Image: "some-image", + }}}, + expected: []Step{{Container: corev1.Container{ + Command: []string{"/somecmd"}, + Image: "some-image", + }}}, + }, { + name: "overwriting-one-field", + template: &corev1.Container{ + Image: "some-image", + Command: []string{"/somecmd"}, + }, + steps: []Step{{Container: corev1.Container{ + Image: "some-other-image", + }}}, + expected: []Step{{Container: corev1.Container{ + Command: []string{"/somecmd"}, + Image: "some-other-image", + }}}, + }, { + name: "merge-and-overwrite-slice", + template: &corev1.Container{ + Env: []corev1.EnvVar{{ + Name: "KEEP_THIS", + Value: "A_VALUE", + }, { + Name: "SOME_KEY", + Value: "ORIGINAL_VALUE", + }}, + }, + steps: []Step{{Container: corev1.Container{ + Env: []corev1.EnvVar{{ + Name: "NEW_KEY", + Value: "A_VALUE", + }, { + Name: "SOME_KEY", + Value: "NEW_VALUE", + }}, + }}}, + expected: []Step{{Container: corev1.Container{ + Env: []corev1.EnvVar{{ + Name: "NEW_KEY", + Value: "A_VALUE", + }, { + Name: "KEEP_THIS", + Value: "A_VALUE", + }, { + Name: "SOME_KEY", + Value: "NEW_VALUE", + }}, + }}}, + }} { + t.Run(tc.name, func(t *testing.T) { + result, err := MergeStepsWithStepTemplate(tc.template, tc.steps) + if err != nil { + t.Errorf("expected no error. Got error %v", err) + } + + if d := cmp.Diff(tc.expected, result, resourceQuantityCmp); d != "" { + t.Errorf("merged steps don't match, diff: %s", d) + } + }) + } +} diff --git a/pkg/apis/pipeline/v1alpha2/param_types.go b/pkg/apis/pipeline/v1alpha2/param_types.go new file mode 100644 index 00000000000..59346995aab --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/param_types.go @@ -0,0 +1,127 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2 + +import ( + "context" + "encoding/json" + "fmt" +) + +// ParamSpec defines arbitrary parameters needed beyond typed inputs (such as +// resources). Parameter values are provided by users as inputs on a TaskRun +// or PipelineRun. +type ParamSpec struct { + // Name declares the name by which a parameter is referenced. + Name string `json:"name"` + // Type is the user-specified type of the parameter. The possible types + // are currently "string" and "array", and "string" is the default. 
+ // +optional + Type ParamType `json:"type,omitempty"` + // Description is a user-facing description of the parameter that may be + // used to populate a UI. + // +optional + Description string `json:"description,omitempty"` + // Default is the value a parameter takes if no input value is supplied. If + // default is set, a Task may be executed without a supplied value for the + // parameter. + // +optional + Default *ArrayOrString `json:"default,omitempty"` +} + +func (pp *ParamSpec) SetDefaults(ctx context.Context) { + if pp != nil && pp.Type == "" { + if pp.Default != nil { + // propagate the parsed ArrayOrString's type to the parent ParamSpec's type + pp.Type = pp.Default.Type + } else { + // ParamTypeString is the default value (when no type can be inferred from the default value) + pp.Type = ParamTypeString + } + } +} + +// ResourceParam declares a string value to use for the parameter called Name, and is used in +// the specific context of PipelineResources. +type ResourceParam struct { + Name string `json:"name"` + Value string `json:"value"` +} + +// Param declares an ArrayOrString to use for the parameter called name. +type Param struct { + Name string `json:"name"` + Value ArrayOrString `json:"value"` +} + +// ParamType indicates the type of an input parameter; +// Used to distinguish between a single string and an array of strings. +type ParamType string + +// Valid ParamTypes: +const ( + ParamTypeString ParamType = "string" + ParamTypeArray ParamType = "array" +) + +// AllParamTypes can be used for ParamType validation. +var AllParamTypes = []ParamType{ParamTypeString, ParamTypeArray} + +// ArrayOrString is modeled after IntOrString in kubernetes/apimachinery: + +// ArrayOrString is a type that can hold a single string or string array. +// Used in JSON unmarshalling so that a single JSON field can accept +// either an individual string or an array of strings. +type ArrayOrString struct { + Type ParamType // Represents the stored type of ArrayOrString. + StringVal string + ArrayVal []string +} + +// UnmarshalJSON implements the json.Unmarshaller interface. +func (arrayOrString *ArrayOrString) UnmarshalJSON(value []byte) error { + if value[0] == '"' { + arrayOrString.Type = ParamTypeString + return json.Unmarshal(value, &arrayOrString.StringVal) + } + arrayOrString.Type = ParamTypeArray + return json.Unmarshal(value, &arrayOrString.ArrayVal) +} + +// MarshalJSON implements the json.Marshaller interface. +func (arrayOrString ArrayOrString) MarshalJSON() ([]byte, error) { + switch arrayOrString.Type { + case ParamTypeString: + return json.Marshal(arrayOrString.StringVal) + case ParamTypeArray: + return json.Marshal(arrayOrString.ArrayVal) + default: + return []byte{}, fmt.Errorf("impossible ArrayOrString.Type: %q", arrayOrString.Type) + } +} + +func (arrayOrString *ArrayOrString) ApplyReplacements(stringReplacements map[string]string, arrayReplacements map[string][]string) { + if arrayOrString.Type == ParamTypeString { + arrayOrString.StringVal = ApplyReplacements(arrayOrString.StringVal, stringReplacements) + } else { + var newArrayVal []string + for _, v := range arrayOrString.ArrayVal { + newArrayVal = append(newArrayVal, ApplyArrayReplacements(v, stringReplacements, arrayReplacements)...) 
+ } + arrayOrString.ArrayVal = newArrayVal + } +} diff --git a/pkg/apis/pipeline/v1alpha2/param_types_test.go b/pkg/apis/pipeline/v1alpha2/param_types_test.go new file mode 100644 index 00000000000..b64cc2739c8 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/param_types_test.go @@ -0,0 +1,189 @@ +/* +Copyright 2019 The Tekton Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2_test + +import ( + "context" + "encoding/json" + "reflect" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + "github.com/tektoncd/pipeline/test/builder" +) + +func TestParamSpec_SetDefaults(t *testing.T) { + tests := []struct { + name string + before *v1alpha2.ParamSpec + defaultsApplied *v1alpha2.ParamSpec + }{{ + name: "inferred string type", + before: &v1alpha2.ParamSpec{ + Name: "parametername", + }, + defaultsApplied: &v1alpha2.ParamSpec{ + Name: "parametername", + Type: v1alpha2.ParamTypeString, + }, + }, { + name: "inferred type from default value", + before: &v1alpha2.ParamSpec{ + Name: "parametername", + Default: builder.ArrayOrString("an", "array"), + }, + defaultsApplied: &v1alpha2.ParamSpec{ + Name: "parametername", + Type: v1alpha2.ParamTypeArray, + Default: builder.ArrayOrString("an", "array"), + }, + }, { + name: "fully defined ParamSpec", + before: &v1alpha2.ParamSpec{ + Name: "parametername", + Type: v1alpha2.ParamTypeArray, + Description: "a description", + Default: builder.ArrayOrString("an", "array"), + }, + defaultsApplied: &v1alpha2.ParamSpec{ + Name: "parametername", + Type: v1alpha2.ParamTypeArray, + Description: "a description", + Default: builder.ArrayOrString("an", "array"), + }, + }} + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx := context.Background() + tc.before.SetDefaults(ctx) + if d := cmp.Diff(tc.before, tc.defaultsApplied); d != "" { + t.Errorf("ParamSpec.SetDefaults/%s (-want, +got) = %v", tc.name, d) + } + }) + } +} + +func TestArrayOrString_ApplyReplacements(t *testing.T) { + type args struct { + input *v1alpha2.ArrayOrString + stringReplacements map[string]string + arrayReplacements map[string][]string + } + tests := []struct { + name string + args args + expectedOutput *v1alpha2.ArrayOrString + }{{ + name: "no replacements on array", + args: args{ + input: builder.ArrayOrString("an", "array"), + stringReplacements: map[string]string{"some": "value", "anotherkey": "value"}, + arrayReplacements: map[string][]string{"arraykey": {"array", "value"}, "sdfdf": {"sdf", "sdfsd"}}, + }, + expectedOutput: builder.ArrayOrString("an", "array"), + }, { + name: "string replacements on string", + args: args{ + input: builder.ArrayOrString("astring$(some) asdf $(anotherkey)"), + stringReplacements: map[string]string{"some": "value", "anotherkey": "value"}, + arrayReplacements: map[string][]string{"arraykey": {"array", "value"}, "sdfdf": {"asdf", "sdfsd"}}, + }, + expectedOutput: builder.ArrayOrString("astringvalue asdf value"), + }, { + name: "single array replacement", + args: args{ + input: 
builder.ArrayOrString("firstvalue", "$(arraykey)", "lastvalue"), + stringReplacements: map[string]string{"some": "value", "anotherkey": "value"}, + arrayReplacements: map[string][]string{"arraykey": {"array", "value"}, "sdfdf": {"asdf", "sdfsd"}}, + }, + expectedOutput: builder.ArrayOrString("firstvalue", "array", "value", "lastvalue"), + }, { + name: "multiple array replacement", + args: args{ + input: builder.ArrayOrString("firstvalue", "$(arraykey)", "lastvalue", "$(sdfdf)"), + stringReplacements: map[string]string{"some": "value", "anotherkey": "value"}, + arrayReplacements: map[string][]string{"arraykey": {"array", "value"}, "sdfdf": {"asdf", "sdfsd"}}, + }, + expectedOutput: builder.ArrayOrString("firstvalue", "array", "value", "lastvalue", "asdf", "sdfsd"), + }, { + name: "empty array replacement", + args: args{ + input: builder.ArrayOrString("firstvalue", "$(arraykey)", "lastvalue"), + stringReplacements: map[string]string{"some": "value", "anotherkey": "value"}, + arrayReplacements: map[string][]string{"arraykey": {}}, + }, + expectedOutput: builder.ArrayOrString("firstvalue", "lastvalue"), + }} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tt.args.input.ApplyReplacements(tt.args.stringReplacements, tt.args.arrayReplacements) + if d := cmp.Diff(tt.expectedOutput, tt.args.input); d != "" { + t.Errorf("ApplyReplacements() output did not match expected value %s", d) + } + }) + } +} + +type ArrayOrStringHolder struct { + AOrS v1alpha2.ArrayOrString `json:"val"` +} + +func TestArrayOrString_UnmarshalJSON(t *testing.T) { + cases := []struct { + input string + result v1alpha2.ArrayOrString + }{ + {"{\"val\": \"123\"}", *builder.ArrayOrString("123")}, + {"{\"val\": \"\"}", *builder.ArrayOrString("")}, + {"{\"val\":[]}", v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{}}}, + {"{\"val\":[\"oneelement\"]}", v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{"oneelement"}}}, + {"{\"val\":[\"multiple\", \"elements\"]}", v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{"multiple", "elements"}}}, + } + + for _, c := range cases { + var result ArrayOrStringHolder + if err := json.Unmarshal([]byte(c.input), &result); err != nil { + t.Errorf("Failed to unmarshal input '%v': %v", c.input, err) + } + if !reflect.DeepEqual(result.AOrS, c.result) { + t.Errorf("Failed to unmarshal input '%v': expected %+v, got %+v", c.input, c.result, result) + } + } +} + +func TestArrayOrString_MarshalJSON(t *testing.T) { + cases := []struct { + input v1alpha2.ArrayOrString + result string + }{ + {*builder.ArrayOrString("123"), "{\"val\":\"123\"}"}, + {*builder.ArrayOrString("123", "1234"), "{\"val\":[\"123\",\"1234\"]}"}, + {*builder.ArrayOrString("a", "a", "a"), "{\"val\":[\"a\",\"a\",\"a\"]}"}, + } + + for _, c := range cases { + input := ArrayOrStringHolder{c.input} + result, err := json.Marshal(&input) + if err != nil { + t.Errorf("Failed to marshal input '%v': %v", input, err) + } + if string(result) != c.result { + t.Errorf("Failed to marshal input '%v': expected: %+v, got %q", input, c.result, string(result)) + } + } +} diff --git a/pkg/apis/pipeline/v1alpha2/register.go b/pkg/apis/pipeline/v1alpha2/register.go index 007c6751645..6798906c03b 100644 --- a/pkg/apis/pipeline/v1alpha2/register.go +++ b/pkg/apis/pipeline/v1alpha2/register.go @@ -45,9 +45,10 @@ var ( // Adds the list of known types to Scheme. 
func addKnownTypes(scheme *runtime.Scheme) error { - scheme.AddKnownTypes(SchemeGroupVersion) - // &Task{}, - // &TaskList{}, + scheme.AddKnownTypes(SchemeGroupVersion, + &Task{}, + &TaskList{}, + ) // &Condition{}, // &ConditionList{}, // &ClusterTask{}, diff --git a/pkg/apis/pipeline/v1alpha2/resource_types.go b/pkg/apis/pipeline/v1alpha2/resource_types.go new file mode 100644 index 00000000000..92bd202adea --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/resource_types.go @@ -0,0 +1,176 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2 + +import ( + "github.com/google/go-cmp/cmp" + "golang.org/x/xerrors" + v1 "k8s.io/api/core/v1" +) + +// PipelineResourceType represents the type of endpoint the pipelineResource is, so that the +// controller will know this pipelineResource should be fetched and optionally what +// additional metatdata should be provided for it. +type PipelineResourceType string + +var ( + AllowedOutputResources = map[PipelineResourceType]bool{ + PipelineResourceTypeStorage: true, + PipelineResourceTypeGit: true, + } +) + +const ( + // PipelineResourceTypeGit indicates that this source is a GitHub repo. + PipelineResourceTypeGit PipelineResourceType = "git" + + // PipelineResourceTypeStorage indicates that this source is a storage blob resource. + PipelineResourceTypeStorage PipelineResourceType = "storage" + + // PipelineResourceTypeImage indicates that this source is a docker Image. + PipelineResourceTypeImage PipelineResourceType = "image" + + // PipelineResourceTypeCluster indicates that this source is a k8s cluster Image. + PipelineResourceTypeCluster PipelineResourceType = "cluster" + + // PipelineResourceTypePullRequest indicates that this source is a SCM Pull Request. + PipelineResourceTypePullRequest PipelineResourceType = "pullRequest" + + // PipelineResourceTypeCloudEvent indicates that this source is a cloud event URI + PipelineResourceTypeCloudEvent PipelineResourceType = "cloudEvent" +) + +// AllResourceTypes can be used for validation to check if a provided Resource type is one of the known types. +var AllResourceTypes = []PipelineResourceType{PipelineResourceTypeGit, PipelineResourceTypeStorage, PipelineResourceTypeImage, PipelineResourceTypeCluster, PipelineResourceTypePullRequest, PipelineResourceTypeCloudEvent} + +// TaskResources allows a Pipeline to declare how its DeclaredPipelineResources +// should be provided to a Task as its inputs and outputs. +type TaskResources struct { + // Inputs holds the mapping from the PipelineResources declared in + // DeclaredPipelineResources to the input PipelineResources required by the Task. + Inputs []TaskResource `json:"inputs,omitempty"` + // Outputs holds the mapping from the PipelineResources declared in + // DeclaredPipelineResources to the input PipelineResources required by the Task. + Outputs []TaskResource `json:"outputs,omitempty"` +} + +// TaskResource defines an input or output Resource declared as a requirement +// by a Task. 
The Name field will be used to refer to these Resources within +// the Task definition, and when provided as an Input, the Name will be the +// path to the volume mounted containing this Resource as an input (e.g. +// an input Resource named `workspace` will be mounted at `/workspace`). +type TaskResource struct { + ResourceDeclaration `json:",inline"` +} + +// ResourceDeclaration defines an input or output PipelineResource declared as a requirement +// by another type such as a Task or Condition. The Name field will be used to refer to these +// PipelineResources within the type's definition, and when provided as an Input, the Name will be the +// path to the volume mounted containing this PipelineResource as an input (e.g. +// an input Resource named `workspace` will be mounted at `/workspace`). +type ResourceDeclaration struct { + // Name declares the name by which a resource is referenced in the + // definition. Resources may be referenced by name in the definition of a + // Task's steps. + Name string `json:"name"` + // Type is the type of this resource; + Type PipelineResourceType `json:"type"` + // TargetPath is the path in workspace directory where the resource + // will be copied. + // +optional + TargetPath string `json:"targetPath,omitempty"` +} + +// TaskModifier is an interface to be implemented by different PipelineResources +type TaskModifier interface { + GetStepsToPrepend() []Step + GetStepsToAppend() []Step + GetVolumes() []v1.Volume +} + +// InternalTaskModifier implements TaskModifier for resources that are built-in to Tekton Pipelines. +type InternalTaskModifier struct { + StepsToPrepend []Step + StepsToAppend []Step + Volumes []v1.Volume +} + +// GetStepsToPrepend returns a set of Steps to prepend to the Task. +func (tm *InternalTaskModifier) GetStepsToPrepend() []Step { + return tm.StepsToPrepend +} + +// GetStepsToAppend returns a set of Steps to append to the Task. +func (tm *InternalTaskModifier) GetStepsToAppend() []Step { + return tm.StepsToAppend +} + +// GetVolumes returns a set of Volumes to prepend to the Task pod. +func (tm *InternalTaskModifier) GetVolumes() []v1.Volume { + return tm.Volumes +} + +// ApplyTaskModifier applies a modifier to the task by appending and prepending steps and volumes. +// If steps with the same name exist in ts an error will be returned. If identical Volumes have +// been added, they will not be added again. If Volumes with the same name but different contents +// have been added, an error will be returned. +func ApplyTaskModifier(ts *TaskSpec, tm TaskModifier) error { + steps := tm.GetStepsToPrepend() + for _, step := range steps { + if err := checkStepNotAlreadyAdded(step, ts.Steps); err != nil { + return err + } + } + ts.Steps = append(steps, ts.Steps...) + + steps = tm.GetStepsToAppend() + for _, step := range steps { + if err := checkStepNotAlreadyAdded(step, ts.Steps); err != nil { + return err + } + } + ts.Steps = append(ts.Steps, steps...) 
+ + volumes := tm.GetVolumes() + for _, volume := range volumes { + var alreadyAdded bool + for _, v := range ts.Volumes { + if volume.Name == v.Name { + // If a Volume with the same name but different contents has already been added, we can't add both + if d := cmp.Diff(volume, v); d != "" { + return xerrors.Errorf("Tried to add volume %s already added but with different contents", volume.Name) + } + // If an identical Volume has already been added, don't add it again + alreadyAdded = true + } + } + if !alreadyAdded { + ts.Volumes = append(ts.Volumes, volume) + } + } + + return nil +} + +func checkStepNotAlreadyAdded(s Step, steps []Step) error { + for _, step := range steps { + if s.Name == step.Name { + return xerrors.Errorf("Step %s cannot be added again", step.Name) + } + } + return nil +} diff --git a/pkg/apis/pipeline/v1alpha2/resource_types_test.go b/pkg/apis/pipeline/v1alpha2/resource_types_test.go new file mode 100644 index 00000000000..63a4c22f8e6 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/resource_types_test.go @@ -0,0 +1,144 @@ +/* +Copyright 2019 The Tekton Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + corev1 "k8s.io/api/core/v1" +) + +var ( + prependStep = corev1.Container{ + Name: "prepend-step", + Image: "dummy", + Command: []string{"doit"}, + Args: []string{"stuff", "things"}, + } + appendStep = corev1.Container{ + Name: "append-step", + Image: "dummy", + Command: []string{"doit"}, + Args: []string{"other stuff", "other things"}, + } + volume = corev1.Volume{ + Name: "magic-volume", + VolumeSource: corev1.VolumeSource{ + PersistentVolumeClaim: &corev1.PersistentVolumeClaimVolumeSource{ClaimName: "some-claim"}, + }, + } +) + +type TestTaskModifier struct{} + +func (tm *TestTaskModifier) GetStepsToPrepend() []v1alpha2.Step { + return []v1alpha2.Step{{ + Container: prependStep, + }} +} + +func (tm *TestTaskModifier) GetStepsToAppend() []v1alpha2.Step { + return []v1alpha2.Step{{ + Container: appendStep, + }} +} + +func (tm *TestTaskModifier) GetVolumes() []corev1.Volume { + return []corev1.Volume{volume} +} + +func TestApplyTaskModifier(t *testing.T) { + testcases := []struct { + name string + ts v1alpha2.TaskSpec + }{{ + name: "success", + ts: v1alpha2.TaskSpec{}, + }, { + name: "identical volume already added", + ts: v1alpha2.TaskSpec{ + // Trying to add the same Volume that has already been added shouldn't be an error + // and it should not be added twice + Volumes: []corev1.Volume{volume}, + }, + }} + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + if err := v1alpha2.ApplyTaskModifier(&tc.ts, &TestTaskModifier{}); err != nil { + t.Fatalf("Did not expect error modifying TaskSpec but got %v", err) + } + + expectedTaskSpec := v1alpha2.TaskSpec{ + Steps: []v1alpha2.Step{{ + Container: prependStep, + }, { + Container: appendStep, + }}, + Volumes: []corev1.Volume{ + volume, + }, + } + + if d := cmp.Diff(expectedTaskSpec, 
tc.ts); d != "" { + t.Errorf("TaskSpec was not modified as expected (-want, +got): %s", d) + } + }) + } +} + +func TestApplyTaskModifier_AlreadyAdded(t *testing.T) { + testcases := []struct { + name string + ts v1alpha2.TaskSpec + }{{ + name: "prepend already added", + ts: v1alpha2.TaskSpec{ + Steps: []v1alpha2.Step{{Container: prependStep}}, + }, + }, { + name: "append already added", + ts: v1alpha2.TaskSpec{ + Steps: []v1alpha2.Step{{Container: appendStep}}, + }, + }, { + name: "both steps already added", + ts: v1alpha2.TaskSpec{ + Steps: []v1alpha2.Step{{Container: prependStep}, {Container: appendStep}}, + }, + }, { + name: "both steps already added reverse order", + ts: v1alpha2.TaskSpec{ + Steps: []v1alpha2.Step{{Container: appendStep}, {Container: prependStep}}, + }, + }, { + name: "volume with same name but diff content already added", + ts: v1alpha2.TaskSpec{ + Volumes: []corev1.Volume{{ + Name: "magic-volume", + VolumeSource: corev1.VolumeSource{ + EmptyDir: &corev1.EmptyDirVolumeSource{}, + }, + }}, + }, + }} + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + if err := v1alpha2.ApplyTaskModifier(&tc.ts, &TestTaskModifier{}); err == nil { + t.Errorf("Expected error when applying values already added but got none") + } + }) + } +} diff --git a/pkg/apis/pipeline/v1alpha2/resource_types_validation.go b/pkg/apis/pipeline/v1alpha2/resource_types_validation.go new file mode 100644 index 00000000000..8020375cab9 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/resource_types_validation.go @@ -0,0 +1,70 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package v1alpha2 + +import ( + "context" + "fmt" + "strings" + + "knative.dev/pkg/apis" +) + +func (tr *TaskResources) Validate(ctx context.Context) *apis.FieldError { + if tr == nil { + return nil + } + if err := validateTaskResources(tr.Inputs, "inputs"); err != nil { + return err + } + if err := validateTaskResources(tr.Outputs, "outputs"); err != nil { + return err + } + return nil +} + +func validateTaskResources(resources []TaskResource, name string) *apis.FieldError { + for _, resource := range resources { + if err := validateResourceType(resource, fmt.Sprintf("taskspec.resources.%s.%s.Type", name, resource.Name)); err != nil { + return err + } + } + if err := checkForDuplicates(resources, fmt.Sprintf("taskspec.resources.%s.name", name)); err != nil { + return err + } + return nil +} + +func checkForDuplicates(resources []TaskResource, path string) *apis.FieldError { + encountered := map[string]struct{}{} + for _, r := range resources { + if _, ok := encountered[strings.ToLower(r.Name)]; ok { + return apis.ErrMultipleOneOf(path) + } + encountered[strings.ToLower(r.Name)] = struct{}{} + } + return nil +} + +func validateResourceType(r TaskResource, path string) *apis.FieldError { + for _, allowed := range AllResourceTypes { + if r.Type == allowed { + return nil + } + } + return apis.ErrInvalidValue(string(r.Type), path) +} diff --git a/pkg/apis/pipeline/v1alpha2/substitution.go b/pkg/apis/pipeline/v1alpha2/substitution.go new file mode 100644 index 00000000000..8d191b6ded2 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/substitution.go @@ -0,0 +1,139 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2 + +import ( + "fmt" + "regexp" + "strings" + + "knative.dev/pkg/apis" +) + +const parameterSubstitution = "[_a-zA-Z][_a-zA-Z0-9.-]*" + +const braceMatchingRegex = "(\\$(\\(%s.(?P%s)\\)))" + +func ValidateVariable(name, value, prefix, contextPrefix, locationName, path string, vars map[string]struct{}) *apis.FieldError { + if vs, present := extractVariablesFromString(value, contextPrefix+prefix); present { + for _, v := range vs { + if _, ok := vars[v]; !ok { + return &apis.FieldError{ + Message: fmt.Sprintf("non-existent variable in %q for %s %s", value, locationName, name), + Paths: []string{path + "." + name}, + } + } + } + } + return nil +} + +// Verifies that variables matching the relevant string expressions do not reference any of the names present in vars. +func ValidateVariableProhibited(name, value, prefix, contextPrefix, locationName, path string, vars map[string]struct{}) *apis.FieldError { + if vs, present := extractVariablesFromString(value, contextPrefix+prefix); present { + for _, v := range vs { + if _, ok := vars[v]; ok { + return &apis.FieldError{ + Message: fmt.Sprintf("variable type invalid in %q for %s %s", value, locationName, name), + Paths: []string{path + "." + name}, + } + } + } + } + return nil +} + +// Verifies that variables matching the relevant string expressions are completely isolated if present. 
+// Verifies that variables matching the relevant string expressions are completely isolated if present.
+func ValidateVariableIsolated(name, value, prefix, contextPrefix, locationName, path string, vars map[string]struct{}) *apis.FieldError {
+	if vs, present := extractVariablesFromString(value, contextPrefix+prefix); present {
+		firstMatch, _ := extractExpressionFromString(value, contextPrefix+prefix)
+		for _, v := range vs {
+			if _, ok := vars[v]; ok {
+				if len(value) != len(firstMatch) {
+					return &apis.FieldError{
+						Message: fmt.Sprintf("variable is not properly isolated in %q for %s %s", value, locationName, name),
+						Paths: []string{path + "." + name},
+					}
+				}
+			}
+		}
+	}
+	return nil
+}
+
+// Extract the first full string expression found (e.g. "$(input.params.foo)"). Return
+// "" and false if nothing is found.
+func extractExpressionFromString(s, prefix string) (string, bool) {
+	pattern := fmt.Sprintf(braceMatchingRegex, prefix, parameterSubstitution)
+	re := regexp.MustCompile(pattern)
+	match := re.FindStringSubmatch(s)
+	if match == nil {
+		return "", false
+	}
+	return match[0], true
+}
+
+func extractVariablesFromString(s, prefix string) ([]string, bool) {
+	pattern := fmt.Sprintf(braceMatchingRegex, prefix, parameterSubstitution)
+	re := regexp.MustCompile(pattern)
+	matches := re.FindAllStringSubmatch(s, -1)
+	if len(matches) == 0 {
+		return []string{}, false
+	}
+	vars := make([]string, len(matches))
+	for i, match := range matches {
+		groups := matchGroups(match, re)
+		// foo -> foo
+		// foo.bar -> foo
+		// foo.bar.baz -> foo
+		vars[i] = strings.SplitN(groups["var"], ".", 2)[0]
+	}
+	return vars, true
+}
+
+func matchGroups(matches []string, pattern *regexp.Regexp) map[string]string {
+	groups := make(map[string]string)
+	for i, name := range pattern.SubexpNames()[1:] {
+		groups[name] = matches[i+1]
+	}
+	return groups
+}
+
+func ApplyReplacements(in string, replacements map[string]string) string {
+	for k, v := range replacements {
+		in = strings.Replace(in, fmt.Sprintf("$(%s)", k), v, -1)
+	}
+	return in
+}
+
+// Take an input string, and output an array of strings related to possible arrayReplacements. If there aren't any
+// areas where the input can be split up via arrayReplacements, then just return an array with a single element,
+// which is ApplyReplacements(in, stringReplacements).
+func ApplyArrayReplacements(in string, stringReplacements map[string]string, arrayReplacements map[string][]string) []string {
+	for k, v := range arrayReplacements {
+		stringToReplace := fmt.Sprintf("$(%s)", k)
+
+		// If the input string matches a replacement's key (without padding characters), return the corresponding array.
+		// Note that the webhook should prevent all instances where this could evaluate to false.
+		if (strings.Count(in, stringToReplace) == 1) && len(in) == len(stringToReplace) {
+			return v
+		}
+	}
+
+	// Otherwise return a size-1 array containing the input string with standard stringReplacements applied.
+	return []string{ApplyReplacements(in, stringReplacements)}
+}
diff --git a/pkg/apis/pipeline/v1alpha2/task_defaults.go b/pkg/apis/pipeline/v1alpha2/task_defaults.go
new file mode 100644
index 00000000000..f1d1ee1e8b1
--- /dev/null
+++ b/pkg/apis/pipeline/v1alpha2/task_defaults.go
@@ -0,0 +1,36 @@
+/*
+Copyright 2019 The Tekton Authors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2 + +import ( + "context" + + "knative.dev/pkg/apis" +) + +var _ apis.Defaultable = (*Task)(nil) + +func (t *Task) SetDefaults(ctx context.Context) { + t.Spec.SetDefaults(ctx) +} + +// SetDefaults set any defaults for the task spec +func (ts *TaskSpec) SetDefaults(ctx context.Context) { + for i := range ts.Params { + ts.Params[i].SetDefaults(ctx) + } +} diff --git a/pkg/apis/pipeline/v1alpha2/task_interface.go b/pkg/apis/pipeline/v1alpha2/task_interface.go new file mode 100644 index 00000000000..ecb8adeade4 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/task_interface.go @@ -0,0 +1,26 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2 + +import metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + +// TaskInterface is implemented by Task and ClusterTask +type TaskInterface interface { + TaskMetadata() metav1.ObjectMeta + TaskSpec() TaskSpec + Copy() TaskInterface +} diff --git a/pkg/apis/pipeline/v1alpha2/task_types.go b/pkg/apis/pipeline/v1alpha2/task_types.go index 800478749e9..e5d96efd56a 100644 --- a/pkg/apis/pipeline/v1alpha2/task_types.go +++ b/pkg/apis/pipeline/v1alpha2/task_types.go @@ -15,3 +15,91 @@ limitations under the License. */ package v1alpha2 + +import ( + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// +genclient +// +genclient:noStatus +// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object + +// Task represents a collection of sequential steps that are run as part of a +// Pipeline using a set of inputs and producing a set of outputs. Tasks execute +// when TaskRuns are created that provide the input parameters and resources and +// output resources the Task requires. +// +// +k8s:openapi-gen=true +type Task struct { + metav1.TypeMeta `json:",inline"` + // +optional + metav1.ObjectMeta `json:"metadata"` + + // Spec holds the desired state of the Task from the client + // +optional + Spec TaskSpec `json:"spec"` +} + +func (t *Task) TaskSpec() TaskSpec { + return t.Spec +} + +func (t *Task) TaskMetadata() metav1.ObjectMeta { + return t.ObjectMeta +} + +func (t *Task) Copy() TaskInterface { + return t.DeepCopy() +} + +// TaskSpec defines the desired state of Task. +type TaskSpec struct { + // Resources is a list input and output resource to run the task + // Resources are represented in TaskRuns as bindings to instances of + // PipelineResources. + // +optional + Resources *TaskResources `json:"resources,omitempty"` + + // Params is a list of input parameters required to run the task. 
Params
+	// must be supplied as inputs in TaskRuns unless they declare a default
+	// value.
+	// +optional
+	Params []ParamSpec `json:"params,omitempty"`
+
+	// Steps are the steps of the build; each step is run sequentially with the
+	// source mounted into /workspace.
+	Steps []Step `json:"steps,omitempty"`
+
+	// Volumes is a collection of volumes that are available to mount into the
+	// steps of the build.
+	Volumes []corev1.Volume `json:"volumes,omitempty"`
+
+	// StepTemplate can be used as the basis for all step containers within the
+	// Task, so that the steps inherit settings on the base container.
+	StepTemplate *corev1.Container `json:"stepTemplate,omitempty"`
+
+	// Sidecars are run alongside the Task's step containers. They begin before
+	// the steps start and end after the steps complete.
+	Sidecars []corev1.Container `json:"sidecars,omitempty"`
+}
+
+// Step embeds the Container type, which allows it to include fields not
+// provided by Container.
+type Step struct {
+	corev1.Container
+
+	// Script is the contents of an executable file to execute.
+	//
+	// If Script is not empty, the Step cannot have a Command or Args.
+	Script string `json:"script,omitempty"`
+}
+
+// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
+// TaskList contains a list of Task
+type TaskList struct {
+	metav1.TypeMeta `json:",inline"`
+	// +optional
+	metav1.ListMeta `json:"metadata,omitempty"`
+	Items []Task `json:"items"`
+}
diff --git a/pkg/apis/pipeline/v1alpha2/task_validation.go b/pkg/apis/pipeline/v1alpha2/task_validation.go
new file mode 100644
index 00000000000..9be36140852
--- /dev/null
+++ b/pkg/apis/pipeline/v1alpha2/task_validation.go
@@ -0,0 +1,279 @@
+/*
+Copyright 2019 The Tekton Authors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/ + +package v1alpha2 + +import ( + "context" + "fmt" + "strings" + + "github.com/tektoncd/pipeline/pkg/apis/validate" + "github.com/tektoncd/pipeline/pkg/substitution" + corev1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/api/equality" + "k8s.io/apimachinery/pkg/util/validation" + "knative.dev/pkg/apis" +) + +var _ apis.Validatable = (*Task)(nil) + +func (t *Task) Validate(ctx context.Context) *apis.FieldError { + if err := validate.ObjectMetadata(t.GetObjectMeta()); err != nil { + return err.ViaField("metadata") + } + return t.Spec.Validate(ctx) +} + +func (ts *TaskSpec) Validate(ctx context.Context) *apis.FieldError { + if equality.Semantic.DeepEqual(ts, &TaskSpec{}) { + return apis.ErrMissingField(apis.CurrentField) + } + + if len(ts.Steps) == 0 { + return apis.ErrMissingField("steps") + } + if err := ValidateVolumes(ts.Volumes).ViaField("volumes"); err != nil { + return err + } + mergedSteps, err := MergeStepsWithStepTemplate(ts.StepTemplate, ts.Steps) + if err != nil { + return &apis.FieldError{ + Message: fmt.Sprintf("error merging step template and steps: %s", err), + Paths: []string{"stepTemplate"}, + } + } + + if err := validateSteps(mergedSteps).ViaField("steps"); err != nil { + return err + } + + // Validate Resources declaration + if err := ts.Resources.Validate(ctx); err != nil { + return err + } + + // Validate that the parameters type are correct + if err := validateParameterTypes(ts.Params); err != nil { + return err + } + + // Validate task step names + for _, step := range ts.Steps { + if errs := validation.IsDNS1123Label(step.Name); step.Name != "" && len(errs) > 0 { + return &apis.FieldError{ + Message: fmt.Sprintf("invalid value %q", step.Name), + Paths: []string{"taskspec.steps.name"}, + Details: "Task step name must be a valid DNS Label, For more info refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", + } + } + } + + // FIXME(vdemeester) validate param variables + if err := validateParameterVariables(ts.Steps, ts.Params); err != nil { + return err + } + // FIXME(vdemeester) validate resource + return nil +} + +func ValidateVolumes(volumes []corev1.Volume) *apis.FieldError { + // Task must not have duplicate volume names. + vols := map[string]struct{}{} + for _, v := range volumes { + if _, ok := vols[v.Name]; ok { + return &apis.FieldError{ + Message: fmt.Sprintf("multiple volumes with same name %q", v.Name), + Paths: []string{"name"}, + } + } + vols[v.Name] = struct{}{} + } + return nil +} + +func validateSteps(steps []Step) *apis.FieldError { + // Task must not have duplicate step names. + names := map[string]struct{}{} + for _, s := range steps { + if s.Image == "" { + return apis.ErrMissingField("Image") + } + + if s.Script != "" { + if len(s.Args) > 0 || len(s.Command) > 0 { + return &apis.FieldError{ + Message: "script cannot be used with args or command", + Paths: []string{"script"}, + } + } + if !strings.HasPrefix(strings.TrimSpace(s.Script), "#!") { + return &apis.FieldError{ + Message: "script must start with a shebang (#!)", + Paths: []string{"script"}, + } + } + } + + if s.Name == "" { + continue + } + if _, ok := names[s.Name]; ok { + return apis.ErrInvalidValue(s.Name, "name") + } + names[s.Name] = struct{}{} + } + return nil +} + +func validateParameterTypes(params []ParamSpec) *apis.FieldError { + for _, p := range params { + // Ensure param has a valid type. 
+ validType := false + for _, allowedType := range AllParamTypes { + if p.Type == allowedType { + validType = true + } + } + if !validType { + return apis.ErrInvalidValue(p.Type, fmt.Sprintf("taskspec.params.%s.type", p.Name)) + } + + // If a default value is provided, ensure its type matches param's declared type. + if (p.Default != nil) && (p.Default.Type != p.Type) { + return &apis.FieldError{ + Message: fmt.Sprintf( + "\"%v\" type does not match default value's type: \"%v\"", p.Type, p.Default.Type), + Paths: []string{ + fmt.Sprintf("taskspec.params.%s.type", p.Name), + fmt.Sprintf("taskspec.params.%s.default.type", p.Name), + }, + } + } + } + return nil +} + +func validateParameterVariables(steps []Step, params []ParamSpec) *apis.FieldError { + parameterNames := map[string]struct{}{} + arrayParameterNames := map[string]struct{}{} + + for _, p := range params { + parameterNames[p.Name] = struct{}{} + if p.Type == ParamTypeArray { + arrayParameterNames[p.Name] = struct{}{} + } + } + + if err := validateVariables(steps, "params", parameterNames); err != nil { + return err + } + return validateArrayUsage(steps, "params", arrayParameterNames) +} + +func validateArrayUsage(steps []Step, prefix string, vars map[string]struct{}) *apis.FieldError { + for _, step := range steps { + if err := validateTaskNoArrayReferenced("name", step.Name, prefix, vars); err != nil { + return err + } + if err := validateTaskNoArrayReferenced("image", step.Image, prefix, vars); err != nil { + return err + } + if err := validateTaskNoArrayReferenced("workingDir", step.WorkingDir, prefix, vars); err != nil { + return err + } + for i, cmd := range step.Command { + if err := validateTaskArraysIsolated(fmt.Sprintf("command[%d]", i), cmd, prefix, vars); err != nil { + return err + } + } + for i, arg := range step.Args { + if err := validateTaskArraysIsolated(fmt.Sprintf("arg[%d]", i), arg, prefix, vars); err != nil { + return err + } + } + for _, env := range step.Env { + if err := validateTaskNoArrayReferenced(fmt.Sprintf("env[%s]", env.Name), env.Value, prefix, vars); err != nil { + return err + } + } + for i, v := range step.VolumeMounts { + if err := validateTaskNoArrayReferenced(fmt.Sprintf("volumeMount[%d].Name", i), v.Name, prefix, vars); err != nil { + return err + } + if err := validateTaskNoArrayReferenced(fmt.Sprintf("volumeMount[%d].MountPath", i), v.MountPath, prefix, vars); err != nil { + return err + } + if err := validateTaskNoArrayReferenced(fmt.Sprintf("volumeMount[%d].SubPath", i), v.SubPath, prefix, vars); err != nil { + return err + } + } + } + return nil +} + +func validateVariables(steps []Step, prefix string, vars map[string]struct{}) *apis.FieldError { + for _, step := range steps { + if err := validateTaskVariable("name", step.Name, prefix, vars); err != nil { + return err + } + if err := validateTaskVariable("image", step.Image, prefix, vars); err != nil { + return err + } + if err := validateTaskVariable("workingDir", step.WorkingDir, prefix, vars); err != nil { + return err + } + for i, cmd := range step.Command { + if err := validateTaskVariable(fmt.Sprintf("command[%d]", i), cmd, prefix, vars); err != nil { + return err + } + } + for i, arg := range step.Args { + if err := validateTaskVariable(fmt.Sprintf("arg[%d]", i), arg, prefix, vars); err != nil { + return err + } + } + for _, env := range step.Env { + if err := validateTaskVariable(fmt.Sprintf("env[%s]", env.Name), env.Value, prefix, vars); err != nil { + return err + } + } + for i, v := range step.VolumeMounts { + if err := 
validateTaskVariable(fmt.Sprintf("volumeMount[%d].Name", i), v.Name, prefix, vars); err != nil { + return err + } + if err := validateTaskVariable(fmt.Sprintf("volumeMount[%d].MountPath", i), v.MountPath, prefix, vars); err != nil { + return err + } + if err := validateTaskVariable(fmt.Sprintf("volumeMount[%d].SubPath", i), v.SubPath, prefix, vars); err != nil { + return err + } + } + } + return nil +} + +func validateTaskVariable(name, value, prefix string, vars map[string]struct{}) *apis.FieldError { + return substitution.ValidateVariable(name, value, prefix, "", "step", "taskspec.steps", vars) +} + +func validateTaskNoArrayReferenced(name, value, prefix string, arrayNames map[string]struct{}) *apis.FieldError { + return substitution.ValidateVariableProhibited(name, value, prefix, "", "step", "taskspec.steps", arrayNames) +} + +func validateTaskArraysIsolated(name, value, prefix string, arrayNames map[string]struct{}) *apis.FieldError { + return substitution.ValidateVariableIsolated(name, value, prefix, "", "step", "taskspec.steps", arrayNames) +} diff --git a/pkg/apis/pipeline/v1alpha2/task_validation_test.go b/pkg/apis/pipeline/v1alpha2/task_validation_test.go new file mode 100644 index 00000000000..5bc9e58c582 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/task_validation_test.go @@ -0,0 +1,571 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package v1alpha2_test + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + "github.com/tektoncd/pipeline/test/builder" + corev1 "k8s.io/api/core/v1" + "knative.dev/pkg/apis" +) + +var validResource = v1alpha2.TaskResource{ + ResourceDeclaration: v1alpha2.ResourceDeclaration{ + Name: "source", + Type: "git", + }, +} + +var invalidResource = v1alpha2.TaskResource{ + ResourceDeclaration: v1alpha2.ResourceDeclaration{ + Name: "source", + Type: "what", + }, +} + +var validSteps = []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "myimage", +}}} + +var invalidSteps = []v1alpha2.Step{{Container: corev1.Container{ + Name: "replaceImage", + Image: "myimage", +}}} + +func TestTaskSpecValidate(t *testing.T) { + type fields struct { + Params []v1alpha2.ParamSpec + Resources *v1alpha2.TaskResources + Steps []v1alpha2.Step + StepTemplate *corev1.Container + } + tests := []struct { + name string + fields fields + }{{ + name: "unnamed steps", + fields: fields{ + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Image: "myimage", + }}, {Container: corev1.Container{ + Image: "myotherimage", + }}}, + }, + }, { + name: "valid input resources", + fields: fields{ + Resources: &v1alpha2.TaskResources{ + Inputs: []v1alpha2.TaskResource{validResource}, + }, + Steps: validSteps, + }, + }, { + name: "valid output resources", + fields: fields{ + Resources: &v1alpha2.TaskResources{ + Outputs: []v1alpha2.TaskResource{validResource}, + }, + Steps: validSteps, + }, + }, { + name: "valid params type implied", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "task", + Description: "param", + Default: builder.ArrayOrString("default"), + }}, + Steps: validSteps, + }, + }, { + name: "valid params type explicit", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "task", + Type: v1alpha2.ParamTypeString, + Description: "param", + Default: builder.ArrayOrString("default"), + }}, + Steps: validSteps, + }, + }, { + name: "valid template variable", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", + }, { + Name: "foo-is-baz", + }}, + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "url", + Args: []string{"--flag=$(params.baz) && $(params.foo-is-baz)"}, + WorkingDir: "/foo/bar/src/", + }}}, + }, + }, { + name: "valid array template variable", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", + Type: v1alpha2.ParamTypeArray, + }, { + Name: "foo-is-baz", + Type: v1alpha2.ParamTypeArray, + }}, + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "myimage", + Command: []string{"$(param.foo-is-baz)"}, + Args: []string{"$(params.baz)", "middle string", "$(params.foo-is-baz)"}, + WorkingDir: "/foo/bar/src/", + }}}, + }, + }, { + name: "step template included in validation", + fields: fields{ + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "astep", + Command: []string{"echo"}, + Args: []string{"hello"}, + }}}, + StepTemplate: &corev1.Container{ + Image: "some-image", + }, + }, + }, { + name: "valid step with script", + fields: fields{ + Steps: []v1alpha2.Step{{ + Container: corev1.Container{ + Image: "my-image", + }, + Script: ` + #!/usr/bin/env bash + hello world`, + }}, + }, + }} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ts := &v1alpha2.TaskSpec{ + Params: tt.fields.Params, + Resources: tt.fields.Resources, + Steps: tt.fields.Steps, + 
StepTemplate: tt.fields.StepTemplate, + } + ctx := context.Background() + ts.SetDefaults(ctx) + if err := ts.Validate(ctx); err != nil { + t.Errorf("TaskSpec.Validate() = %v", err) + } + }) + } +} + +func TestTaskSpecValidateError(t *testing.T) { + type fields struct { + Params []v1alpha2.ParamSpec + Resources *v1alpha2.TaskResources + Steps []v1alpha2.Step + Volumes []corev1.Volume + } + tests := []struct { + name string + fields fields + expectedError apis.FieldError + }{{ + name: "nil", + expectedError: apis.FieldError{ + Message: `missing field(s)`, + Paths: []string{""}, + }, + }, { + name: "no step", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "validparam", + Type: v1alpha2.ParamTypeString, + Description: "parameter", + Default: builder.ArrayOrString("default"), + }}, + }, + expectedError: apis.FieldError{ + Message: `missing field(s)`, + Paths: []string{"steps"}, + }, + }, { + name: "invalid input resource", + fields: fields{ + Resources: &v1alpha2.TaskResources{ + Inputs: []v1alpha2.TaskResource{invalidResource}, + }, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `invalid value: what`, + Paths: []string{"taskspec.resources.inputs.source.Type"}, + }, + }, { + name: "one invalid input resource", + fields: fields{ + Resources: &v1alpha2.TaskResources{ + Inputs: []v1alpha2.TaskResource{validResource, invalidResource}, + }, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `invalid value: what`, + Paths: []string{"taskspec.resources.inputs.source.Type"}, + }, + }, { + name: "duplicated inputs resources", + fields: fields{ + Resources: &v1alpha2.TaskResources{ + Inputs: []v1alpha2.TaskResource{validResource, validResource}, + Outputs: []v1alpha2.TaskResource{validResource}, + }, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `expected exactly one, got both`, + Paths: []string{"taskspec.resources.inputs.name"}, + }, + }, { + name: "invalid output resource", + fields: fields{ + Resources: &v1alpha2.TaskResources{ + Outputs: []v1alpha2.TaskResource{invalidResource}, + }, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `invalid value: what`, + Paths: []string{"taskspec.resources.outputs.source.Type"}, + }, + }, { + name: "one invalid output resource", + fields: fields{ + Resources: &v1alpha2.TaskResources{ + Outputs: []v1alpha2.TaskResource{validResource, invalidResource}, + }, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `invalid value: what`, + Paths: []string{"taskspec.resources.outputs.source.Type"}, + }, + }, { + name: "duplicated outputs resources", + fields: fields{ + Resources: &v1alpha2.TaskResources{ + Inputs: []v1alpha2.TaskResource{validResource}, + Outputs: []v1alpha2.TaskResource{validResource, validResource}, + }, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `expected exactly one, got both`, + Paths: []string{"taskspec.resources.outputs.name"}, + }, + }, { + name: "invalid param type", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "validparam", + Type: v1alpha2.ParamTypeString, + Description: "parameter", + Default: builder.ArrayOrString("default"), + }, { + Name: "param-with-invalid-type", + Type: "invalidtype", + Description: "invalidtypedesc", + Default: builder.ArrayOrString("default"), + }}, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `invalid value: invalidtype`, + Paths: []string{"taskspec.params.param-with-invalid-type.type"}, + }, + }, { + name: "param mismatching 
default/type 1", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "task", + Type: v1alpha2.ParamTypeArray, + Description: "param", + Default: builder.ArrayOrString("default"), + }}, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `"array" type does not match default value's type: "string"`, + Paths: []string{"taskspec.params.task.type", "taskspec.params.task.default.type"}, + }, + }, { + name: "param mismatching default/type 2", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "task", + Type: v1alpha2.ParamTypeString, + Description: "param", + Default: builder.ArrayOrString("default", "array"), + }}, + Steps: validSteps, + }, + expectedError: apis.FieldError{ + Message: `"string" type does not match default value's type: "array"`, + Paths: []string{"taskspec.params.task.type", "taskspec.params.task.default.type"}, + }, + }, { + name: "invalid step", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "validparam", + Type: v1alpha2.ParamTypeString, + Description: "parameter", + Default: builder.ArrayOrString("default"), + }}, + Steps: []v1alpha2.Step{}, + }, + expectedError: apis.FieldError{ + Message: "missing field(s)", + Paths: []string{"steps"}, + }, + }, { + name: "invalid step name", + fields: fields{ + Steps: invalidSteps, + }, + expectedError: apis.FieldError{ + Message: `invalid value "replaceImage"`, + Paths: []string{"taskspec.steps.name"}, + Details: "Task step name must be a valid DNS Label, For more info refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", + }, + }, { + name: "inexistent param variable", + fields: fields{ + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "myimage", + Args: []string{"--flag=$(params.inexistent)"}, + }}}, + }, + expectedError: apis.FieldError{ + Message: `non-existent variable in "--flag=$(params.inexistent)" for step arg[0]`, + Paths: []string{"taskspec.steps.arg[0]"}, + }, + }, { + name: "array used in unaccepted field", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", + Type: v1alpha2.ParamTypeArray, + }, { + Name: "foo-is-baz", + Type: v1alpha2.ParamTypeArray, + }}, + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "$(params.baz)", + Command: []string{"$(param.foo-is-baz)"}, + Args: []string{"$(params.baz)", "middle string", "url"}, + WorkingDir: "/foo/bar/src/", + }}}, + }, + expectedError: apis.FieldError{ + Message: `variable type invalid in "$(params.baz)" for step image`, + Paths: []string{"taskspec.steps.image"}, + }, + }, { + name: "array not properly isolated", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", + Type: v1alpha2.ParamTypeArray, + }, { + Name: "foo-is-baz", + Type: v1alpha2.ParamTypeArray, + }}, + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "someimage", + Command: []string{"$(param.foo-is-baz)"}, + Args: []string{"not isolated: $(params.baz)", "middle string", "url"}, + WorkingDir: "/foo/bar/src/", + }}}, + }, + expectedError: apis.FieldError{ + Message: `variable is not properly isolated in "not isolated: $(params.baz)" for step arg[0]`, + Paths: []string{"taskspec.steps.arg[0]"}, + }, + }, { + name: "array not properly isolated", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", + Type: v1alpha2.ParamTypeArray, + }, { + Name: "foo-is-baz", + Type: v1alpha2.ParamTypeArray, + }}, + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "someimage", + Command: 
[]string{"$(param.foo-is-baz)"}, + Args: []string{"not isolated: $(params.baz)", "middle string", "url"}, + WorkingDir: "/foo/bar/src/", + }}}, + }, + expectedError: apis.FieldError{ + Message: `variable is not properly isolated in "not isolated: $(params.baz)" for step arg[0]`, + Paths: []string{"taskspec.steps.arg[0]"}, + }, + }, { + name: "inferred array not properly isolated", + fields: fields{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", + Default: &v1alpha2.ArrayOrString{ + Type: v1alpha2.ParamTypeArray, + ArrayVal: []string{"implied", "array", "type"}, + }, + }, { + Name: "foo-is-baz", + Default: &v1alpha2.ArrayOrString{ + Type: v1alpha2.ParamTypeArray, + ArrayVal: []string{"implied", "array", "type"}, + }, + }}, + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "someimage", + Command: []string{"$(param.foo-is-baz)"}, + Args: []string{"not isolated: $(params.baz)", "middle string", "url"}, + WorkingDir: "/foo/bar/src/", + }}}, + }, + expectedError: apis.FieldError{ + Message: `variable is not properly isolated in "not isolated: $(params.baz)" for step arg[0]`, + Paths: []string{"taskspec.steps.arg[0]"}, + }, + }, { + name: "Inexistent param variable with existing", + fields: fields{ + Params: []v1alpha2.ParamSpec{ + { + Name: "foo", + Description: "param", + Default: builder.ArrayOrString("default"), + }, + }, + Steps: []v1alpha2.Step{{Container: corev1.Container{ + Name: "mystep", + Image: "myimage", + Args: []string{"$(params.foo) && $(params.inexistent)"}, + }}}, + }, + expectedError: apis.FieldError{ + Message: `non-existent variable in "$(params.foo) && $(params.inexistent)" for step arg[0]`, + Paths: []string{"taskspec.steps.arg[0]"}, + }, + }, { + name: "Multiple volumes with same name", + fields: fields{ + Steps: validSteps, + Volumes: []corev1.Volume{{ + Name: "workspace", + }, { + Name: "workspace", + }}, + }, + expectedError: apis.FieldError{ + Message: `multiple volumes with same name "workspace"`, + Paths: []string{"volumes.name"}, + }, + }, { + name: "step with script and args", + fields: fields{ + Steps: []v1alpha2.Step{{ + Container: corev1.Container{ + Image: "myimage", + Args: []string{"arg"}, + }, + Script: "script", + }}, + }, + expectedError: apis.FieldError{ + Message: "script cannot be used with args or command", + Paths: []string{"steps.script"}, + }, + }, { + name: "step with script without shebang", + fields: fields{ + Steps: []v1alpha2.Step{{ + Container: corev1.Container{ + Image: "my-image", + }, + Script: "does not begin with shebang", + }}, + }, + expectedError: apis.FieldError{ + Message: "script must start with a shebang (#!)", + Paths: []string{"steps.script"}, + }, + }, { + name: "step with script and command", + fields: fields{ + Steps: []v1alpha2.Step{{ + Container: corev1.Container{ + Image: "myimage", + Command: []string{"command"}, + }, + Script: "script", + }}, + }, + expectedError: apis.FieldError{ + Message: "script cannot be used with args or command", + Paths: []string{"steps.script"}, + }, + }} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ts := &v1alpha2.TaskSpec{ + Params: tt.fields.Params, + Resources: tt.fields.Resources, + Steps: tt.fields.Steps, + Volumes: tt.fields.Volumes, + } + ctx := context.Background() + ts.SetDefaults(ctx) + err := ts.Validate(context.Background()) + if err == nil { + t.Fatalf("Expected an error, got nothing for %v", ts) + } + if d := cmp.Diff(tt.expectedError, *err, cmpopts.IgnoreUnexported(apis.FieldError{})); d != "" { + 
t.Errorf("TaskSpec.Validate() errors diff -want, +got: %v", d) + } + }) + } +} diff --git a/pkg/apis/pipeline/v1alpha2/types_test.go b/pkg/apis/pipeline/v1alpha2/types_test.go new file mode 100644 index 00000000000..ac4346cacce --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/types_test.go @@ -0,0 +1,34 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2_test + +import ( + "testing" + + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + "knative.dev/pkg/webhook" +) + +func TestTypes(t *testing.T) { + // Assert that types satisfy webhook interface. + // var _ webhook.GenericCRD = (*v1alpha2.ClusterTask)(nil) + // var _ webhook.GenericCRD = (*v1alpha2.TaskRun)(nil) + // var _ webhook.GenericCRD = (*v1alpha2.PipelineResource)(nil) + var _ webhook.GenericCRD = (*v1alpha2.Task)(nil) + // var _ webhook.GenericCRD = (*v1alpha2.TaskRun)(nil) + // var _ webhook.GenericCRD = (*v1alpha2.Condition)(nil) +} diff --git a/pkg/apis/pipeline/v1alpha2/zz_generated.deepcopy.go b/pkg/apis/pipeline/v1alpha2/zz_generated.deepcopy.go new file mode 100644 index 00000000000..f9a7702a544 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/zz_generated.deepcopy.go @@ -0,0 +1,328 @@ +// +build !ignore_autogenerated + +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by deepcopy-gen. DO NOT EDIT. + +package v1alpha2 + +import ( + v1 "k8s.io/api/core/v1" + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ArrayOrString) DeepCopyInto(out *ArrayOrString) { + *out = *in + if in.ArrayVal != nil { + in, out := &in.ArrayVal, &out.ArrayVal + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArrayOrString. +func (in *ArrayOrString) DeepCopy() *ArrayOrString { + if in == nil { + return nil + } + out := new(ArrayOrString) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *InternalTaskModifier) DeepCopyInto(out *InternalTaskModifier) { + *out = *in + if in.StepsToPrepend != nil { + in, out := &in.StepsToPrepend, &out.StepsToPrepend + *out = make([]Step, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.StepsToAppend != nil { + in, out := &in.StepsToAppend, &out.StepsToAppend + *out = make([]Step, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Volumes != nil { + in, out := &in.Volumes, &out.Volumes + *out = make([]v1.Volume, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InternalTaskModifier. +func (in *InternalTaskModifier) DeepCopy() *InternalTaskModifier { + if in == nil { + return nil + } + out := new(InternalTaskModifier) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Param) DeepCopyInto(out *Param) { + *out = *in + in.Value.DeepCopyInto(&out.Value) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Param. +func (in *Param) DeepCopy() *Param { + if in == nil { + return nil + } + out := new(Param) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ParamSpec) DeepCopyInto(out *ParamSpec) { + *out = *in + if in.Default != nil { + in, out := &in.Default, &out.Default + *out = new(ArrayOrString) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParamSpec. +func (in *ParamSpec) DeepCopy() *ParamSpec { + if in == nil { + return nil + } + out := new(ParamSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ResourceDeclaration) DeepCopyInto(out *ResourceDeclaration) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceDeclaration. +func (in *ResourceDeclaration) DeepCopy() *ResourceDeclaration { + if in == nil { + return nil + } + out := new(ResourceDeclaration) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ResourceParam) DeepCopyInto(out *ResourceParam) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceParam. +func (in *ResourceParam) DeepCopy() *ResourceParam { + if in == nil { + return nil + } + out := new(ResourceParam) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Step) DeepCopyInto(out *Step) { + *out = *in + in.Container.DeepCopyInto(&out.Container) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Step. +func (in *Step) DeepCopy() *Step { + if in == nil { + return nil + } + out := new(Step) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
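The ParamSpec defaulting invoked from task_defaults.go is what the "type implied" and "inferred array" test cases above rely on: when Type is omitted, it is expected to be inferred from the default value. A sketch of that behaviour, assuming exactly that inference (parameter and step names are made up):

package main

import (
	"context"
	"fmt"

	"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2"
	corev1 "k8s.io/api/core/v1"
)

func main() {
	ts := &v1alpha2.TaskSpec{
		Params: []v1alpha2.ParamSpec{{
			Name:    "flags",
			Default: &v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{"-v"}},
		}},
		Steps: []v1alpha2.Step{{Container: corev1.Container{Name: "run", Image: "alpine"}}},
	}

	// SetDefaults walks the params; with no explicit Type, the array default
	// should leave the param typed as "array".
	ts.SetDefaults(context.Background())
	fmt.Println(ts.Params[0].Type) // array
}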
+func (in *Task) DeepCopyInto(out *Task) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Task. +func (in *Task) DeepCopy() *Task { + if in == nil { + return nil + } + out := new(Task) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *Task) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TaskList) DeepCopyInto(out *TaskList) { + *out = *in + out.TypeMeta = in.TypeMeta + out.ListMeta = in.ListMeta + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]Task, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TaskList. +func (in *TaskList) DeepCopy() *TaskList { + if in == nil { + return nil + } + out := new(TaskList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *TaskList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TaskResource) DeepCopyInto(out *TaskResource) { + *out = *in + out.ResourceDeclaration = in.ResourceDeclaration + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TaskResource. +func (in *TaskResource) DeepCopy() *TaskResource { + if in == nil { + return nil + } + out := new(TaskResource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TaskResources) DeepCopyInto(out *TaskResources) { + *out = *in + if in.Inputs != nil { + in, out := &in.Inputs, &out.Inputs + *out = make([]TaskResource, len(*in)) + copy(*out, *in) + } + if in.Outputs != nil { + in, out := &in.Outputs, &out.Outputs + *out = make([]TaskResource, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TaskResources. +func (in *TaskResources) DeepCopy() *TaskResources { + if in == nil { + return nil + } + out := new(TaskResources) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
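The validation entry points can also be exercised directly, outside the table-driven tests; a minimal sketch that trips the shebang check, with the expected message and path matching the cases above:

package main

import (
	"context"
	"fmt"

	"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2"
	corev1 "k8s.io/api/core/v1"
)

func main() {
	ts := &v1alpha2.TaskSpec{
		Steps: []v1alpha2.Step{{
			Container: corev1.Container{Image: "alpine"},
			Script:    "echo hello", // no #! line
		}},
	}

	if err := ts.Validate(context.Background()); err != nil {
		// Expect: script must start with a shebang (#!) [steps.script]
		fmt.Println(err.Message, err.Paths)
	}
}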
+func (in *TaskSpec) DeepCopyInto(out *TaskSpec) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(TaskResources) + (*in).DeepCopyInto(*out) + } + if in.Params != nil { + in, out := &in.Params, &out.Params + *out = make([]ParamSpec, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Steps != nil { + in, out := &in.Steps, &out.Steps + *out = make([]Step, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Volumes != nil { + in, out := &in.Volumes, &out.Volumes + *out = make([]v1.Volume, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.StepTemplate != nil { + in, out := &in.StepTemplate, &out.StepTemplate + *out = new(v1.Container) + (*in).DeepCopyInto(*out) + } + if in.Sidecars != nil { + in, out := &in.Sidecars, &out.Sidecars + *out = make([]v1.Container, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TaskSpec. +func (in *TaskSpec) DeepCopy() *TaskSpec { + if in == nil { + return nil + } + out := new(TaskSpec) + in.DeepCopyInto(out) + return out +} diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline_client.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline_client.go index 704efc1e9d2..b01051d0d42 100644 --- a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline_client.go +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline_client.go @@ -19,6 +19,7 @@ limitations under the License. package fake import ( + v1alpha2 "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/typed/pipeline/v1alpha2" rest "k8s.io/client-go/rest" testing "k8s.io/client-go/testing" ) @@ -27,6 +28,10 @@ type FakeTektonV1alpha2 struct { *testing.Fake } +func (c *FakeTektonV1alpha2) Tasks(namespace string) v1alpha2.TaskInterface { + return &FakeTasks{c, namespace} +} + // RESTClient returns a RESTClient that is used to communicate // with API server by this client implementation. func (c *FakeTektonV1alpha2) RESTClient() rest.Interface { diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_task.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_task.go new file mode 100644 index 00000000000..b8dc46324b3 --- /dev/null +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_task.go @@ -0,0 +1,128 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by client-gen. DO NOT EDIT. 
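The fake client wiring above is what unit tests use in place of a live API server. A sketch, assuming the NewSimpleClientset constructor that client-gen normally places in the generated versioned fake package:

package main

import (
	"fmt"

	"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2"
	fakeclient "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/fake"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	cs := fakeclient.NewSimpleClientset()

	// Create and read back a Task entirely in memory; FakeTasks records each
	// call as an action on the embedded testing.Fake.
	task := &v1alpha2.Task{ObjectMeta: metav1.ObjectMeta{Name: "echo", Namespace: "default"}}
	if _, err := cs.TektonV1alpha2().Tasks("default").Create(task); err != nil {
		panic(err)
	}

	got, err := cs.TektonV1alpha2().Tasks("default").Get("echo", metav1.GetOptions{})
	fmt.Println(got.Name, err) // echo <nil>
}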
+ +package fake + +import ( + v1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + labels "k8s.io/apimachinery/pkg/labels" + schema "k8s.io/apimachinery/pkg/runtime/schema" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + testing "k8s.io/client-go/testing" +) + +// FakeTasks implements TaskInterface +type FakeTasks struct { + Fake *FakeTektonV1alpha2 + ns string +} + +var tasksResource = schema.GroupVersionResource{Group: "tekton.dev", Version: "v1alpha2", Resource: "tasks"} + +var tasksKind = schema.GroupVersionKind{Group: "tekton.dev", Version: "v1alpha2", Kind: "Task"} + +// Get takes name of the task, and returns the corresponding task object, and an error if there is any. +func (c *FakeTasks) Get(name string, options v1.GetOptions) (result *v1alpha2.Task, err error) { + obj, err := c.Fake. + Invokes(testing.NewGetAction(tasksResource, c.ns, name), &v1alpha2.Task{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha2.Task), err +} + +// List takes label and field selectors, and returns the list of Tasks that match those selectors. +func (c *FakeTasks) List(opts v1.ListOptions) (result *v1alpha2.TaskList, err error) { + obj, err := c.Fake. + Invokes(testing.NewListAction(tasksResource, tasksKind, c.ns, opts), &v1alpha2.TaskList{}) + + if obj == nil { + return nil, err + } + + label, _, _ := testing.ExtractFromListOptions(opts) + if label == nil { + label = labels.Everything() + } + list := &v1alpha2.TaskList{ListMeta: obj.(*v1alpha2.TaskList).ListMeta} + for _, item := range obj.(*v1alpha2.TaskList).Items { + if label.Matches(labels.Set(item.Labels)) { + list.Items = append(list.Items, item) + } + } + return list, err +} + +// Watch returns a watch.Interface that watches the requested tasks. +func (c *FakeTasks) Watch(opts v1.ListOptions) (watch.Interface, error) { + return c.Fake. + InvokesWatch(testing.NewWatchAction(tasksResource, c.ns, opts)) + +} + +// Create takes the representation of a task and creates it. Returns the server's representation of the task, and an error, if there is any. +func (c *FakeTasks) Create(task *v1alpha2.Task) (result *v1alpha2.Task, err error) { + obj, err := c.Fake. + Invokes(testing.NewCreateAction(tasksResource, c.ns, task), &v1alpha2.Task{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha2.Task), err +} + +// Update takes the representation of a task and updates it. Returns the server's representation of the task, and an error, if there is any. +func (c *FakeTasks) Update(task *v1alpha2.Task) (result *v1alpha2.Task, err error) { + obj, err := c.Fake. + Invokes(testing.NewUpdateAction(tasksResource, c.ns, task), &v1alpha2.Task{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha2.Task), err +} + +// Delete takes name of the task and deletes it. Returns an error if one occurs. +func (c *FakeTasks) Delete(name string, options *v1.DeleteOptions) error { + _, err := c.Fake. + Invokes(testing.NewDeleteAction(tasksResource, c.ns, name), &v1alpha2.Task{}) + + return err +} + +// DeleteCollection deletes a collection of objects. +func (c *FakeTasks) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { + action := testing.NewDeleteCollectionAction(tasksResource, c.ns, listOptions) + + _, err := c.Fake.Invokes(action, &v1alpha2.TaskList{}) + return err +} + +// Patch applies the patch and returns the patched task. 
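Patch on the generated client just forwards the raw bytes and patch type to the server, or here to the fake tracker. A sketch using a JSON merge patch (task name and label are made up, and NewSimpleClientset is assumed as above):

package main

import (
	"fmt"

	"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2"
	fakeclient "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/fake"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
)

func main() {
	cs := fakeclient.NewSimpleClientset(&v1alpha2.Task{
		ObjectMeta: metav1.ObjectMeta{Name: "echo", Namespace: "default"},
	})

	// A JSON merge patch that only touches labels; the fake tracker applies it
	// to the stored object much like a real API server would.
	patch := []byte(`{"metadata":{"labels":{"app":"demo"}}}`)
	patched, err := cs.TektonV1alpha2().Tasks("default").Patch("echo", types.MergePatchType, patch)
	if err != nil {
		panic(err)
	}
	fmt.Println(patched.Labels["app"]) // demo
}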
+func (c *FakeTasks) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha2.Task, err error) { + obj, err := c.Fake. + Invokes(testing.NewPatchSubresourceAction(tasksResource, c.ns, name, pt, data, subresources...), &v1alpha2.Task{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha2.Task), err +} diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/generated_expansion.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/generated_expansion.go index abe3edb7feb..61dab461677 100644 --- a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/generated_expansion.go +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/generated_expansion.go @@ -17,3 +17,5 @@ limitations under the License. // Code generated by client-gen. DO NOT EDIT. package v1alpha2 + +type TaskExpansion interface{} diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline_client.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline_client.go index 549f004a5f7..52493bbea87 100644 --- a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline_client.go +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline_client.go @@ -27,6 +27,7 @@ import ( type TektonV1alpha2Interface interface { RESTClient() rest.Interface + TasksGetter } // TektonV1alpha2Client is used to interact with features provided by the tekton.dev group. @@ -34,6 +35,10 @@ type TektonV1alpha2Client struct { restClient rest.Interface } +func (c *TektonV1alpha2Client) Tasks(namespace string) TaskInterface { + return newTasks(c, namespace) +} + // NewForConfig creates a new TektonV1alpha2Client for the given config. func NewForConfig(c *rest.Config) (*TektonV1alpha2Client, error) { config := *c diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/task.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/task.go new file mode 100644 index 00000000000..bdc655bb45c --- /dev/null +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/task.go @@ -0,0 +1,174 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by client-gen. DO NOT EDIT. + +package v1alpha2 + +import ( + "time" + + v1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + scheme "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/scheme" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + rest "k8s.io/client-go/rest" +) + +// TasksGetter has a method to return a TaskInterface. +// A group's client should implement this interface. +type TasksGetter interface { + Tasks(namespace string) TaskInterface +} + +// TaskInterface has methods to work with Task resources. 
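With the Tasks getter wired into TektonV1alpha2Interface, the typed client below is reachable from the generated clientset. A sketch, assuming the standard versioned.NewForConfig constructor; the kubeconfig path is illustrative only:

package main

import (
	"fmt"

	versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // illustrative path
	if err != nil {
		panic(err)
	}

	cs, err := versioned.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// List v1alpha2 Tasks in the default namespace through the new getter.
	tasks, err := cs.TektonV1alpha2().Tasks("default").List(metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for _, t := range tasks.Items {
		fmt.Println(t.Name)
	}
}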
+type TaskInterface interface { + Create(*v1alpha2.Task) (*v1alpha2.Task, error) + Update(*v1alpha2.Task) (*v1alpha2.Task, error) + Delete(name string, options *v1.DeleteOptions) error + DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error + Get(name string, options v1.GetOptions) (*v1alpha2.Task, error) + List(opts v1.ListOptions) (*v1alpha2.TaskList, error) + Watch(opts v1.ListOptions) (watch.Interface, error) + Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha2.Task, err error) + TaskExpansion +} + +// tasks implements TaskInterface +type tasks struct { + client rest.Interface + ns string +} + +// newTasks returns a Tasks +func newTasks(c *TektonV1alpha2Client, namespace string) *tasks { + return &tasks{ + client: c.RESTClient(), + ns: namespace, + } +} + +// Get takes name of the task, and returns the corresponding task object, and an error if there is any. +func (c *tasks) Get(name string, options v1.GetOptions) (result *v1alpha2.Task, err error) { + result = &v1alpha2.Task{} + err = c.client.Get(). + Namespace(c.ns). + Resource("tasks"). + Name(name). + VersionedParams(&options, scheme.ParameterCodec). + Do(). + Into(result) + return +} + +// List takes label and field selectors, and returns the list of Tasks that match those selectors. +func (c *tasks) List(opts v1.ListOptions) (result *v1alpha2.TaskList, err error) { + var timeout time.Duration + if opts.TimeoutSeconds != nil { + timeout = time.Duration(*opts.TimeoutSeconds) * time.Second + } + result = &v1alpha2.TaskList{} + err = c.client.Get(). + Namespace(c.ns). + Resource("tasks"). + VersionedParams(&opts, scheme.ParameterCodec). + Timeout(timeout). + Do(). + Into(result) + return +} + +// Watch returns a watch.Interface that watches the requested tasks. +func (c *tasks) Watch(opts v1.ListOptions) (watch.Interface, error) { + var timeout time.Duration + if opts.TimeoutSeconds != nil { + timeout = time.Duration(*opts.TimeoutSeconds) * time.Second + } + opts.Watch = true + return c.client.Get(). + Namespace(c.ns). + Resource("tasks"). + VersionedParams(&opts, scheme.ParameterCodec). + Timeout(timeout). + Watch() +} + +// Create takes the representation of a task and creates it. Returns the server's representation of the task, and an error, if there is any. +func (c *tasks) Create(task *v1alpha2.Task) (result *v1alpha2.Task, err error) { + result = &v1alpha2.Task{} + err = c.client.Post(). + Namespace(c.ns). + Resource("tasks"). + Body(task). + Do(). + Into(result) + return +} + +// Update takes the representation of a task and updates it. Returns the server's representation of the task, and an error, if there is any. +func (c *tasks) Update(task *v1alpha2.Task) (result *v1alpha2.Task, err error) { + result = &v1alpha2.Task{} + err = c.client.Put(). + Namespace(c.ns). + Resource("tasks"). + Name(task.Name). + Body(task). + Do(). + Into(result) + return +} + +// Delete takes name of the task and deletes it. Returns an error if one occurs. +func (c *tasks) Delete(name string, options *v1.DeleteOptions) error { + return c.client.Delete(). + Namespace(c.ns). + Resource("tasks"). + Name(name). + Body(options). + Do(). + Error() +} + +// DeleteCollection deletes a collection of objects. +func (c *tasks) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { + var timeout time.Duration + if listOptions.TimeoutSeconds != nil { + timeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second + } + return c.client.Delete(). + Namespace(c.ns). 
+ Resource("tasks"). + VersionedParams(&listOptions, scheme.ParameterCodec). + Timeout(timeout). + Body(options). + Do(). + Error() +} + +// Patch applies the patch and returns the patched task. +func (c *tasks) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha2.Task, err error) { + result = &v1alpha2.Task{} + err = c.client.Patch(pt). + Namespace(c.ns). + Resource("tasks"). + SubResource(subresources...). + Name(name). + Body(data). + Do(). + Into(result) + return +} diff --git a/pkg/client/informers/externalversions/generic.go b/pkg/client/informers/externalversions/generic.go index a8f87a56336..9ef0cdd5a7c 100644 --- a/pkg/client/informers/externalversions/generic.go +++ b/pkg/client/informers/externalversions/generic.go @@ -22,6 +22,7 @@ import ( "fmt" v1alpha1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha1" + v1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" schema "k8s.io/apimachinery/pkg/runtime/schema" cache "k8s.io/client-go/tools/cache" ) @@ -68,6 +69,10 @@ func (f *sharedInformerFactory) ForResource(resource schema.GroupVersionResource case v1alpha1.SchemeGroupVersion.WithResource("taskruns"): return &genericInformer{resource: resource.GroupResource(), informer: f.Tekton().V1alpha1().TaskRuns().Informer()}, nil + // Group=tekton.dev, Version=v1alpha2 + case v1alpha2.SchemeGroupVersion.WithResource("tasks"): + return &genericInformer{resource: resource.GroupResource(), informer: f.Tekton().V1alpha2().Tasks().Informer()}, nil + } return nil, fmt.Errorf("no informer found for %v", resource) diff --git a/pkg/client/informers/externalversions/pipeline/interface.go b/pkg/client/informers/externalversions/pipeline/interface.go index c31159ee4be..72662a82687 100644 --- a/pkg/client/informers/externalversions/pipeline/interface.go +++ b/pkg/client/informers/externalversions/pipeline/interface.go @@ -21,12 +21,15 @@ package tekton import ( internalinterfaces "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/internalinterfaces" v1alpha1 "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1alpha1" + v1alpha2 "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1alpha2" ) // Interface provides access to each of this group's versions. type Interface interface { // V1alpha1 provides access to shared informers for resources in V1alpha1. V1alpha1() v1alpha1.Interface + // V1alpha2 provides access to shared informers for resources in V1alpha2. + V1alpha2() v1alpha2.Interface } type group struct { @@ -44,3 +47,8 @@ func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakList func (g *group) V1alpha1() v1alpha1.Interface { return v1alpha1.New(g.factory, g.namespace, g.tweakListOptions) } + +// V1alpha2 returns a new v1alpha2.Interface. +func (g *group) V1alpha2() v1alpha2.Interface { + return v1alpha2.New(g.factory, g.namespace, g.tweakListOptions) +} diff --git a/pkg/client/informers/externalversions/pipeline/v1alpha2/interface.go b/pkg/client/informers/externalversions/pipeline/v1alpha2/interface.go new file mode 100644 index 00000000000..994beeea500 --- /dev/null +++ b/pkg/client/informers/externalversions/pipeline/v1alpha2/interface.go @@ -0,0 +1,45 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by informer-gen. DO NOT EDIT. + +package v1alpha2 + +import ( + internalinterfaces "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/internalinterfaces" +) + +// Interface provides access to all the informers in this group version. +type Interface interface { + // Tasks returns a TaskInformer. + Tasks() TaskInformer +} + +type version struct { + factory internalinterfaces.SharedInformerFactory + namespace string + tweakListOptions internalinterfaces.TweakListOptionsFunc +} + +// New returns a new Interface. +func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakListOptions internalinterfaces.TweakListOptionsFunc) Interface { + return &version{factory: f, namespace: namespace, tweakListOptions: tweakListOptions} +} + +// Tasks returns a TaskInformer. +func (v *version) Tasks() TaskInformer { + return &taskInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions} +} diff --git a/pkg/client/informers/externalversions/pipeline/v1alpha2/task.go b/pkg/client/informers/externalversions/pipeline/v1alpha2/task.go new file mode 100644 index 00000000000..d8bddca6a97 --- /dev/null +++ b/pkg/client/informers/externalversions/pipeline/v1alpha2/task.go @@ -0,0 +1,89 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by informer-gen. DO NOT EDIT. + +package v1alpha2 + +import ( + time "time" + + pipelinev1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" + internalinterfaces "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/internalinterfaces" + v1alpha2 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1alpha2" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + runtime "k8s.io/apimachinery/pkg/runtime" + watch "k8s.io/apimachinery/pkg/watch" + cache "k8s.io/client-go/tools/cache" +) + +// TaskInformer provides access to a shared informer and lister for +// Tasks. +type TaskInformer interface { + Informer() cache.SharedIndexInformer + Lister() v1alpha2.TaskLister +} + +type taskInformer struct { + factory internalinterfaces.SharedInformerFactory + tweakListOptions internalinterfaces.TweakListOptionsFunc + namespace string +} + +// NewTaskInformer constructs a new informer for Task type. +// Always prefer using an informer factory to get a shared informer instead of getting an independent +// one. This reduces memory footprint and number of connections to the server. 
+func NewTaskInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer { + return NewFilteredTaskInformer(client, namespace, resyncPeriod, indexers, nil) +} + +// NewFilteredTaskInformer constructs a new informer for Task type. +// Always prefer using an informer factory to get a shared informer instead of getting an independent +// one. This reduces memory footprint and number of connections to the server. +func NewFilteredTaskInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer { + return cache.NewSharedIndexInformer( + &cache.ListWatch{ + ListFunc: func(options v1.ListOptions) (runtime.Object, error) { + if tweakListOptions != nil { + tweakListOptions(&options) + } + return client.TektonV1alpha2().Tasks(namespace).List(options) + }, + WatchFunc: func(options v1.ListOptions) (watch.Interface, error) { + if tweakListOptions != nil { + tweakListOptions(&options) + } + return client.TektonV1alpha2().Tasks(namespace).Watch(options) + }, + }, + &pipelinev1alpha2.Task{}, + resyncPeriod, + indexers, + ) +} + +func (f *taskInformer) defaultInformer(client versioned.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer { + return NewFilteredTaskInformer(client, f.namespace, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions) +} + +func (f *taskInformer) Informer() cache.SharedIndexInformer { + return f.factory.InformerFor(&pipelinev1alpha2.Task{}, f.defaultInformer) +} + +func (f *taskInformer) Lister() v1alpha2.TaskLister { + return v1alpha2.NewTaskLister(f.Informer().GetIndexer()) +} diff --git a/pkg/client/injection/informers/pipeline/v1alpha2/task/fake/fake.go b/pkg/client/injection/informers/pipeline/v1alpha2/task/fake/fake.go new file mode 100644 index 00000000000..675368cd38c --- /dev/null +++ b/pkg/client/injection/informers/pipeline/v1alpha2/task/fake/fake.go @@ -0,0 +1,40 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. 
+ +package fake + +import ( + "context" + + fake "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory/fake" + task "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1alpha2/task" + controller "knative.dev/pkg/controller" + injection "knative.dev/pkg/injection" +) + +var Get = task.Get + +func init() { + injection.Fake.RegisterInformer(withInformer) +} + +func withInformer(ctx context.Context) (context.Context, controller.Informer) { + f := fake.Get(ctx) + inf := f.Tekton().V1alpha2().Tasks() + return context.WithValue(ctx, task.Key{}, inf), inf.Informer() +} diff --git a/pkg/client/injection/informers/pipeline/v1alpha2/task/task.go b/pkg/client/injection/informers/pipeline/v1alpha2/task/task.go new file mode 100644 index 00000000000..ee1f1c3ec0e --- /dev/null +++ b/pkg/client/injection/informers/pipeline/v1alpha2/task/task.go @@ -0,0 +1,52 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package task + +import ( + "context" + + v1alpha2 "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1alpha2" + factory "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory" + controller "knative.dev/pkg/controller" + injection "knative.dev/pkg/injection" + logging "knative.dev/pkg/logging" +) + +func init() { + injection.Default.RegisterInformer(withInformer) +} + +// Key is used for associating the Informer inside the context.Context. +type Key struct{} + +func withInformer(ctx context.Context) (context.Context, controller.Informer) { + f := factory.Get(ctx) + inf := f.Tekton().V1alpha2().Tasks() + return context.WithValue(ctx, Key{}, inf), inf.Informer() +} + +// Get extracts the typed informer from the context. +func Get(ctx context.Context) v1alpha2.TaskInformer { + untyped := ctx.Value(Key{}) + if untyped == nil { + logging.FromContext(ctx).Panic( + "Unable to fetch github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1alpha2.TaskInformer from context.") + } + return untyped.(v1alpha2.TaskInformer) +} diff --git a/pkg/client/listers/pipeline/v1alpha2/expansion_generated.go b/pkg/client/listers/pipeline/v1alpha2/expansion_generated.go new file mode 100644 index 00000000000..0bdeafb14e8 --- /dev/null +++ b/pkg/client/listers/pipeline/v1alpha2/expansion_generated.go @@ -0,0 +1,27 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by lister-gen. DO NOT EDIT. 
+ +package v1alpha2 + +// TaskListerExpansion allows custom methods to be added to +// TaskLister. +type TaskListerExpansion interface{} + +// TaskNamespaceListerExpansion allows custom methods to be added to +// TaskNamespaceLister. +type TaskNamespaceListerExpansion interface{} diff --git a/pkg/client/listers/pipeline/v1alpha2/task.go b/pkg/client/listers/pipeline/v1alpha2/task.go new file mode 100644 index 00000000000..9b8b2ee73eb --- /dev/null +++ b/pkg/client/listers/pipeline/v1alpha2/task.go @@ -0,0 +1,94 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by lister-gen. DO NOT EDIT. + +package v1alpha2 + +import ( + v1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/tools/cache" +) + +// TaskLister helps list Tasks. +type TaskLister interface { + // List lists all Tasks in the indexer. + List(selector labels.Selector) (ret []*v1alpha2.Task, err error) + // Tasks returns an object that can list and get Tasks. + Tasks(namespace string) TaskNamespaceLister + TaskListerExpansion +} + +// taskLister implements the TaskLister interface. +type taskLister struct { + indexer cache.Indexer +} + +// NewTaskLister returns a new TaskLister. +func NewTaskLister(indexer cache.Indexer) TaskLister { + return &taskLister{indexer: indexer} +} + +// List lists all Tasks in the indexer. +func (s *taskLister) List(selector labels.Selector) (ret []*v1alpha2.Task, err error) { + err = cache.ListAll(s.indexer, selector, func(m interface{}) { + ret = append(ret, m.(*v1alpha2.Task)) + }) + return ret, err +} + +// Tasks returns an object that can list and get Tasks. +func (s *taskLister) Tasks(namespace string) TaskNamespaceLister { + return taskNamespaceLister{indexer: s.indexer, namespace: namespace} +} + +// TaskNamespaceLister helps list and get Tasks. +type TaskNamespaceLister interface { + // List lists all Tasks in the indexer for a given namespace. + List(selector labels.Selector) (ret []*v1alpha2.Task, err error) + // Get retrieves the Task from the indexer for a given namespace and name. + Get(name string) (*v1alpha2.Task, error) + TaskNamespaceListerExpansion +} + +// taskNamespaceLister implements the TaskNamespaceLister +// interface. +type taskNamespaceLister struct { + indexer cache.Indexer + namespace string +} + +// List lists all Tasks in the indexer for a given namespace. +func (s taskNamespaceLister) List(selector labels.Selector) (ret []*v1alpha2.Task, err error) { + err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { + ret = append(ret, m.(*v1alpha2.Task)) + }) + return ret, err +} + +// Get retrieves the Task from the indexer for a given namespace and name. 
+func (s taskNamespaceLister) Get(name string) (*v1alpha2.Task, error) { + obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) + if err != nil { + return nil, err + } + if !exists { + return nil, errors.NewNotFound(v1alpha2.Resource("task"), name) + } + return obj.(*v1alpha2.Task), nil +}
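
Not part of the diff above: a minimal usage sketch of the v1alpha2 typed client this change generates. The kubeconfig path, the "default" namespace, the Task name "echo", and the bare ObjectMeta-only Task are illustrative assumptions; the calls themselves (NewForConfig, TektonV1alpha2().Tasks(ns).Create/List with the pre-context signatures) follow the TaskInterface shown earlier in this diff.

// usage_sketch.go (illustrative only, not generated code)
package main

import (
	"fmt"
	"log"

	"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2"
	clientset "github.com/tektoncd/pipeline/pkg/client/clientset/versioned"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a rest.Config from a kubeconfig; an in-cluster config would work too.
	// The path is an assumption for the example.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		log.Fatal(err)
	}

	// The generated clientset exposes TektonV1alpha2().Tasks(namespace),
	// i.e. the TaskInterface defined in this change.
	cs, err := clientset.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// Create a (skeletal, metadata-only) v1alpha2 Task through the typed client.
	t := &v1alpha2.Task{ObjectMeta: metav1.ObjectMeta{Name: "echo", Namespace: "default"}}
	if _, err := cs.TektonV1alpha2().Tasks("default").Create(t); err != nil {
		log.Fatal(err)
	}

	// List Tasks in the namespace via the same interface (pre-context signature,
	// matching the generated code in this diff).
	list, err := cs.TektonV1alpha2().Tasks("default").List(metav1.ListOptions{})
	if err != nil {
		log.Fatal(err)
	}
	for _, item := range list.Items {
		fmt.Println(item.Name)
	}
}

Inside a reconciler, the injected informer from pkg/client/injection/informers/pipeline/v1alpha2/task would normally be used instead of direct API reads: task.Get(ctx).Lister().Tasks(ns).Get(name) serves the request from the shared informer cache via the TaskLister and TaskNamespaceLister added above.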