
Commit 8cc7364

Add configuration to pipeline stanza of DSC to enable InstructLab pipeline

lburgazzoli committed Jan 31, 2025
1 parent 46d8ba2 · commit 8cc7364
Showing 14 changed files with 283 additions and 11 deletions.
23 changes: 23 additions & 0 deletions apis/components/v1alpha1/datasciencepipelines/types.go
@@ -0,0 +1,23 @@
// +kubebuilder:object:generate=true

// Package datasciencepipelines provides a set of types used for the DataSciencePipelines component.
package datasciencepipelines

import operatorv1 "github.com/openshift/api/operator/v1"

type ManagedPipelinesSpec struct {
    // Configures whether to automatically import the InstructLab pipeline.
    // You must enable the trainingoperator component to run the InstructLab pipeline.
    InstructLab ManagedPipelineOptions `json:"instructLab,omitempty"`
}

type ManagedPipelineOptions struct {
    // Set to one of the following values:
    //
    // - "Managed" : This pipeline is automatically imported.
    // - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported.
    //
    // +kubebuilder:validation:Enum=Managed;Removed
    // +kubebuilder:default=Removed
    State operatorv1.ManagementState `json:"state,omitempty"`
}
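
As a usage sketch, the new stanza is meant to be set from the DataScienceCluster resource roughly as follows; the apiVersion, resource name, and surrounding component layout shown here follow the usual DataScienceCluster shape and are assumed for illustration rather than taken from this commit:

apiVersion: datasciencecluster.opendatahub.io/v1
kind: DataScienceCluster
metadata:
  name: default-dsc
spec:
  components:
    datasciencepipelines:
      managementState: Managed
      managedPipelines:
        instructLab:
          state: Managed        # "Removed" (the default) skips the automatic import
    trainingoperator:
      managementState: Managed  # needed to actually run the InstructLab pipeline, per the comment above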


2 changes: 2 additions & 0 deletions apis/components/v1alpha1/datasciencepipelines_types.go
@@ -18,6 +18,7 @@ package v1alpha1

import (
    "github.com/opendatahub-io/opendatahub-operator/v2/apis/common"
    "github.com/opendatahub-io/opendatahub-operator/v2/apis/components/v1alpha1/datasciencepipelines"
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

@@ -51,6 +52,7 @@ type DataSciencePipelinesSpec struct {

type DataSciencePipelinesCommonSpec struct {
    common.DevFlagsSpec `json:",inline"`
    PreloadedPipelines  datasciencepipelines.ManagedPipelinesSpec `json:"managedPipelines,omitempty"`
}

// DataSciencePipelinesCommonStatus defines the shared observed state of DataSciencePipelines
1 change: 1 addition & 0 deletions apis/components/v1alpha1/zz_generated.deepcopy.go

Some generated files are not rendered by default.

@@ -75,6 +75,27 @@ spec:
                      type: object
                    type: array
                type: object
              managedPipelines:
                properties:
                  instructLab:
                    description: |-
                      Configures whether to automatically import the InstructLab pipeline.
                      You must enable the trainingoperator component to run the InstructLab pipeline.
                    properties:
                      state:
                        default: Removed
                        description: |-
                          Set to one of the following values:
                          - "Managed" : This pipeline is automatically imported.
                          - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported.
                        enum:
                        - Managed
                        - Removed
                        pattern: ^(Managed|Unmanaged|Force|Removed)$
                        type: string
                    type: object
                type: object
            type: object
          status:
            description: DataSciencePipelinesStatus defines the observed state of
@@ -172,6 +172,27 @@ spec:
                              type: object
                            type: array
                        type: object
                      managedPipelines:
                        properties:
                          instructLab:
                            description: |-
                              Configures whether to automatically import the InstructLab pipeline.
                              You must enable the trainingoperator component to run the InstructLab pipeline.
                            properties:
                              state:
                                default: Removed
                                description: |-
                                  Set to one of the following values:
                                  - "Managed" : This pipeline is automatically imported.
                                  - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported.
                                enum:
                                - Managed
                                - Removed
                                pattern: ^(Managed|Unmanaged|Force|Removed)$
                                type: string
                            type: object
                        type: object
                      managementState:
                        description: |-
                          Set to one of the following values:
@@ -75,6 +75,27 @@ spec:
                      type: object
                    type: array
                type: object
              managedPipelines:
                properties:
                  instructLab:
                    description: |-
                      Configures whether to automatically import the InstructLab pipeline.
                      You must enable the trainingoperator component to run the InstructLab pipeline.
                    properties:
                      state:
                        default: Removed
                        description: |-
                          Set to one of the following values:
                          - "Managed" : This pipeline is automatically imported.
                          - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported.
                        enum:
                        - Managed
                        - Removed
                        pattern: ^(Managed|Unmanaged|Force|Removed)$
                        type: string
                    type: object
                type: object
            type: object
          status:
            description: DataSciencePipelinesStatus defines the observed state of
@@ -172,6 +172,27 @@ spec:
                              type: object
                            type: array
                        type: object
                      managedPipelines:
                        properties:
                          instructLab:
                            description: |-
                              Configures whether to automatically import the InstructLab pipeline.
                              You must enable the trainingoperator component to run the InstructLab pipeline.
                            properties:
                              state:
                                default: Removed
                                description: |-
                                  Set to one of the following values:
                                  - "Managed" : This pipeline is automatically imported.
                                  - "Removed" : This pipeline is not automatically imported when a new pipeline server or DSPA is created. If previously set to "Managed", setting to "Removed" does not remove existing preloaded pipelines but does prevent future updates from being imported.
                                enum:
                                - Managed
                                - Removed
                                pattern: ^(Managed|Unmanaged|Force|Removed)$
                                type: string
                            type: object
                        type: object
                      managementState:
                        description: |-
                          Set to one of the following values:
@@ -38,15 +38,15 @@ func (s *componentHandler) GetManagementState(dsc *dscv1.DataScienceCluster) ope
 }
 
 func (s *componentHandler) Init(_ common.Platform) error {
-    if err := deploy.ApplyParams(paramsPath().String(), imageParamMap); err != nil {
-        return fmt.Errorf("failed to update images on path %s: %w", paramsPath(), err)
+    if err := deploy.ApplyParams(paramsPath, imageParamMap); err != nil {
+        return fmt.Errorf("failed to update images on path %s: %w", paramsPath, err)
     }
 
     return nil
 }
 
 func (s *componentHandler) NewCRObject(dsc *dscv1.DataScienceCluster) common.PlatformObject {
-    return &componentApi.DataSciencePipelines{
+    obj := componentApi.DataSciencePipelines{
         TypeMeta: metav1.TypeMeta{
             Kind:       componentApi.DataSciencePipelinesKind,
             APIVersion: componentApi.GroupVersion.String(),
@@ -61,6 +61,14 @@ func (s *componentHandler) NewCRObject(dsc *dscv1.DataScienceCluster) common.Pla
             DataSciencePipelinesCommonSpec: dsc.Spec.Components.DataSciencePipelines.DataSciencePipelinesCommonSpec,
         },
     }
+
+    // Since the nested structures are not pointers, we must make sure
+    // every field respects the validation rules.
+    if obj.Spec.PreloadedPipelines.InstructLab.State == "" {
+        obj.Spec.PreloadedPipelines.InstructLab.State = operatorv1.Removed
+    }
+
+    return &obj
 }

func (s *componentHandler) UpdateDSCStatus(dsc *dscv1.DataScienceCluster, obj client.Object) error {
@@ -70,7 +70,16 @@ func checkPreConditions(ctx context.Context, rr *odhtypes.ReconciliationRequest)
 }
 
 func initialize(_ context.Context, rr *odhtypes.ReconciliationRequest) error {
-    rr.Manifests = append(rr.Manifests, manifestPath(rr.Release.Name))
+    rr.Manifests = []odhtypes.ManifestInfo{manifestPath(rr.Release.Name)}
+
+    extraParamsMap, err := computeParamsMap(rr)
+    if err != nil {
+        return fmt.Errorf("computing extra params failed: %w", err)
+    }
+
+    if err := odhdeploy.ApplyParams(paramsPath, nil, extraParamsMap); err != nil {
+        return fmt.Errorf("failed to update params.env from %s : %w", paramsPath, err)
+    }
 
     return nil
 }
@@ -1,6 +1,10 @@
 package datasciencepipelines
 
 import (
+    "encoding/json"
+    "fmt"
+    "path"
+
     conditionsv1 "github.com/openshift/custom-resource-status/conditions/v1"
 
     "github.com/opendatahub-io/opendatahub-operator/v2/apis/common"
@@ -21,6 +25,9 @@ const (
     // via Kustomize. Since a deployment selector is immutable, we can't upgrade existing
     // deployment to the new component name, so keep it around till we figure out a solution.
     LegacyComponentName = "data-science-pipelines-operator"
+
+    managedPipelineParamsKey = "MANAGEDPIPELINES"
+    platformVersionParamsKey = "PLATFORMVERSION"
 )
 
 var (
@@ -42,20 +49,38 @@ var (
         cluster.OpenDataHub: "overlays/odh",
         cluster.Unknown:     "overlays/odh",
     }
+
+    paramsPath = path.Join(odhdeploy.DefaultManifestPath, ComponentName, "base")
 )
 
-func paramsPath() types.ManifestInfo {
+func manifestPath(p common.Platform) types.ManifestInfo {
     return types.ManifestInfo{
         Path:       odhdeploy.DefaultManifestPath,
         ContextDir: ComponentName,
-        SourcePath: "base",
+        SourcePath: overlaysSourcePaths[p],
     }
 }
 
-func manifestPath(p common.Platform) types.ManifestInfo {
-    return types.ManifestInfo{
-        Path:       odhdeploy.DefaultManifestPath,
-        ContextDir: ComponentName,
-        SourcePath: overlaysSourcePaths[p],
+func computeParamsMap(rr *types.ReconciliationRequest) (map[string]string, error) {
+    dsp, ok := rr.Instance.(*componentApi.DataSciencePipelines)
+    if !ok {
+        return nil, fmt.Errorf("resource instance %v is not a componentApi.DataSciencePipelines", rr.Instance)
     }
 
+    data, err := json.Marshal(dsp.Spec.PreloadedPipelines)
+    if err != nil {
+        return nil, fmt.Errorf("marshalling preloaded pipelines failed: %w", err)
+    }
+
+    data, err = json.Marshal(string(data))
+    if err != nil {
+        return nil, fmt.Errorf("marshalling preloaded pipelines failed: %w", err)
+    }
+
+    extraParamsMap := map[string]string{
+        managedPipelineParamsKey: string(data),
+        platformVersionParamsKey: rr.Release.Version.String(),
+    }
+
+    return extraParamsMap, nil
 }
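
A note on the two json.Marshal calls above: the first serializes the spec into a JSON document, and the second re-encodes that document as a single JSON string so the whole thing can travel as one opaque parameter value. As an illustration (values inferred from the code, not part of the commit), a spec whose InstructLab state is "Managed" flows through as:

json.Marshal(dsp.Spec.PreloadedPipelines)  →  {"instructLab":{"state":"Managed"}}
json.Marshal(string(data))                 →  "{\"instructLab\":{\"state\":\"Managed\"}}"

That final string is what lands in the params map under MANAGEDPIPELINES, next to PLATFORMVERSION, and is then written out by odhdeploy.ApplyParams in initialize() above; the exact quoting that ends up in params.env depends on ApplyParams, which is not shown in this diff.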
55 changes: 55 additions & 0 deletions controllers/components/datasciencepipelines/datasciencepipelines_support_test.go
@@ -0,0 +1,55 @@
//nolint:testpackage
package datasciencepipelines

import (
    "encoding/json"
    "testing"

    "github.com/blang/semver/v4"
    "github.com/operator-framework/api/pkg/lib/version"

    componentApi "github.com/opendatahub-io/opendatahub-operator/v2/apis/components/v1alpha1"
    "github.com/opendatahub-io/opendatahub-operator/v2/apis/components/v1alpha1/datasciencepipelines"
    "github.com/opendatahub-io/opendatahub-operator/v2/pkg/cluster"
    "github.com/opendatahub-io/opendatahub-operator/v2/pkg/controller/types"

    . "github.com/onsi/gomega"
)

func TestComputeParamsMap(t *testing.T) {
    g := NewWithT(t)

    dsp := componentApi.DataSciencePipelines{
        Spec: componentApi.DataSciencePipelinesSpec{
            DataSciencePipelinesCommonSpec: componentApi.DataSciencePipelinesCommonSpec{
                PreloadedPipelines: datasciencepipelines.ManagedPipelinesSpec{},
            },
        },
    }

    v := semver.MustParse("1.2.3")
    rr := types.ReconciliationRequest{
        Instance: &dsp,
        Release: cluster.Release{

[Check failure on line 33 in controllers/components/datasciencepipelines/datasciencepipelines_support_test.go — GitHub Actions ("Run tests and collect coverage" and golangci-lint): undefined: cluster.Release (typecheck)]

            Version: version.OperatorVersion{
                Version: v,
            },
        },
    }

    result, err := computeParamsMap(&rr)
    g.Expect(err).ShouldNot(HaveOccurred())
    g.Expect(result).ShouldNot(BeEmpty())

    // Marshal the expected value for comparison
    expectedData, err := json.Marshal(dsp.Spec.PreloadedPipelines)
    g.Expect(err).ShouldNot(HaveOccurred())

    expectedData, err = json.Marshal(string(expectedData))
    g.Expect(err).ShouldNot(HaveOccurred())

    g.Expect(result).Should(And(
        HaveKeyWithValue(managedPipelineParamsKey, string(expectedData)),
        HaveKeyWithValue(platformVersionParamsKey, v.String()),
    ))
}
