diff --git a/common/runtime/model.go b/common/runtime/model.go
index d2e0e48e3f..db6c379e62 100644
--- a/common/runtime/model.go
+++ b/common/runtime/model.go
@@ -97,10 +97,11 @@ type ProvisioningParametersDTO struct {
     ShootName   string `json:"shootName,omitempty"`
     ShootDomain string `json:"shootDomain,omitempty"`
-    OIDC                   *OIDCConfigDTO `json:"oidc,omitempty"`
-    Networking             *NetworkingDTO `json:"networking,omitempty"`
-    Modules                *ModulesDTO    `json:"modules,omitempty"`
-    ShootAndSeedSameRegion *bool          `json:"shootAndSeedSameRegion,omitempty"`
+    OIDC                      *OIDCConfigDTO             `json:"oidc,omitempty"`
+    Networking                *NetworkingDTO             `json:"networking,omitempty"`
+    Modules                   *ModulesDTO                `json:"modules,omitempty"`
+    ShootAndSeedSameRegion    *bool                      `json:"shootAndSeedSameRegion,omitempty"`
+    AdditionalWorkerNodePools []AdditionalWorkerNodePool `json:"additionalWorkerNodePools,omitempty"`
 }
 
 type AutoScalerParameters struct {
@@ -386,3 +387,10 @@ type ModuleDTO struct {
     Channel              Channel              `json:"channel,omitempty" yaml:"channel,omitempty"`
     CustomResourcePolicy CustomResourcePolicy `json:"customResourcePolicy,omitempty" yaml:"customResourcePolicy,omitempty"`
 }
+
+type AdditionalWorkerNodePool struct {
+    AutoScalerParameters `json:",inline"`
+
+    Name        string  `json:"name"`
+    MachineType *string `json:"machineType,omitempty"`
+}
diff --git a/internal/broker/instance_create.go b/internal/broker/instance_create.go
index ec384e4cf5..9390a92ad4 100644
--- a/internal/broker/instance_create.go
+++ b/internal/broker/instance_create.go
@@ -295,6 +295,15 @@ func (b *ProvisionEndpoint) validateAndExtract(details domain.ProvisionDetails,
     if err := parameters.AutoScalerParameters.Validate(autoscalerMin, autoscalerMax); err != nil {
         return ersContext, parameters, apiresponses.NewFailureResponse(err, http.StatusUnprocessableEntity, err.Error())
     }
+
+    if IsPreviewPlan(details.PlanID) {
+        for _, workerNodePool := range parameters.AdditionalWorkerNodePools {
+            if err := workerNodePool.AutoScalerParameters.Validate(autoscalerMin, autoscalerMax); err != nil {
+                return ersContext, parameters, apiresponses.NewFailureResponse(err, http.StatusUnprocessableEntity, err.Error())
+            }
+        }
+    }
+
     if parameters.OIDC.IsProvided() {
         if err := parameters.OIDC.Validate(); err != nil {
             return ersContext, parameters, apiresponses.NewFailureResponse(err, http.StatusUnprocessableEntity, err.Error())
         }
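For readers skimming the patch, here is a minimal, self-contained sketch of the new parameter's wire format and the per-pool check that the loop above delegates to `AutoScalerParameters.Validate`. The types are re-declared locally for illustration (they are not imports of the broker's packages), and the min/max comparison is a simplified stand-in for the real validator:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local stand-ins for the DTOs added in this diff (illustration only).
type AutoScalerParameters struct {
	AutoScalerMin *int `json:"autoScalerMin,omitempty"`
	AutoScalerMax *int `json:"autoScalerMax,omitempty"`
}

type AdditionalWorkerNodePool struct {
	AutoScalerParameters `json:",inline"`

	Name        string  `json:"name"`
	MachineType *string `json:"machineType,omitempty"`
}

func main() {
	payload := `[{"name": "name-1", "machineType": "m6i.large", "autoScalerMin": 3, "autoScalerMax": 20}]`

	var pools []AdditionalWorkerNodePool
	if err := json.Unmarshal([]byte(payload), &pools); err != nil {
		panic(err)
	}

	// Simplified version of the per-pool rule enforced via Validate:
	// autoScalerMax must not be smaller than autoScalerMin.
	for _, pool := range pools {
		if pool.AutoScalerMin != nil && pool.AutoScalerMax != nil && *pool.AutoScalerMax < *pool.AutoScalerMin {
			fmt.Printf("pool %q: autoScalerMax %d is smaller than autoScalerMin %d\n", pool.Name, *pool.AutoScalerMax, *pool.AutoScalerMin)
		}
	}
}
```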
diff --git a/internal/broker/instance_create_test.go b/internal/broker/instance_create_test.go
index 73c30fc85e..ceeb38b8cb 100644
--- a/internal/broker/instance_create_test.go
+++ b/internal/broker/instance_create_test.go
@@ -1491,6 +1491,165 @@ func TestProvision_Provision(t *testing.T) {
         // then
         require.EqualError(t, err, "while validating input parameters: region: region must be one of the following: \"me-central2\"")
     })
+
+    t.Run("Should pass with additional worker node pools", func(t *testing.T) {
+        // given
+        memoryStorage := storage.NewMemoryStorage()
+
+        queue := &automock.Queue{}
+        queue.On("Add", mock.AnythingOfType("string"))
+
+        factoryBuilder := &automock.PlanValidator{}
+        factoryBuilder.On("IsPlanSupport", broker.PreviewPlanID).Return(true)
+
+        planDefaults := func(planID string, platformProvider pkg.CloudProvider, provider *pkg.CloudProvider) (*gqlschema.ClusterConfigInput, error) {
+            return &gqlschema.ClusterConfigInput{}, nil
+        }
+        kcBuilder := &kcMock.KcBuilder{}
+        kcBuilder.On("GetServerURL", "").Return("", fmt.Errorf("error"))
+        // #create provisioner endpoint
+        provisionEndpoint := broker.NewProvision(
+            broker.Config{
+                EnablePlans:              []string{"preview"},
+                URL:                      brokerURL,
+                OnlySingleTrialPerGA:     true,
+                EnableKubeconfigURLLabel: true,
+            },
+            gardener.Config{Project: "test", ShootDomain: "example.com", DNSProviders: fixDNSProviders()},
+            memoryStorage.Operations(),
+            memoryStorage.Instances(),
+            memoryStorage.InstancesArchived(),
+            queue,
+            factoryBuilder,
+            broker.PlansConfig{},
+            planDefaults,
+            log,
+            dashboardConfig,
+            kcBuilder,
+            whitelist.Set{},
+            &broker.OneForAllConvergedCloudRegionsProvider{},
+        )
+
+        additionalWorkerNodePools := `[{"name": "name-1", "machineType": "m6i.large", "autoScalerMin": 3, "autoScalerMax": 20}, {"name": "name-2", "machineType": "m6i.large", "autoScalerMin": 3, "autoScalerMax": 20}]`
+
+        // when
+        _, err := provisionEndpoint.Provision(fixRequestContext(t, "cf-sa30"), instanceID, domain.ProvisionDetails{
+            ServiceID:     serviceID,
+            PlanID:        broker.PreviewPlanID,
+            RawParameters: json.RawMessage(fmt.Sprintf(`{"name": "%s", "region": "%s","additionalWorkerNodePools": %s }`, clusterName, "eu-central-1", additionalWorkerNodePools)),
+            RawContext:    json.RawMessage(fmt.Sprintf(`{"globalaccount_id": "%s", "subaccount_id": "%s", "user_id": "%s"}`, "any-global-account-id", subAccountID, "Test@Test.pl")),
+        }, true)
+        t.Logf("%+v\n", *provisionEndpoint)
+
+        // then
+        require.NoError(t, err)
+    })
+
+    t.Run("Should pass with empty additional worker node pools", func(t *testing.T) {
+        // given
+        memoryStorage := storage.NewMemoryStorage()
+
+        queue := &automock.Queue{}
+        queue.On("Add", mock.AnythingOfType("string"))
+
+        factoryBuilder := &automock.PlanValidator{}
+        factoryBuilder.On("IsPlanSupport", broker.PreviewPlanID).Return(true)
+
+        planDefaults := func(planID string, platformProvider pkg.CloudProvider, provider *pkg.CloudProvider) (*gqlschema.ClusterConfigInput, error) {
+            return &gqlschema.ClusterConfigInput{}, nil
+        }
+        kcBuilder := &kcMock.KcBuilder{}
+        kcBuilder.On("GetServerURL", "").Return("", fmt.Errorf("error"))
+        // #create provisioner endpoint
+        provisionEndpoint := broker.NewProvision(
+            broker.Config{
+                EnablePlans:              []string{"preview"},
+                URL:                      brokerURL,
+                OnlySingleTrialPerGA:     true,
+                EnableKubeconfigURLLabel: true,
+            },
+            gardener.Config{Project: "test", ShootDomain: "example.com", DNSProviders: fixDNSProviders()},
+            memoryStorage.Operations(),
+            memoryStorage.Instances(),
+            memoryStorage.InstancesArchived(),
+            queue,
+            factoryBuilder,
+            broker.PlansConfig{},
+            planDefaults,
+            log,
+            dashboardConfig,
+            kcBuilder,
+            whitelist.Set{},
+            &broker.OneForAllConvergedCloudRegionsProvider{},
+        )
+
+        additionalWorkerNodePools := "[]"
+
+        // when
+        _, err := provisionEndpoint.Provision(fixRequestContext(t, "cf-sa30"), instanceID, domain.ProvisionDetails{
+            ServiceID:     serviceID,
+            PlanID:        broker.PreviewPlanID,
+            RawParameters: json.RawMessage(fmt.Sprintf(`{"name": "%s", "region": "%s","additionalWorkerNodePools": %s }`, clusterName, "eu-central-1", additionalWorkerNodePools)),
+            RawContext:    json.RawMessage(fmt.Sprintf(`{"globalaccount_id": "%s", "subaccount_id": "%s", "user_id": "%s"}`, "any-global-account-id", subAccountID, "Test@Test.pl")),
+        }, true)
+        t.Logf("%+v\n", *provisionEndpoint)
+
+        // then
+        require.NoError(t, err)
+    })
+
+    t.Run("Should fail for autoScalerMin bigger than autoScalerMax", func(t *testing.T) {
+        // given
+        memoryStorage := storage.NewMemoryStorage()
+
+        queue := &automock.Queue{}
+        queue.On("Add", mock.AnythingOfType("string"))
+
+        factoryBuilder := &automock.PlanValidator{}
+        factoryBuilder.On("IsPlanSupport", broker.PreviewPlanID).Return(true)
+
+        planDefaults := func(planID string, platformProvider pkg.CloudProvider, provider *pkg.CloudProvider) (*gqlschema.ClusterConfigInput, error) {
+            return &gqlschema.ClusterConfigInput{}, nil
+        }
+        kcBuilder := &kcMock.KcBuilder{}
+        kcBuilder.On("GetServerURL", "").Return("", fmt.Errorf("error"))
+        // #create provisioner endpoint
+        provisionEndpoint := broker.NewProvision(
+            broker.Config{
+                EnablePlans:              []string{"preview"},
+                URL:                      brokerURL,
+                OnlySingleTrialPerGA:     true,
+                EnableKubeconfigURLLabel: true,
+            },
+            gardener.Config{Project: "test", ShootDomain: "example.com", DNSProviders: fixDNSProviders()},
+            memoryStorage.Operations(),
+            memoryStorage.Instances(),
+            memoryStorage.InstancesArchived(),
+            queue,
+            factoryBuilder,
+            broker.PlansConfig{},
+            planDefaults,
+            log,
+            dashboardConfig,
+            kcBuilder,
+            whitelist.Set{},
+            &broker.OneForAllConvergedCloudRegionsProvider{},
+        )
+
+        additionalWorkerNodePools := `[{"name": "name-1", "machineType": "m6i.large", "autoScalerMin": 20, "autoScalerMax": 3}, {"name": "name-2", "machineType": "m6i.large", "autoScalerMin": 3, "autoScalerMax": 20}]`
+
+        // when
+        _, err := provisionEndpoint.Provision(fixRequestContext(t, "cf-sa30"), instanceID, domain.ProvisionDetails{
+            ServiceID:     serviceID,
+            PlanID:        broker.PreviewPlanID,
+            RawParameters: json.RawMessage(fmt.Sprintf(`{"name": "%s", "region": "%s","additionalWorkerNodePools": %s }`, clusterName, "eu-central-1", additionalWorkerNodePools)),
+            RawContext:    json.RawMessage(fmt.Sprintf(`{"globalaccount_id": "%s", "subaccount_id": "%s", "user_id": "%s"}`, "any-global-account-id", subAccountID, "Test@Test.pl")),
+        }, true)
+        t.Logf("%+v\n", *provisionEndpoint)
+
+        // then
+        require.EqualError(t, err, "AutoScalerMax 3 should be larger than AutoScalerMin 20. User provided values min:20, max:3; plan defaults min:0, max:0")
+    })
 }
 
 func TestNetworkingValidation(t *testing.T) {
diff --git a/internal/broker/instance_update.go b/internal/broker/instance_update.go
index 4c68186999..d355ca5d35 100644
--- a/internal/broker/instance_update.go
+++ b/internal/broker/instance_update.go
@@ -263,6 +263,15 @@ func (b *UpdateEndpoint) processUpdateParameters(instance *internal.Instance, de
         logger.Error(fmt.Sprintf("invalid autoscaler parameters: %s", err.Error()))
         return domain.UpdateServiceSpec{}, apiresponses.NewFailureResponse(err, http.StatusBadRequest, err.Error())
     }
+
+    if IsPreviewPlan(details.PlanID) {
+        for _, workerNodePool := range params.AdditionalWorkerNodePools {
+            if err := workerNodePool.AutoScalerParameters.Validate(autoscalerMin, autoscalerMax); err != nil {
+                return domain.UpdateServiceSpec{}, apiresponses.NewFailureResponse(err, http.StatusBadRequest, err.Error())
+            }
+        }
+    }
+
     err = b.operationStorage.InsertOperation(operation)
     if err != nil {
         return domain.UpdateServiceSpec{}, err
@@ -287,6 +296,17 @@ func (b *UpdateEndpoint) processUpdateParameters(instance *internal.Instance, de
     if params.MachineType != nil && *params.MachineType != "" {
         instance.Parameters.Parameters.MachineType = params.MachineType
     }
+
+    if IsPreviewPlan(details.PlanID) {
+        // if the list is empty remove additional worker node pools
+        if params.AdditionalWorkerNodePools != nil {
+            newAdditionalWorkerNodePools := make([]pkg.AdditionalWorkerNodePool, 0, len(params.AdditionalWorkerNodePools))
+            newAdditionalWorkerNodePools = append(newAdditionalWorkerNodePools, params.AdditionalWorkerNodePools...)
+            instance.Parameters.Parameters.AdditionalWorkerNodePools = newAdditionalWorkerNodePools
+            updateStorage = append(updateStorage, "Additional Worker Node Pools")
+        }
+    }
+
     if len(updateStorage) > 0 {
         if err := wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 2*time.Second, true, func(ctx context.Context) (bool, error) {
             instance, err = b.instanceStorage.Update(*instance)
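The `params.AdditionalWorkerNodePools != nil` guard above is what separates "field omitted" from "field set to an empty list": with `encoding/json`, an absent array stays a nil slice, while an explicit `[]` produces a non-nil, zero-length slice, so sending `[]` clears the pools. A standalone sketch of that distinction (the `updateParams` type is illustrative, not the broker's DTO):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative stand-in for the update DTO; only the field relevant here.
type updateParams struct {
	AdditionalWorkerNodePools []map[string]interface{} `json:"additionalWorkerNodePools"`
}

func main() {
	var omitted, emptied updateParams

	_ = json.Unmarshal([]byte(`{}`), &omitted)
	_ = json.Unmarshal([]byte(`{"additionalWorkerNodePools": []}`), &emptied)

	fmt.Println(omitted.AdditionalWorkerNodePools == nil) // true: field not sent, existing pools are kept
	fmt.Println(emptied.AdditionalWorkerNodePools == nil) // false: empty list sent, pools are removed
}
```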
diff --git a/internal/broker/instance_update_test.go b/internal/broker/instance_update_test.go
index 15c8952348..d1c91c7c79 100644
--- a/internal/broker/instance_update_test.go
+++ b/internal/broker/instance_update_test.go
@@ -591,6 +591,66 @@ func TestUpdateEndpoint_UpdateParameters(t *testing.T) {
         assert.Equal(t, expectedErr.ValidatedStatusCode(nil), apierr.ValidatedStatusCode(nil))
         assert.Equal(t, expectedErr.LoggerAction(), apierr.LoggerAction())
     })
+
+    t.Run("Should pass with additional worker node pools", func(t *testing.T) {
+        // given
+        additionalWorkerNodePools := `[{"name": "name-1", "machineType": "m6i.large", "autoScalerMin": 3, "autoScalerMax": 20}, {"name": "name-2", "machineType": "m6i.large", "autoScalerMin": 3, "autoScalerMax": 20}]`
+
+        // when
+        _, err := svc.Update(context.Background(), instanceID, domain.UpdateDetails{
+            ServiceID:       "",
+            PlanID:          PreviewPlanID,
+            RawParameters:   json.RawMessage("{\"additionalWorkerNodePools\":" + additionalWorkerNodePools + "}"),
+            PreviousValues:  domain.PreviousValues{},
+            RawContext:      json.RawMessage("{\"globalaccount_id\":\"globalaccount_id_1\", \"active\":true}"),
+            MaintenanceInfo: nil,
+        }, true)
+
+        // then
+        require.NoError(t, err)
+    })
+
+    t.Run("Should pass with empty additional worker node pools", func(t *testing.T) {
+        // given
+        additionalWorkerNodePools := "[]"
+
+        // when
+        _, err := svc.Update(context.Background(), instanceID, domain.UpdateDetails{
+            ServiceID:       "",
+            PlanID:          PreviewPlanID,
+            RawParameters:   json.RawMessage("{\"additionalWorkerNodePools\":" + additionalWorkerNodePools + "}"),
+            PreviousValues:  domain.PreviousValues{},
+            RawContext:      json.RawMessage("{\"globalaccount_id\":\"globalaccount_id_1\", \"active\":true}"),
+            MaintenanceInfo: nil,
+        }, true)
+
+        // then
+        require.NoError(t, err)
+    })
+
+    t.Run("Should fail for autoScalerMin bigger than autoScalerMax", func(t *testing.T) {
+        // given
+        additionalWorkerNodePools := `[{"name": "name-1", "machineType": "m6i.large", "autoScalerMin": 20, "autoScalerMax": 3}, {"name": "name-2", "machineType": "m6i.large", "autoScalerMin": 3, "autoScalerMax": 20}]`
+        errMsg := fmt.Errorf("AutoScalerMax 3 should be larger than AutoScalerMin 20. User provided values min:20, max:3; plan defaults min:0, max:0")
+        expectedErr := apiresponses.NewFailureResponse(errMsg, http.StatusBadRequest, errMsg.Error())
+
+        // when
+        _, err := svc.Update(context.Background(), instanceID, domain.UpdateDetails{
+            ServiceID:       "",
+            PlanID:          PreviewPlanID,
+            RawParameters:   json.RawMessage("{\"additionalWorkerNodePools\":" + additionalWorkerNodePools + "}"),
+            PreviousValues:  domain.PreviousValues{},
+            RawContext:      json.RawMessage("{\"globalaccount_id\":\"globalaccount_id_1\", \"active\":true}"),
+            MaintenanceInfo: nil,
+        }, true)
+
+        // then
+        require.Error(t, err)
+        assert.IsType(t, &apiresponses.FailureResponse{}, err)
+        apierr := err.(*apiresponses.FailureResponse)
+        assert.Equal(t, expectedErr.ValidatedStatusCode(nil), apierr.ValidatedStatusCode(nil))
+        assert.Equal(t, expectedErr.LoggerAction(), apierr.LoggerAction())
+    })
 }
 
 func TestUpdateEndpoint_UpdateWithEnabledDashboard(t *testing.T) {
diff --git a/internal/broker/plans.go b/internal/broker/plans.go
index e8b6b193ea..456effd61d 100644
--- a/internal/broker/plans.go
+++ b/internal/broker/plans.go
@@ -368,6 +368,7 @@ func SapConvergedCloudSchema(machineTypesDisplay, regionsDisplay map[string]stri
 func PreviewSchema(machineTypesDisplay, regionsDisplay map[string]string, machineTypes []string, additionalParams, update bool, euAccessRestricted bool) *map[string]interface{} {
     properties := NewProvisioningProperties(machineTypesDisplay, regionsDisplay, machineTypes, AWSRegions(euAccessRestricted), update)
     properties.Networking = NewNetworkingSchema()
+    properties.AdditionalWorkerNodePools = NewAdditionalWorkerNodePoolsSchema(machineTypesDisplay, machineTypes)
     return createSchemaWithProperties(properties, additionalParams, update, requiredSchemaProperties(), false, false)
 }
diff --git a/internal/broker/plans_schema.go b/internal/broker/plans_schema.go
index 08b49a66d9..1f0b5b7515 100644
--- a/internal/broker/plans_schema.go
+++ b/internal/broker/plans_schema.go
@@ -33,12 +33,13 @@ type ProvisioningProperties struct {
 }
 
 type UpdateProperties struct {
-    Kubeconfig     *Type     `json:"kubeconfig,omitempty"`
-    AutoScalerMin  *Type     `json:"autoScalerMin,omitempty"`
-    AutoScalerMax  *Type     `json:"autoScalerMax,omitempty"`
-    OIDC           *OIDCType `json:"oidc,omitempty"`
-    Administrators *Type     `json:"administrators,omitempty"`
-    MachineType    *Type     `json:"machineType,omitempty"`
+    Kubeconfig                *Type                          `json:"kubeconfig,omitempty"`
+    AutoScalerMin             *Type                          `json:"autoScalerMin,omitempty"`
+    AutoScalerMax             *Type                          `json:"autoScalerMax,omitempty"`
+    OIDC                      *OIDCType                      `json:"oidc,omitempty"`
+    Administrators            *Type                          `json:"administrators,omitempty"`
+    MachineType               *Type                          `json:"machineType,omitempty"`
+    AdditionalWorkerNodePools *AdditionalWorkerNodePoolsType `json:"additionalWorkerNodePools,omitempty"`
 }
 
 func (up *UpdateProperties) IncludeAdditional() {
@@ -147,6 +148,25 @@ type ModulesCustomListItemsProperties struct {
     CustomResourcePolicy Type `json:"customResourcePolicy,omitempty"`
 }
 
+type AdditionalWorkerNodePoolsType struct {
+    Type
+    Items AdditionalWorkerNodePoolsItems `json:"items,omitempty"`
+}
+
+type AdditionalWorkerNodePoolsItems struct {
+    Type
+    ControlsOrder []string                                 `json:"_controlsOrder,omitempty"`
+    Required      []string                                 `json:"required"`
+    Properties    AdditionalWorkerNodePoolsItemsProperties `json:"properties,omitempty"`
+}
+
+type AdditionalWorkerNodePoolsItemsProperties struct {
+    Name          Type `json:"name,omitempty"`
+    AutoScalerMin Type `json:"autoScalerMin,omitempty"`
+    AutoScalerMax Type `json:"autoScalerMax,omitempty"`
+    MachineType   Type `json:"machineType,omitempty"`
+}
+
 func NewModulesSchema() *Modules {
     return &Modules{
         Type: Type{
@@ -298,6 +318,7 @@ func NewProvisioningProperties(machineTypesDisplay, regionsDisplay map[string]st
             Type:            "string",
             Enum:            ToInterfaceSlice(machineTypes),
             EnumDisplayName: machineTypesDisplay,
+            Description:     "Specifies the type of the machine.",
         },
     },
         Name: NameProperty(),
@@ -386,7 +407,7 @@ func unmarshalOrPanic(from, to interface{}) interface{} {
 }
 
 func DefaultControlsOrder() []string {
-    return []string{"name", "kubeconfig", "shootName", "shootDomain", "region", "shootAndSeedSameRegion", "machineType", "autoScalerMin", "autoScalerMax", "zonesCount", "modules", "networking", "oidc", "administrators"}
+    return []string{"name", "kubeconfig", "shootName", "shootDomain", "region", "shootAndSeedSameRegion", "machineType", "autoScalerMin", "autoScalerMax", "additionalWorkerNodePools", "zonesCount", "modules", "networking", "oidc", "administrators"}
 }
 
 func ToInterfaceSlice(input []string) []interface{} {
@@ -407,3 +428,44 @@ func AdministratorsProperty() *Type {
         },
     }
 }
+
+func NewAdditionalWorkerNodePoolsSchema(machineTypesDisplay map[string]string, machineTypes []string) *AdditionalWorkerNodePoolsType {
+    return &AdditionalWorkerNodePoolsType{
+        Type: Type{
+            Type:        "array",
+            UniqueItems: true,
+            Description: "Specifies the list of additional worker node pools."},
+        Items: AdditionalWorkerNodePoolsItems{
+            ControlsOrder: []string{"name", "machineType", "autoScalerMin", "autoScalerMax"},
+            Required:      []string{"name", "machineType", "autoScalerMin", "autoScalerMax"},
+            Type: Type{
+                Type: "object",
+            },
+            Properties: AdditionalWorkerNodePoolsItemsProperties{
+                Name: Type{
+                    Type:        "string",
+                    MinLength:   1,
+                    Description: "Specifies the unique name of the additional worker node pool.",
+                },
+                MachineType: Type{
+                    Type:            "string",
+                    MinLength:       1,
+                    Enum:            ToInterfaceSlice(machineTypes),
+                    EnumDisplayName: machineTypesDisplay,
+                    Description:     "Specifies the type of the machine.",
+                },
+                AutoScalerMin: Type{
+                    Type:        "integer",
+                    Minimum:     0,
+                    Description: "Specifies the minimum number of virtual machines to create.",
+                },
+                AutoScalerMax: Type{
+                    Type:        "integer",
+                    Minimum:     0,
+                    Maximum:     300,
+                    Description: "Specifies the maximum number of virtual machines to create.",
+                },
+            },
+        },
+    }
+}
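For reference, the schema builder above should contribute roughly the following fragment to the preview plan's create and update schemas. The snippet builds an approximation by hand (the single-element `enum` and the exact key order are placeholders; the real fragment is rendered from the broker's `Type` structs):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hand-built approximation of the additionalWorkerNodePools schema fragment.
	fragment := map[string]interface{}{
		"type":        "array",
		"uniqueItems": true,
		"description": "Specifies the list of additional worker node pools.",
		"items": map[string]interface{}{
			"type":           "object",
			"_controlsOrder": []string{"name", "machineType", "autoScalerMin", "autoScalerMax"},
			"required":       []string{"name", "machineType", "autoScalerMin", "autoScalerMax"},
			"properties": map[string]interface{}{
				"name":          map[string]interface{}{"type": "string", "minLength": 1},
				"machineType":   map[string]interface{}{"type": "string", "minLength": 1, "enum": []string{"m6i.large"}},
				"autoScalerMin": map[string]interface{}{"type": "integer", "minimum": 0},
				"autoScalerMax": map[string]interface{}{"type": "integer", "minimum": 0, "maximum": 300},
			},
		},
	}

	out, _ := json.MarshalIndent(fragment, "", "  ")
	fmt.Println(string(out))
}
```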
diff --git a/internal/broker/testdata/aws/aws-schema-additional-params-eu.json b/internal/broker/testdata/aws/aws-schema-additional-params-eu.json
index 8e84b8c3b0..83e211ba85 100644
--- a/internal/broker/testdata/aws/aws-schema-additional-params-eu.json
+++ b/internal/broker/testdata/aws/aws-schema-additional-params-eu.json
@@ -50,6 +50,7 @@
 "m6i.8xlarge": "m6i.8xlarge (32vCPU, 128GB RAM)",
 "m6i.12xlarge": "m6i.12xlarge (48vCPU, 192GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "m6i.large",
 "m6i.xlarge",
diff --git a/internal/broker/testdata/aws/aws-schema-additional-params.json b/internal/broker/testdata/aws/aws-schema-additional-params.json
index e1e4770426..485a1e1c7a 100644
--- a/internal/broker/testdata/aws/aws-schema-additional-params.json
+++ b/internal/broker/testdata/aws/aws-schema-additional-params.json
@@ -50,6 +50,7 @@
 "m6i.8xlarge": "m6i.8xlarge (32vCPU, 128GB RAM)",
 "m6i.12xlarge": "m6i.12xlarge (48vCPU, 192GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "m6i.large",
 "m6i.xlarge",
diff --git a/internal/broker/testdata/aws/aws-schema-eu.json b/internal/broker/testdata/aws/aws-schema-eu.json
index e496d7dd19..d6f25691ff 100644
--- a/internal/broker/testdata/aws/aws-schema-eu.json
+++ b/internal/broker/testdata/aws/aws-schema-eu.json
@@ -39,6 +39,7 @@
 "m6i.8xlarge": "m6i.8xlarge (32vCPU, 128GB RAM)",
 "m6i.12xlarge": "m6i.12xlarge (48vCPU, 192GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "m6i.large",
 "m6i.xlarge",
diff --git a/internal/broker/testdata/aws/aws-schema.json b/internal/broker/testdata/aws/aws-schema.json
index 8a013c43af..0e21074473 100644
--- a/internal/broker/testdata/aws/aws-schema.json
+++ b/internal/broker/testdata/aws/aws-schema.json
@@ -39,6 +39,7 @@
 "m6i.8xlarge": "m6i.8xlarge (32vCPU, 128GB RAM)",
 "m6i.12xlarge": "m6i.12xlarge (48vCPU, 192GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "m6i.large",
 "m6i.xlarge",
diff --git a/internal/broker/testdata/aws/update-aws-schema-additional-params.json b/internal/broker/testdata/aws/update-aws-schema-additional-params.json
index b91dddadf8..e31f990449 100644
--- a/internal/broker/testdata/aws/update-aws-schema-additional-params.json
+++ b/internal/broker/testdata/aws/update-aws-schema-additional-params.json
@@ -43,6 +43,7 @@
 "m6i.8xlarge": "m6i.8xlarge (32vCPU, 128GB RAM)",
 "m6i.12xlarge": "m6i.12xlarge (48vCPU, 192GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "m6i.large",
 "m6i.xlarge",
diff --git a/internal/broker/testdata/aws/update-aws-schema.json b/internal/broker/testdata/aws/update-aws-schema.json
index 397ed8c894..2fb10c71ae 100644
--- a/internal/broker/testdata/aws/update-aws-schema.json
+++ b/internal/broker/testdata/aws/update-aws-schema.json
@@ -33,6 +33,7 @@
 "m6i.8xlarge": "m6i.8xlarge (32vCPU, 128GB RAM)",
 "m6i.12xlarge": "m6i.12xlarge (48vCPU, 192GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "m6i.large",
 "m6i.xlarge",
diff --git a/internal/broker/testdata/azure/azure-lite-schema-additional-params-eu-reduced.json b/internal/broker/testdata/azure/azure-lite-schema-additional-params-eu-reduced.json
index 967b8fc877..d69efcb2dc 100644
--- a/internal/broker/testdata/azure/azure-lite-schema-additional-params-eu-reduced.json
+++ b/internal/broker/testdata/azure/azure-lite-schema-additional-params-eu-reduced.json
@@ -39,6 +39,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D4s_v5",
 "Standard_D4_v3"
diff --git a/internal/broker/testdata/azure/azure-lite-schema-additional-params-eu.json b/internal/broker/testdata/azure/azure-lite-schema-additional-params-eu.json
index 62e646fd84..2627cdcb44 100644
--- a/internal/broker/testdata/azure/azure-lite-schema-additional-params-eu.json
+++ b/internal/broker/testdata/azure/azure-lite-schema-additional-params-eu.json
@@ -41,6 +41,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/azure-lite-schema-additional-params-reduced.json b/internal/broker/testdata/azure/azure-lite-schema-additional-params-reduced.json
index b212d9aae6..9d06b175b3 100644
--- a/internal/broker/testdata/azure/azure-lite-schema-additional-params-reduced.json
+++ b/internal/broker/testdata/azure/azure-lite-schema-additional-params-reduced.json
@@ -39,6 +39,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D4s_v5",
 "Standard_D4_v3"
diff --git a/internal/broker/testdata/azure/azure-lite-schema-additional-params.json b/internal/broker/testdata/azure/azure-lite-schema-additional-params.json
index 818e6599e6..35193fb802 100644
--- a/internal/broker/testdata/azure/azure-lite-schema-additional-params.json
+++ b/internal/broker/testdata/azure/azure-lite-schema-additional-params.json
@@ -41,6 +41,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/azure-lite-schema-eu-reduced.json b/internal/broker/testdata/azure/azure-lite-schema-eu-reduced.json
index 171ec2091a..52a573fdea 100644
--- a/internal/broker/testdata/azure/azure-lite-schema-eu-reduced.json
+++ b/internal/broker/testdata/azure/azure-lite-schema-eu-reduced.json
@@ -29,6 +29,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D4s_v5",
 "Standard_D4_v3"
diff --git a/internal/broker/testdata/azure/azure-lite-schema-eu.json b/internal/broker/testdata/azure/azure-lite-schema-eu.json
index 193203c83a..d8fa786a13 100644
--- a/internal/broker/testdata/azure/azure-lite-schema-eu.json
+++ b/internal/broker/testdata/azure/azure-lite-schema-eu.json
@@ -30,6 +30,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/azure-lite-schema-reduced.json b/internal/broker/testdata/azure/azure-lite-schema-reduced.json
index 2a568ca675..39ee6addc9 100644
--- a/internal/broker/testdata/azure/azure-lite-schema-reduced.json
+++ b/internal/broker/testdata/azure/azure-lite-schema-reduced.json
@@ -29,6 +29,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D4s_v5",
 "Standard_D4_v3"
diff --git a/internal/broker/testdata/azure/azure-lite-schema.json b/internal/broker/testdata/azure/azure-lite-schema.json
index bd6867c9e2..d758312b09 100644
--- a/internal/broker/testdata/azure/azure-lite-schema.json
+++ b/internal/broker/testdata/azure/azure-lite-schema.json
@@ -30,6 +30,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/azure-schema-additional-params-eu.json b/internal/broker/testdata/azure/azure-schema-additional-params-eu.json
index eeece583a1..8e2a97548d 100644
--- a/internal/broker/testdata/azure/azure-schema-additional-params-eu.json
+++ b/internal/broker/testdata/azure/azure-schema-additional-params-eu.json
@@ -51,6 +51,7 @@
 "Standard_D48_v3": "Standard_D48_v3 (48vCPU, 192GB RAM)",
 "Standard_D64_v3": "Standard_D64_v3 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/azure-schema-additional-params.json b/internal/broker/testdata/azure/azure-schema-additional-params.json
index 260d559ae9..bc1a02fe41 100644
--- a/internal/broker/testdata/azure/azure-schema-additional-params.json
+++ b/internal/broker/testdata/azure/azure-schema-additional-params.json
@@ -51,6 +51,7 @@
 "Standard_D48_v3": "Standard_D48_v3 (48vCPU, 192GB RAM)",
 "Standard_D64_v3": "Standard_D64_v3 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/azure-schema-eu.json b/internal/broker/testdata/azure/azure-schema-eu.json
index 858ece277b..0909320fc6 100644
--- a/internal/broker/testdata/azure/azure-schema-eu.json
+++ b/internal/broker/testdata/azure/azure-schema-eu.json
@@ -40,6 +40,7 @@
 "Standard_D48_v3": "Standard_D48_v3 (48vCPU, 192GB RAM)",
 "Standard_D64_v3": "Standard_D64_v3 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/azure-schema.json b/internal/broker/testdata/azure/azure-schema.json
index 69653f3b4a..3571760323 100644
--- a/internal/broker/testdata/azure/azure-schema.json
+++ b/internal/broker/testdata/azure/azure-schema.json
@@ -40,6 +40,7 @@
 "Standard_D48_v3": "Standard_D48_v3 (48vCPU, 192GB RAM)",
 "Standard_D64_v3": "Standard_D64_v3 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/update-azure-lite-schema-additional-params-reduced.json b/internal/broker/testdata/azure/update-azure-lite-schema-additional-params-reduced.json
index 0ad6206f7f..d011421031 100644
--- a/internal/broker/testdata/azure/update-azure-lite-schema-additional-params-reduced.json
+++ b/internal/broker/testdata/azure/update-azure-lite-schema-additional-params-reduced.json
@@ -33,6 +33,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D4s_v5",
 "Standard_D4_v3"
diff --git a/internal/broker/testdata/azure/update-azure-lite-schema-additional-params.json b/internal/broker/testdata/azure/update-azure-lite-schema-additional-params.json
index 6bd91fcf39..0d19b80153 100644
--- a/internal/broker/testdata/azure/update-azure-lite-schema-additional-params.json
+++ b/internal/broker/testdata/azure/update-azure-lite-schema-additional-params.json
@@ -34,6 +34,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/update-azure-lite-schema-reduced.json b/internal/broker/testdata/azure/update-azure-lite-schema-reduced.json
index 7d3af88f58..c07f62c940 100644
--- a/internal/broker/testdata/azure/update-azure-lite-schema-reduced.json
+++ b/internal/broker/testdata/azure/update-azure-lite-schema-reduced.json
@@ -23,6 +23,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D4s_v5",
 "Standard_D4_v3"
diff --git a/internal/broker/testdata/azure/update-azure-lite-schema.json b/internal/broker/testdata/azure/update-azure-lite-schema.json
index 608624e17c..4051a040bc 100644
--- a/internal/broker/testdata/azure/update-azure-lite-schema.json
+++ b/internal/broker/testdata/azure/update-azure-lite-schema.json
@@ -24,6 +24,7 @@
 "Standard_D4s_v5":"Standard_D4s_v5 (4vCPU, 16GB RAM)",
 "Standard_D4_v3":"Standard_D4_v3 (4vCPU, 16GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum":[
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/update-azure-schema-additional-params.json b/internal/broker/testdata/azure/update-azure-schema-additional-params.json
index 13b500f8e7..d5710df1ab 100644
--- a/internal/broker/testdata/azure/update-azure-schema-additional-params.json
+++ b/internal/broker/testdata/azure/update-azure-schema-additional-params.json
@@ -44,6 +44,7 @@
 "Standard_D48_v3": "Standard_D48_v3 (48vCPU, 192GB RAM)",
 "Standard_D64_v3": "Standard_D64_v3 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/azure/update-azure-schema.json b/internal/broker/testdata/azure/update-azure-schema.json
index 6d9207be2a..afcc688e56 100644
--- a/internal/broker/testdata/azure/update-azure-schema.json
+++ b/internal/broker/testdata/azure/update-azure-schema.json
@@ -34,6 +34,7 @@
 "Standard_D48_v3": "Standard_D48_v3 (48vCPU, 192GB RAM)",
 "Standard_D64_v3": "Standard_D64_v3 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "Standard_D2s_v5",
 "Standard_D4s_v5",
diff --git a/internal/broker/testdata/gcp/gcp-schema-additional-params-assured-workloads.json b/internal/broker/testdata/gcp/gcp-schema-additional-params-assured-workloads.json
index b551d81e67..e1d5160412 100644
--- a/internal/broker/testdata/gcp/gcp-schema-additional-params-assured-workloads.json
+++ b/internal/broker/testdata/gcp/gcp-schema-additional-params-assured-workloads.json
@@ -44,6 +44,7 @@
 "n2-standard-32": "n2-standard-32 (32vCPU, 128GB RAM)",
 "n2-standard-48": "n2-standard-48 (48vCPU, 192B RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "n2-standard-2",
 "n2-standard-4",
diff --git a/internal/broker/testdata/gcp/gcp-schema-additional-params.json b/internal/broker/testdata/gcp/gcp-schema-additional-params.json
index 9040c626b7..3fed50ee21 100644
--- a/internal/broker/testdata/gcp/gcp-schema-additional-params.json
+++ b/internal/broker/testdata/gcp/gcp-schema-additional-params.json
@@ -44,6 +44,7 @@
 "n2-standard-32": "n2-standard-32 (32vCPU, 128GB RAM)",
 "n2-standard-48": "n2-standard-48 (48vCPU, 192B RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "n2-standard-2",
 "n2-standard-4",
diff --git a/internal/broker/testdata/gcp/gcp-schema-assured-workloads.json b/internal/broker/testdata/gcp/gcp-schema-assured-workloads.json
index 13b2b7c284..6d31f5e66e 100644
--- a/internal/broker/testdata/gcp/gcp-schema-assured-workloads.json
+++ b/internal/broker/testdata/gcp/gcp-schema-assured-workloads.json
@@ -33,6 +33,7 @@
 "n2-standard-32": "n2-standard-32 (32vCPU, 128GB RAM)",
 "n2-standard-48": "n2-standard-48 (48vCPU, 192B RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "n2-standard-2",
 "n2-standard-4",
diff --git a/internal/broker/testdata/gcp/gcp-schema.json b/internal/broker/testdata/gcp/gcp-schema.json
index c62e0f1cde..5e059baf33 100644
--- a/internal/broker/testdata/gcp/gcp-schema.json
+++ b/internal/broker/testdata/gcp/gcp-schema.json
@@ -33,6 +33,7 @@
 "n2-standard-32": "n2-standard-32 (32vCPU, 128GB RAM)",
 "n2-standard-48": "n2-standard-48 (48vCPU, 192B RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "n2-standard-2",
 "n2-standard-4",
diff --git a/internal/broker/testdata/gcp/update-gcp-schema-additional-params.json b/internal/broker/testdata/gcp/update-gcp-schema-additional-params.json
index c405a0dcdf..31c0e98a95 100644
--- a/internal/broker/testdata/gcp/update-gcp-schema-additional-params.json
+++ b/internal/broker/testdata/gcp/update-gcp-schema-additional-params.json
@@ -37,6 +37,7 @@
 "n2-standard-32": "n2-standard-32 (32vCPU, 128GB RAM)",
 "n2-standard-48": "n2-standard-48 (48vCPU, 192B RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "n2-standard-2",
 "n2-standard-4",
diff --git a/internal/broker/testdata/gcp/update-gcp-schema.json b/internal/broker/testdata/gcp/update-gcp-schema.json
index 71eab32cdf..e0c1c39a7d 100644
--- a/internal/broker/testdata/gcp/update-gcp-schema.json
+++ b/internal/broker/testdata/gcp/update-gcp-schema.json
@@ -27,6 +27,7 @@
 "n2-standard-32": "n2-standard-32 (32vCPU, 128GB RAM)",
 "n2-standard-48": "n2-standard-48 (48vCPU, 192B RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "n2-standard-2",
 "n2-standard-4",
diff --git a/internal/broker/testdata/sap-converged-cloud/sap-converged-cloud-schema-additional-params.json b/internal/broker/testdata/sap-converged-cloud/sap-converged-cloud-schema-additional-params.json
index ebdbd88161..d226114f2c 100644
--- a/internal/broker/testdata/sap-converged-cloud/sap-converged-cloud-schema-additional-params.json
+++ b/internal/broker/testdata/sap-converged-cloud/sap-converged-cloud-schema-additional-params.json
@@ -46,6 +46,7 @@
 "g_c32_m128": "g_c32_m128 (32vCPU, 128GB RAM)",
 "g_c64_m256": "g_c64_m256 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "g_c2_m8",
 "g_c4_m16",
diff --git a/internal/broker/testdata/sap-converged-cloud/sap-converged-cloud-schema.json b/internal/broker/testdata/sap-converged-cloud/sap-converged-cloud-schema.json
index a1cd7c8062..921a043590 100644
--- a/internal/broker/testdata/sap-converged-cloud/sap-converged-cloud-schema.json
+++ b/internal/broker/testdata/sap-converged-cloud/sap-converged-cloud-schema.json
@@ -35,6 +35,7 @@
 "g_c32_m128": "g_c32_m128 (32vCPU, 128GB RAM)",
 "g_c64_m256": "g_c64_m256 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "g_c2_m8",
 "g_c4_m16",
diff --git a/internal/broker/testdata/sap-converged-cloud/update-sap-converged-cloud-schema-additional-params.json b/internal/broker/testdata/sap-converged-cloud/update-sap-converged-cloud-schema-additional-params.json
index 81ddc80553..5c4fa5cb49 100644
--- a/internal/broker/testdata/sap-converged-cloud/update-sap-converged-cloud-schema-additional-params.json
+++ b/internal/broker/testdata/sap-converged-cloud/update-sap-converged-cloud-schema-additional-params.json
@@ -39,6 +39,7 @@
 "g_c32_m128": "g_c32_m128 (32vCPU, 128GB RAM)",
 "g_c64_m256": "g_c64_m256 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "g_c2_m8",
 "g_c4_m16",
diff --git a/internal/broker/testdata/sap-converged-cloud/update-sap-converged-cloud-schema.json b/internal/broker/testdata/sap-converged-cloud/update-sap-converged-cloud-schema.json
index 368109d4cc..a89ebb63f8 100644
--- a/internal/broker/testdata/sap-converged-cloud/update-sap-converged-cloud-schema.json
+++ b/internal/broker/testdata/sap-converged-cloud/update-sap-converged-cloud-schema.json
@@ -29,6 +29,7 @@
 "g_c32_m128": "g_c32_m128 (32vCPU, 128GB RAM)",
 "g_c64_m256": "g_c64_m256 (64vCPU, 256GB RAM)"
 },
+"description": "Specifies the type of the machine.",
 "enum": [
 "g_c2_m8",
 "g_c4_m16",
diff --git a/internal/dto.go b/internal/dto.go
index 2e14ac4f55..3d3d48dfbe 100644
--- a/internal/dto.go
+++ b/internal/dto.go
@@ -52,9 +52,10 @@ func (p ProvisioningParameters) IsEqual(input ProvisioningParameters) bool {
 
 type UpdatingParametersDTO struct {
     pkg.AutoScalerParameters `json:",inline"`
 
-    OIDC                  *pkg.OIDCConfigDTO `json:"oidc,omitempty"`
-    RuntimeAdministrators []string           `json:"administrators,omitempty"`
-    MachineType           *string            `json:"machineType,omitempty"`
+    OIDC                      *pkg.OIDCConfigDTO             `json:"oidc,omitempty"`
+    RuntimeAdministrators     []string                       `json:"administrators,omitempty"`
+    MachineType               *string                        `json:"machineType,omitempty"`
+    AdditionalWorkerNodePools []pkg.AdditionalWorkerNodePool `json:"additionalWorkerNodePools"`
 }
 
 func (u UpdatingParametersDTO) UpdateAutoScaler(p *pkg.ProvisioningParametersDTO) bool {