diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..e2bf8bc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+*.tfstate
+*.tfstate.backup
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..4bf04c5
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,38 @@
+include:
+ - project: syseleven/gitlab-ci-templates
+ ref: 3.29.3
+ file: job-templates/TerraformValidate.yml
+ - project: syseleven/gitlab-ci-templates
+ ref: 3.29.3
+ file: job-templates/TerraformFormat.yml
+ - project: syseleven/gitlab-ci-templates
+ ref: 3.29.3
+ file: MarkdownLint.yml
+
+stages:
+ - test
+
+workflow:
+ rules:
+ - if: $CI_MERGE_REQUEST_IID
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ - if: $CI_COMMIT_TAG
+
+default:
+ retry:
+ max: 2
+ when:
+ - runner_system_failure
+
+##################################################################
+# JOBS #
+##################################################################
+
+terraform-format:
+ extends:
+ - .terraform-format
+
+terraform-validate:
+ extends:
+ - .terraform-validate
+
diff --git a/.markdownlintignore b/.markdownlintignore
new file mode 100644
index 0000000..130a1e5
--- /dev/null
+++ b/.markdownlintignore
@@ -0,0 +1,2 @@
+# it's generated and the generator has some interesting design choices, so ignore it
+docs/
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..c683a84
--- /dev/null
+++ b/README.md
@@ -0,0 +1,51 @@
+# terraform-provider-sys11dbaas
+
+## Generator
+
+This project was initially generated using [terraform-plugin-codegen-openapi](https://github.com/hashicorp/terraform-plugin-codegen-openapi)
+and [terraform-plugin-codegen-framework](https://github.com/hashicorp/terraform-plugin-codegen-framework).
+However, I had to fix so much by hand that I don't think those tools can ever be used again to add something to this provider.
+But feel free to try in a branch. I left the provider-spec file in the repo.
+
+## Development
+
+Override the provider in your `~/.terraformrc` file:
+
+```terraform
+provider_installation {
+
+ dev_overrides {
+ "registry.terraform.io/syseleven/sys11dbaas" = "/home/me/go/bin"
+ }
+
+ # For all other providers, install them directly from their origin provider
+ # registries as normal. If you omit this, Terraform will _only_ use
+ # the dev_overrides block, and so no other providers will be available.
+ direct {}
+}
+
+```
+
+Then run `go install` in this directory. You should now be able to use the provider.
+
+```terraform
+terraform {
+ required_providers {
+ sys11dbaas = {
+ source = "registry.terraform.io/syseleven/sys11dbaas"
+ }
+ }
+}
+
+provider "sys11dbaas" {
+ [...]
+}
+```
+
+Don't forget to run `go install` again after code changes.
+
+## Generate docs
+
+```bash
+go generate
+```
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..5ccab2b
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,24 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "sys11dbaas Provider"
+subcategory: ""
+description: |-
+
+---
+
+# sys11dbaas Provider
+
+
+
+
+
+
+## Schema
+
+### Required
+
+- `api_key` (String)
+- `organization` (String)
+- `project` (String)
+- `url` (String)
+- `wait_for_creation` (Boolean) Wait until databases are fully deployed and usable
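+
+A minimal provider configuration based on the schema above (the values shown are placeholders):
+
+```terraform
+provider "sys11dbaas" {
+  url               = "https://<dbaas-api-endpoint>"
+  api_key           = "<api-key>"
+  organization      = "<organization-id>"
+  project           = "<project-id>"
+  wait_for_creation = true
+}
+```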
diff --git a/docs/resources/database.md b/docs/resources/database.md
new file mode 100644
index 0000000..eddd2fb
--- /dev/null
+++ b/docs/resources/database.md
@@ -0,0 +1,113 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "sys11dbaas_database Resource - terraform-provider-sys11dbaas"
+subcategory: ""
+description: |-
+
+---
+
+# sys11dbaas_database (Resource)
+
+
+
+
+
+
+## Schema
+
+### Required
+
+- `application_config` (Attributes) (see [below for nested schema](#nestedatt--application_config))
+- `name` (String) The name of the database.
+- `service_config` (Attributes) (see [below for nested schema](#nestedatt--service_config))
+
+### Optional
+
+- `description` (String) full-text description of the database
+
+### Read-Only
+
+- `created_at` (String) the date when the database was created
+- `created_by` (String) the initial creator of the database
+- `last_modified_at` (String) the date when the database was last modified
+- `last_modified_by` (String) the user who last changed the database
+- `phase` (String)
+- `resource_status` (String)
+- `status` (String)
+- `uuid` (String) The UUID of the database.
+
+
+### Nested Schema for `application_config`
+
+Required:
+
+- `instances` (Number) How many nodes the cluster should have
+- `type` (String)
+- `version` (String) minor version of PostgreSQL
+
+Optional:
+
+- `password` (String, Sensitive) The password for the admin user
+- `recovery` (Attributes) (see [below for nested schema](#nestedatt--application_config--recovery))
+- `scheduled_backups` (Attributes) The scheduled backup policy for the database. (see [below for nested schema](#nestedatt--application_config--scheduled_backups))
+
+Read-Only:
+
+- `hostname` (String) The DNS name of the database, in the format uuid.postgresql.syseleven.services.
+- `ip_address` (String) The public IP address of the database. It will be pending if no address has been assigned yet.
+
+
+### Nested Schema for `application_config.recovery`
+
+Optional:
+
+- `exclusive` (Boolean)
+- `source` (String)
+- `target_lsn` (String)
+- `target_name` (String)
+- `target_time` (String)
+- `target_xid` (String)
+
+
+
+### Nested Schema for `application_config.scheduled_backups`
+
+Optional:
+
+- `retention` (Number) How long backups should be stored
+- `schedule` (Attributes) The schedules for the backup policy. (see [below for nested schema](#nestedatt--application_config--scheduled_backups--schedule))
+
+
+### Nested Schema for `application_config.scheduled_backups.schedule`
+
+Optional:
+
+- `hour` (Number) The hour when the full backup should start. If this value is omitted, a random hour between 1am and 5am will be generated.
+- `minute` (Number) The minute when the full backup should start. If this value is omitted, a random minute will be generated.
+
+
+
+
+
+### Nested Schema for `service_config`
+
+Required:
+
+- `disksize` (Number) Disk size in GB
+- `flavor` (String) VM flavor to use
+- `region` (String) the region for the database
+- `type` (String)
+
+Optional:
+
+- `maintenance_window` (Attributes) The maintenance window. This will be a time window for updates and maintenance. If omitted, a random window will be generated. (see [below for nested schema](#nestedatt--service_config--maintenance_window))
+- `remote_ips` (List of String) List of IP addresses that should be allowed to connect to the database
+
+
+### Nested Schema for `service_config.maintenance_window`
+
+Optional:
+
+- `day_of_week` (Number) Day of week as a cron time (0=Sun, 1=Mon, ..., 6=Sat). If omitted, a random day will be used.
+- `start_hour` (Number) Hour when the maintenance window starts. If omitted, a random hour between 20:00 and 04:00 will be used.
+- `start_minute` (Number) Minute when the maintenance window starts. If omitted, a random minute will be used.
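+
+A minimal configuration using only the required attributes (the values are placeholders, borrowed from `examples/postgresql/main.tf`):
+
+```terraform
+resource "sys11dbaas_database" "example" {
+  name = "example-db"
+
+  application_config = {
+    instances = 3
+    type      = "postgresql"
+    version   = "16.2"
+  }
+
+  service_config = {
+    disksize = 25
+    flavor   = "m2c.small"
+    region   = "dus2"
+    type     = "database"
+  }
+}
+```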
diff --git a/examples/postgresql/README.md b/examples/postgresql/README.md
new file mode 100644
index 0000000..e76bd17
--- /dev/null
+++ b/examples/postgresql/README.md
@@ -0,0 +1,5 @@
+# Example PostgreSQL DB
+
+```bash
+TF_VAR_api_key=$DBAAS_TOKEN TF_VAR_api_url=$DBAAS_URL TF_VAR_project=$DBAAS_PROJECT TF_VAR_org=$DBAAS_ORG terraform plan
+```
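+
+The `TF_VAR_*` values map to the variables declared in `main.tf` (`api_key`, `api_url`, `project`, `org`).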
diff --git a/examples/postgresql/main.tf b/examples/postgresql/main.tf
new file mode 100644
index 0000000..27486d4
--- /dev/null
+++ b/examples/postgresql/main.tf
@@ -0,0 +1,62 @@
+terraform {
+ required_version = ">= 1.8"
+ required_providers {
+ sys11dbaas = {
+ source = "registry.terraform.io/syseleven/sys11dbaas"
+ version = "~> 1"
+ }
+ }
+}
+
+variable "api_key" {
+ type = string
+}
+
+variable "api_url" {
+ type = string
+}
+
+variable "project" {
+ type = string
+}
+
+variable "org" {
+ type = string
+}
+
+provider "sys11dbaas" {
+ url = var.api_url
+ api_key = var.api_key
+ project = var.project
+ organization = var.org
+ wait_for_creation = true
+}
+
+resource "sys11dbaas_database" "my_first_tf_db" {
+ name = "my-first-terraform-db"
+ description = "this is my first terraform db"
+ application_config = {
+ instances = 3
+ password = "veryS3cretPassword"
+ type = "postgresql"
+ version = "16.2"
+ scheduled_backups = {
+ schedule = {
+ hour = 4
+ }
+ }
+ }
+ service_config = {
+ disksize = 25
+ flavor = "m2c.small"
+ region = "dus2"
+ type = "database"
+ remote_ips = [
+ "0.0.0.0/0"
+ ]
+ }
+}
+
+output "my_first_tf_db" {
+ value = [
+ resource.sys11dbaas_database.my_first_tf_db.uuid,
+ resource.sys11dbaas_database.my_first_tf_db.status,
+ resource.sys11dbaas_database.my_first_tf_db.phase,
+ resource.sys11dbaas_database.my_first_tf_db.resource_status,
+ ]
+}
diff --git a/gen-config.yaml b/gen-config.yaml
new file mode 100644
index 0000000..bda1259
--- /dev/null
+++ b/gen-config.yaml
@@ -0,0 +1,16 @@
+provider:
+ name: sys11dbaas
+resources:
+ database:
+ create:
+ path: /{org_id}/{project_id}/v1/databases
+ method: POST
+ read:
+ path: /{org_id}/{project_id}/v1/databases/{db_uuid}
+ method: GET
+ update:
+ path: /{org_id}/{project_id}/v1/databases/{db_uuid}
+ method: PUT
+ delete:
+ path: /{org_id}/{project_id}/v1/databases/{db_uuid}
+ method: DELETE
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..698967e
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,37 @@
+module terraform-provider-sys11dbaas
+
+go 1.21.5
+
+require (
+ github.com/hashicorp/terraform-plugin-framework v1.6.1
+ github.com/hashicorp/terraform-plugin-framework-validators v0.12.0
+ github.com/hashicorp/terraform-plugin-go v0.22.0
+ github.com/hashicorp/terraform-plugin-log v0.9.0
+ github.com/syseleven/sys11dbaas-sdk v0.0.0-00010101000000-000000000000
+)
+
+replace github.com/syseleven/sys11dbaas-sdk => ../sys11dbaas-sdk
+
+require (
+ github.com/fatih/color v1.16.0 // indirect
+ github.com/golang/protobuf v1.5.4 // indirect
+ github.com/hashicorp/go-hclog v1.6.2 // indirect
+ github.com/hashicorp/go-plugin v1.6.0 // indirect
+ github.com/hashicorp/go-uuid v1.0.3 // indirect
+ github.com/hashicorp/terraform-registry-address v0.2.3 // indirect
+ github.com/hashicorp/terraform-svchost v0.1.1 // indirect
+ github.com/hashicorp/yamux v0.1.1 // indirect
+ github.com/mattn/go-colorable v0.1.13 // indirect
+ github.com/mattn/go-isatty v0.0.20 // indirect
+ github.com/mitchellh/go-testing-interface v1.14.1 // indirect
+ github.com/oklog/run v1.1.0 // indirect
+ github.com/stretchr/testify v1.8.2 // indirect
+ github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
+ github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
+ golang.org/x/net v0.22.0 // indirect
+ golang.org/x/sys v0.18.0 // indirect
+ golang.org/x/text v0.14.0 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20240304212257-790db918fca8 // indirect
+ google.golang.org/grpc v1.62.1 // indirect
+ google.golang.org/protobuf v1.33.0 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..76ec473
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,84 @@
+github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA=
+github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
+github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
+github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/hashicorp/go-hclog v1.6.2 h1:NOtoftovWkDheyUM/8JW3QMiXyxJK3uHRK7wV04nD2I=
+github.com/hashicorp/go-hclog v1.6.2/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
+github.com/hashicorp/go-plugin v1.6.0 h1:wgd4KxHJTVGGqWBq4QPB1i5BZNEx9BR8+OFmHDmTk8A=
+github.com/hashicorp/go-plugin v1.6.0/go.mod h1:lBS5MtSSBZk0SHc66KACcjjlU6WzEVP/8pwz68aMkCI=
+github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
+github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/hashicorp/terraform-plugin-framework v1.6.1 h1:hw2XrmUu8d8jVL52ekxim2IqDc+2Kpekn21xZANARLU=
+github.com/hashicorp/terraform-plugin-framework v1.6.1/go.mod h1:aJI+n/hBPhz1J+77GdgNfk5svW12y7fmtxe/5L5IuwI=
+github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 h1:HOjBuMbOEzl7snOdOoUfE2Jgeto6JOjLVQ39Ls2nksc=
+github.com/hashicorp/terraform-plugin-framework-validators v0.12.0/go.mod h1:jfHGE/gzjxYz6XoUwi/aYiiKrJDeutQNUtGQXkaHklg=
+github.com/hashicorp/terraform-plugin-go v0.22.0 h1:1OS1Jk5mO0f5hrziWJGXXIxBrMe2j/B8E+DVGw43Xmc=
+github.com/hashicorp/terraform-plugin-go v0.22.0/go.mod h1:mPULV91VKss7sik6KFEcEu7HuTogMLLO/EvWCuFkRVE=
+github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0=
+github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow=
+github.com/hashicorp/terraform-registry-address v0.2.3 h1:2TAiKJ1A3MAkZlH1YI/aTVcLZRu7JseiXNRHbOAyoTI=
+github.com/hashicorp/terraform-registry-address v0.2.3/go.mod h1:lFHA76T8jfQteVfT7caREqguFrW3c4MFSPhZB7HHgUM=
+github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ=
+github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc=
+github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE=
+github.com/hashicorp/yamux v0.1.1/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ=
+github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c=
+github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo=
+github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
+github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
+github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
+github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
+github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU=
+github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8=
+github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA=
+github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
+github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8=
+github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
+github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
+github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
+golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
+golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
+golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
+golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240304212257-790db918fca8 h1:IR+hp6ypxjH24bkMfEJ0yHR21+gwPWdV+/IBrPQyn3k=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240304212257-790db918fca8/go.mod h1:UCOku4NytXMJuLQE5VuqA5lX3PcHCBo8pxNyvkf4xBs=
+google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk=
+google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
+google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
+google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/internal/provider/application_config.go b/internal/provider/application_config.go
new file mode 100644
index 0000000..2212b8c
--- /dev/null
+++ b/internal/provider/application_config.go
@@ -0,0 +1,2160 @@
+package provider
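+
+// This file implements the Terraform Plugin Framework custom object types and values for the
+// application_config attribute and its nested objects (recovery, scheduled_backups, schedule),
+// following the pattern produced by terraform-plugin-codegen-framework (see the README).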
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ sys11dbaassdk "github.com/syseleven/sys11dbaas-sdk"
+)
+
+var _ basetypes.ObjectTypable = ApplicationConfigType{}
+
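+// ApplicationConfigType is the custom object type for the database application_config attribute.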
+type ApplicationConfigType struct {
+ basetypes.ObjectType
+}
+
+func (t ApplicationConfigType) Equal(o attr.Type) bool {
+ other, ok := o.(ApplicationConfigType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t ApplicationConfigType) String() string {
+ return "ApplicationConfigType"
+}
+
+func (t ApplicationConfigType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ hostnameAttribute, ok := attributes["hostname"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `hostname is missing from object`)
+
+ return nil, diags
+ }
+
+ hostnameVal, ok := hostnameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`hostname expected to be basetypes.StringValue, was: %T`, hostnameAttribute))
+ }
+
+ instancesAttribute, ok := attributes["instances"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `instances is missing from object`)
+
+ return nil, diags
+ }
+
+ instancesVal, ok := instancesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`instances expected to be basetypes.Int64Value, was: %T`, instancesAttribute))
+ }
+
+ ipAddressAttribute, ok := attributes["ip_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `ip_address is missing from object`)
+
+ return nil, diags
+ }
+
+ ipAddressVal, ok := ipAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`ip_address expected to be basetypes.StringValue, was: %T`, ipAddressAttribute))
+ }
+
+ passwordAttribute, ok := attributes["password"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `password is missing from object`)
+
+ return nil, diags
+ }
+
+ passwordVal, ok := passwordAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`password expected to be basetypes.StringValue, was: %T`, passwordAttribute))
+ }
+
+ recoveryAttribute, ok := attributes["recovery"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `recovery is missing from object`)
+
+ return nil, diags
+ }
+
+ recoveryVal, ok := recoveryAttribute.(basetypes.ObjectValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`recovery expected to be basetypes.ObjectValue, was: %T`, recoveryAttribute))
+ }
+
+ scheduledBackupsAttribute, ok := attributes["scheduled_backups"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `scheduled_backups is missing from object`)
+
+ return nil, diags
+ }
+
+ scheduledBackupsVal, ok := scheduledBackupsAttribute.(basetypes.ObjectValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`scheduled_backups expected to be basetypes.ObjectValue, was: %T`, scheduledBackupsAttribute))
+ }
+
+ typeAttribute, ok := attributes["type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `type is missing from object`)
+
+ return nil, diags
+ }
+
+ typeVal, ok := typeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`type expected to be basetypes.StringValue, was: %T`, typeAttribute))
+ }
+
+ versionAttribute, ok := attributes["version"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `version is missing from object`)
+
+ return nil, diags
+ }
+
+ versionVal, ok := versionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return ApplicationConfigValue{
+ Hostname: hostnameVal,
+ Instances: instancesVal,
+ IpAddress: ipAddressVal,
+ Password: passwordVal,
+ Recovery: recoveryVal,
+ ScheduledBackups: scheduledBackupsVal,
+ ApplicationConfigType: typeVal,
+ Version: versionVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewApplicationConfigValueNull() ApplicationConfigValue {
+ return ApplicationConfigValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewApplicationConfigValueUnknown() ApplicationConfigValue {
+ return ApplicationConfigValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewApplicationConfigValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (ApplicationConfigValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing ApplicationConfigValue Attribute Value",
+ "While creating a ApplicationConfigValue value, a missing attribute value was detected. "+
+ "A ApplicationConfigValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("ApplicationConfigValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid ApplicationConfigValue Attribute Type",
+ "While creating a ApplicationConfigValue value, an invalid attribute value was detected. "+
+ "A ApplicationConfigValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("ApplicationConfigValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("ApplicationConfigValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra ApplicationConfigValue Attribute Value",
+ "While creating a ApplicationConfigValue value, an extra attribute value was detected. "+
+ "A ApplicationConfigValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra ApplicationConfigValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ hostnameAttribute, ok := attributes["hostname"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `hostname is missing from object`)
+
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ hostnameVal, ok := hostnameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`hostname expected to be basetypes.StringValue, was: %T`, hostnameAttribute))
+ }
+
+ instancesAttribute, ok := attributes["instances"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `instances is missing from object`)
+
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ instancesVal, ok := instancesAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`instances expected to be basetypes.Int64Value, was: %T`, instancesAttribute))
+ }
+
+ ipAddressAttribute, ok := attributes["ip_address"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `ip_address is missing from object`)
+
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ ipAddressVal, ok := ipAddressAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`ip_address expected to be basetypes.StringValue, was: %T`, ipAddressAttribute))
+ }
+
+ passwordAttribute, ok := attributes["password"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `password is missing from object`)
+
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ passwordVal, ok := passwordAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`password expected to be basetypes.StringValue, was: %T`, passwordAttribute))
+ }
+
+ recoveryAttribute, ok := attributes["recovery"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `recovery is missing from object`)
+
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ recoveryVal, ok := recoveryAttribute.(basetypes.ObjectValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`recovery expected to be basetypes.ObjectValue, was: %T`, recoveryAttribute))
+ }
+
+ scheduledBackupsAttribute, ok := attributes["scheduled_backups"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `scheduled_backups is missing from object`)
+
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ scheduledBackupsVal, ok := scheduledBackupsAttribute.(basetypes.ObjectValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`scheduled_backups expected to be basetypes.ObjectValue, was: %T`, scheduledBackupsAttribute))
+ }
+
+ typeAttribute, ok := attributes["type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `type is missing from object`)
+
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ typeVal, ok := typeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`type expected to be basetypes.StringValue, was: %T`, typeAttribute))
+ }
+
+ versionAttribute, ok := attributes["version"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `version is missing from object`)
+
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ versionVal, ok := versionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
+ }
+
+ if diags.HasError() {
+ return NewApplicationConfigValueUnknown(), diags
+ }
+
+ return ApplicationConfigValue{
+ Hostname: hostnameVal,
+ Instances: instancesVal,
+ IpAddress: ipAddressVal,
+ Password: passwordVal,
+ Recovery: recoveryVal,
+ ScheduledBackups: scheduledBackupsVal,
+ ApplicationConfigType: typeVal,
+ Version: versionVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewApplicationConfigValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) ApplicationConfigValue {
+ object, diags := NewApplicationConfigValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewApplicationConfigValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t ApplicationConfigType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewApplicationConfigValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewApplicationConfigValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewApplicationConfigValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewApplicationConfigValueMust(ApplicationConfigValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t ApplicationConfigType) ValueType(ctx context.Context) attr.Value {
+ return ApplicationConfigValue{}
+}
+
+var _ basetypes.ObjectValuable = ApplicationConfigValue{}
+
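+// ApplicationConfigValue is the value counterpart of ApplicationConfigType and holds the
+// application_config attributes.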
+type ApplicationConfigValue struct {
+ Hostname basetypes.StringValue `tfsdk:"hostname"`
+ Instances basetypes.Int64Value `tfsdk:"instances"`
+ IpAddress basetypes.StringValue `tfsdk:"ip_address"`
+ Password basetypes.StringValue `tfsdk:"password"`
+ Recovery basetypes.ObjectValue `tfsdk:"recovery"`
+ ScheduledBackups basetypes.ObjectValue `tfsdk:"scheduled_backups"`
+ ApplicationConfigType basetypes.StringValue `tfsdk:"type"`
+ Version basetypes.StringValue `tfsdk:"version"`
+ state attr.ValueState
+}
+
+func (v ApplicationConfigValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 8)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["hostname"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["instances"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["ip_address"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["password"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["recovery"] = basetypes.ObjectType{
+ AttrTypes: RecoveryValue{}.AttributeTypes(ctx),
+ }.TerraformType(ctx)
+ attrTypes["scheduled_backups"] = basetypes.ObjectType{
+ AttrTypes: ScheduledBackupsValue{}.AttributeTypes(ctx),
+ }.TerraformType(ctx)
+ attrTypes["type"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["version"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 8)
+
+ val, err = v.Hostname.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["hostname"] = val
+
+ val, err = v.Instances.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["instances"] = val
+
+ val, err = v.IpAddress.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["ip_address"] = val
+
+ val, err = v.Password.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["password"] = val
+
+ val, err = v.Recovery.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["recovery"] = val
+
+ val, err = v.ScheduledBackups.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["scheduled_backups"] = val
+
+ val, err = v.ApplicationConfigType.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["type"] = val
+
+ val, err = v.Version.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["version"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v ApplicationConfigValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v ApplicationConfigValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v ApplicationConfigValue) String() string {
+ return "ApplicationConfigValue"
+}
+
+func (v ApplicationConfigValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ var recovery basetypes.ObjectValue
+
+ if v.Recovery.IsNull() {
+ recovery = types.ObjectNull(
+ RecoveryValue{}.AttributeTypes(ctx),
+ )
+ }
+
+ if v.Recovery.IsUnknown() {
+ recovery = types.ObjectUnknown(
+ RecoveryValue{}.AttributeTypes(ctx),
+ )
+ }
+
+ if !v.Recovery.IsNull() && !v.Recovery.IsUnknown() {
+ recovery = types.ObjectValueMust(
+ RecoveryValue{}.AttributeTypes(ctx),
+ v.Recovery.Attributes(),
+ )
+ }
+
+ var scheduledBackups basetypes.ObjectValue
+
+ if v.ScheduledBackups.IsNull() {
+ scheduledBackups = types.ObjectNull(
+ ScheduledBackupsValue{}.AttributeTypes(ctx),
+ )
+ }
+
+ if v.ScheduledBackups.IsUnknown() {
+ scheduledBackups = types.ObjectUnknown(
+ ScheduledBackupsValue{}.AttributeTypes(ctx),
+ )
+ }
+
+ if !v.ScheduledBackups.IsNull() && !v.ScheduledBackups.IsUnknown() {
+ scheduledBackups = types.ObjectValueMust(
+ ScheduledBackupsValue{}.AttributeTypes(ctx),
+ v.ScheduledBackups.Attributes(),
+ )
+ }
+
+ objVal, diags := types.ObjectValue(
+ map[string]attr.Type{
+ "hostname": basetypes.StringType{},
+ "instances": basetypes.Int64Type{},
+ "ip_address": basetypes.StringType{},
+ "password": basetypes.StringType{},
+ "recovery": basetypes.ObjectType{
+ AttrTypes: RecoveryValue{}.AttributeTypes(ctx),
+ },
+ "scheduled_backups": basetypes.ObjectType{
+ AttrTypes: ScheduledBackupsValue{}.AttributeTypes(ctx),
+ },
+ "type": basetypes.StringType{},
+ "version": basetypes.StringType{},
+ },
+ map[string]attr.Value{
+ "hostname": v.Hostname,
+ "instances": v.Instances,
+ "ip_address": v.IpAddress,
+ "password": v.Password,
+ "recovery": recovery,
+ "scheduled_backups": scheduledBackups,
+ "type": v.ApplicationConfigType,
+ "version": v.Version,
+ })
+
+ return objVal, diags
+}
+
+func (v ApplicationConfigValue) Equal(o attr.Value) bool {
+ other, ok := o.(ApplicationConfigValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Hostname.Equal(other.Hostname) {
+ return false
+ }
+
+ if !v.Instances.Equal(other.Instances) {
+ return false
+ }
+
+ if !v.IpAddress.Equal(other.IpAddress) {
+ return false
+ }
+
+ if !v.Password.Equal(other.Password) {
+ return false
+ }
+
+ if !v.Recovery.Equal(other.Recovery) {
+ return false
+ }
+
+ if !v.ScheduledBackups.Equal(other.ScheduledBackups) {
+ return false
+ }
+
+ if !v.ApplicationConfigType.Equal(other.ApplicationConfigType) {
+ return false
+ }
+
+ if !v.Version.Equal(other.Version) {
+ return false
+ }
+
+ return true
+}
+
+func (v ApplicationConfigValue) Type(ctx context.Context) attr.Type {
+ return ApplicationConfigType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v ApplicationConfigValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "hostname": basetypes.StringType{},
+ "instances": basetypes.Int64Type{},
+ "ip_address": basetypes.StringType{},
+ "password": basetypes.StringType{},
+ "recovery": basetypes.ObjectType{
+ AttrTypes: RecoveryValue{}.AttributeTypes(ctx),
+ },
+ "scheduled_backups": basetypes.ObjectType{
+ AttrTypes: ScheduledBackupsValue{}.AttributeTypes(ctx),
+ },
+ "type": basetypes.StringType{},
+ "version": basetypes.StringType{},
+ }
+}
+
+var _ basetypes.ObjectTypable = RecoveryType{}
+
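+// RecoveryType is the custom object type for the nested application_config.recovery attribute.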
+type RecoveryType struct {
+ basetypes.ObjectType
+}
+
+func (t RecoveryType) Equal(o attr.Type) bool {
+ other, ok := o.(RecoveryType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t RecoveryType) String() string {
+ return "RecoveryType"
+}
+
+func (t RecoveryType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ exclusiveAttribute, ok := attributes["exclusive"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `exclusive is missing from object`)
+
+ return nil, diags
+ }
+
+ exclusiveVal, ok := exclusiveAttribute.(basetypes.BoolValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`exclusive expected to be basetypes.BoolValue, was: %T`, exclusiveAttribute))
+ }
+
+ sourceAttribute, ok := attributes["source"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `source is missing from object`)
+
+ return nil, diags
+ }
+
+ sourceVal, ok := sourceAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`source expected to be basetypes.StringValue, was: %T`, sourceAttribute))
+ }
+
+ targetLsnAttribute, ok := attributes["target_lsn"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `target_lsn is missing from object`)
+
+ return nil, diags
+ }
+
+ targetLsnVal, ok := targetLsnAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`target_lsn expected to be basetypes.StringValue, was: %T`, targetLsnAttribute))
+ }
+
+ targetNameAttribute, ok := attributes["target_name"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `target_name is missing from object`)
+
+ return nil, diags
+ }
+
+ targetNameVal, ok := targetNameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`target_name expected to be basetypes.StringValue, was: %T`, targetNameAttribute))
+ }
+
+ targetTimeAttribute, ok := attributes["target_time"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `target_time is missing from object`)
+
+ return nil, diags
+ }
+
+ targetTimeVal, ok := targetTimeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`target_time expected to be basetypes.StringValue, was: %T`, targetTimeAttribute))
+ }
+
+ targetXidAttribute, ok := attributes["target_xid"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `target_xid is missing from object`)
+
+ return nil, diags
+ }
+
+ targetXidVal, ok := targetXidAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`target_xid expected to be basetypes.StringValue, was: %T`, targetXidAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return RecoveryValue{
+ Exclusive: exclusiveVal,
+ Source: sourceVal,
+ TargetLsn: targetLsnVal,
+ TargetName: targetNameVal,
+ TargetTime: targetTimeVal,
+ TargetXid: targetXidVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewRecoveryValueNull() RecoveryValue {
+ return RecoveryValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewRecoveryValueUnknown() RecoveryValue {
+ return RecoveryValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewRecoveryValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (RecoveryValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing RecoveryValue Attribute Value",
+ "While creating a RecoveryValue value, a missing attribute value was detected. "+
+ "A RecoveryValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("RecoveryValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid RecoveryValue Attribute Type",
+ "While creating a RecoveryValue value, an invalid attribute value was detected. "+
+ "A RecoveryValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("RecoveryValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("RecoveryValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra RecoveryValue Attribute Value",
+ "While creating a RecoveryValue value, an extra attribute value was detected. "+
+ "A RecoveryValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra RecoveryValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewRecoveryValueUnknown(), diags
+ }
+
+ exclusiveAttribute, ok := attributes["exclusive"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `exclusive is missing from object`)
+
+ return NewRecoveryValueUnknown(), diags
+ }
+
+ exclusiveVal, ok := exclusiveAttribute.(basetypes.BoolValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`exclusive expected to be basetypes.BoolValue, was: %T`, exclusiveAttribute))
+ }
+
+ sourceAttribute, ok := attributes["source"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `source is missing from object`)
+
+ return NewRecoveryValueUnknown(), diags
+ }
+
+ sourceVal, ok := sourceAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`source expected to be basetypes.StringValue, was: %T`, sourceAttribute))
+ }
+
+ targetLsnAttribute, ok := attributes["target_lsn"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `target_lsn is missing from object`)
+
+ return NewRecoveryValueUnknown(), diags
+ }
+
+ targetLsnVal, ok := targetLsnAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`target_lsn expected to be basetypes.StringValue, was: %T`, targetLsnAttribute))
+ }
+
+ targetNameAttribute, ok := attributes["target_name"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `target_name is missing from object`)
+
+ return NewRecoveryValueUnknown(), diags
+ }
+
+ targetNameVal, ok := targetNameAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`target_name expected to be basetypes.StringValue, was: %T`, targetNameAttribute))
+ }
+
+ targetTimeAttribute, ok := attributes["target_time"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `target_time is missing from object`)
+
+ return NewRecoveryValueUnknown(), diags
+ }
+
+ targetTimeVal, ok := targetTimeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`target_time expected to be basetypes.StringValue, was: %T`, targetTimeAttribute))
+ }
+
+ targetXidAttribute, ok := attributes["target_xid"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `target_xid is missing from object`)
+
+ return NewRecoveryValueUnknown(), diags
+ }
+
+ targetXidVal, ok := targetXidAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`target_xid expected to be basetypes.StringValue, was: %T`, targetXidAttribute))
+ }
+
+ if diags.HasError() {
+ return NewRecoveryValueUnknown(), diags
+ }
+
+ return RecoveryValue{
+ Exclusive: exclusiveVal,
+ Source: sourceVal,
+ TargetLsn: targetLsnVal,
+ TargetName: targetNameVal,
+ TargetTime: targetTimeVal,
+ TargetXid: targetXidVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewRecoveryValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) RecoveryValue {
+ object, diags := NewRecoveryValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewRecoveryValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t RecoveryType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewRecoveryValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewRecoveryValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewRecoveryValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewRecoveryValueMust(RecoveryValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t RecoveryType) ValueType(ctx context.Context) attr.Value {
+ return RecoveryValue{}
+}
+
+var _ basetypes.ObjectValuable = RecoveryValue{}
+
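+// RecoveryValue is the value counterpart of RecoveryType and holds the recovery attributes.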
+type RecoveryValue struct {
+ Exclusive basetypes.BoolValue `tfsdk:"exclusive"`
+ Source basetypes.StringValue `tfsdk:"source"`
+ TargetLsn basetypes.StringValue `tfsdk:"target_lsn"`
+ TargetName basetypes.StringValue `tfsdk:"target_name"`
+ TargetTime basetypes.StringValue `tfsdk:"target_time"`
+ TargetXid basetypes.StringValue `tfsdk:"target_xid"`
+ state attr.ValueState
+}
+
+func (v RecoveryValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 6)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["exclusive"] = basetypes.BoolType{}.TerraformType(ctx)
+ attrTypes["source"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["target_lsn"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["target_name"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["target_time"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["target_xid"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 6)
+
+ val, err = v.Exclusive.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["exclusive"] = val
+
+ val, err = v.Source.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["source"] = val
+
+ val, err = v.TargetLsn.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["target_lsn"] = val
+
+ val, err = v.TargetName.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["target_name"] = val
+
+ val, err = v.TargetTime.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["target_time"] = val
+
+ val, err = v.TargetXid.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["target_xid"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v RecoveryValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v RecoveryValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v RecoveryValue) String() string {
+ return "RecoveryValue"
+}
+
+func (v RecoveryValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ objVal, diags := types.ObjectValue(
+ map[string]attr.Type{
+ "exclusive": basetypes.BoolType{},
+ "source": basetypes.StringType{},
+ "target_lsn": basetypes.StringType{},
+ "target_name": basetypes.StringType{},
+ "target_time": basetypes.StringType{},
+ "target_xid": basetypes.StringType{},
+ },
+ map[string]attr.Value{
+ "exclusive": v.Exclusive,
+ "source": v.Source,
+ "target_lsn": v.TargetLsn,
+ "target_name": v.TargetName,
+ "target_time": v.TargetTime,
+ "target_xid": v.TargetXid,
+ })
+
+ return objVal, diags
+}
+
+func (v RecoveryValue) Equal(o attr.Value) bool {
+ other, ok := o.(RecoveryValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Exclusive.Equal(other.Exclusive) {
+ return false
+ }
+
+ if !v.Source.Equal(other.Source) {
+ return false
+ }
+
+ if !v.TargetLsn.Equal(other.TargetLsn) {
+ return false
+ }
+
+ if !v.TargetName.Equal(other.TargetName) {
+ return false
+ }
+
+ if !v.TargetTime.Equal(other.TargetTime) {
+ return false
+ }
+
+ if !v.TargetXid.Equal(other.TargetXid) {
+ return false
+ }
+
+ return true
+}
+
+func (v RecoveryValue) Type(ctx context.Context) attr.Type {
+ return RecoveryType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v RecoveryValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "exclusive": basetypes.BoolType{},
+ "source": basetypes.StringType{},
+ "target_lsn": basetypes.StringType{},
+ "target_name": basetypes.StringType{},
+ "target_time": basetypes.StringType{},
+ "target_xid": basetypes.StringType{},
+ }
+}
+
+var _ basetypes.ObjectTypable = ScheduledBackupsType{}
+
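+// ScheduledBackupsType is the custom object type for the nested application_config.scheduled_backups attribute.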
+type ScheduledBackupsType struct {
+ basetypes.ObjectType
+}
+
+func (t ScheduledBackupsType) Equal(o attr.Type) bool {
+ other, ok := o.(ScheduledBackupsType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t ScheduledBackupsType) String() string {
+ return "ScheduledBackupsType"
+}
+
+func (t ScheduledBackupsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ retentionAttribute, ok := attributes["retention"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `retention is missing from object`)
+
+ return nil, diags
+ }
+
+ retentionVal, ok := retentionAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`retention expected to be basetypes.Int64Value, was: %T`, retentionAttribute))
+ }
+
+ scheduleAttribute, ok := attributes["schedule"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `schedule is missing from object`)
+
+ return nil, diags
+ }
+
+ scheduleVal, ok := scheduleAttribute.(basetypes.ObjectValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`schedule expected to be basetypes.ObjectValue, was: %T`, scheduleAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return ScheduledBackupsValue{
+ Retention: retentionVal,
+ Schedule: scheduleVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewScheduledBackupsValueNull() ScheduledBackupsValue {
+ return ScheduledBackupsValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewScheduledBackupsValueUnknown() ScheduledBackupsValue {
+ return ScheduledBackupsValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewScheduledBackupsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (ScheduledBackupsValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing ScheduledBackupsValue Attribute Value",
+ "While creating a ScheduledBackupsValue value, a missing attribute value was detected. "+
+ "A ScheduledBackupsValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("ScheduledBackupsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid ScheduledBackupsValue Attribute Type",
+ "While creating a ScheduledBackupsValue value, an invalid attribute value was detected. "+
+ "A ScheduledBackupsValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("ScheduledBackupsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("ScheduledBackupsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra ScheduledBackupsValue Attribute Value",
+ "While creating a ScheduledBackupsValue value, an extra attribute value was detected. "+
+ "A ScheduledBackupsValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra ScheduledBackupsValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewScheduledBackupsValueUnknown(), diags
+ }
+
+ retentionAttribute, ok := attributes["retention"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `retention is missing from object`)
+
+ return NewScheduledBackupsValueUnknown(), diags
+ }
+
+ retentionVal, ok := retentionAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`retention expected to be basetypes.Int64Value, was: %T`, retentionAttribute))
+ }
+
+ scheduleAttribute, ok := attributes["schedule"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `schedule is missing from object`)
+
+ return NewScheduledBackupsValueUnknown(), diags
+ }
+
+ scheduleVal, ok := scheduleAttribute.(basetypes.ObjectValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`schedule expected to be basetypes.ObjectValue, was: %T`, scheduleAttribute))
+ }
+
+ if diags.HasError() {
+ return NewScheduledBackupsValueUnknown(), diags
+ }
+
+ return ScheduledBackupsValue{
+ Retention: retentionVal,
+ Schedule: scheduleVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewScheduledBackupsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) ScheduledBackupsValue {
+ object, diags := NewScheduledBackupsValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewScheduledBackupsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t ScheduledBackupsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewScheduledBackupsValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewScheduledBackupsValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewScheduledBackupsValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewScheduledBackupsValueMust(ScheduledBackupsValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t ScheduledBackupsType) ValueType(ctx context.Context) attr.Value {
+ return ScheduledBackupsValue{}
+}
+
+var _ basetypes.ObjectValuable = ScheduledBackupsValue{}
+
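+// ScheduledBackupsValue represents the scheduled_backups object of the database
+// resource: the backup retention and the nested backup schedule.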
+type ScheduledBackupsValue struct {
+ Retention basetypes.Int64Value `tfsdk:"retention"`
+ Schedule basetypes.ObjectValue `tfsdk:"schedule"`
+ state attr.ValueState
+}
+
+func (v ScheduledBackupsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 2)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["retention"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["schedule"] = basetypes.ObjectType{
+ AttrTypes: ScheduleValue{}.AttributeTypes(ctx),
+ }.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 2)
+
+ val, err = v.Retention.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["retention"] = val
+
+ val, err = v.Schedule.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["schedule"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v ScheduledBackupsValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v ScheduledBackupsValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v ScheduledBackupsValue) String() string {
+ return "ScheduledBackupsValue"
+}
+
+func (v ScheduledBackupsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ var schedule basetypes.ObjectValue
+
+ if v.Schedule.IsNull() {
+ schedule = types.ObjectNull(
+ ScheduleValue{}.AttributeTypes(ctx),
+ )
+ }
+
+ if v.Schedule.IsUnknown() {
+ schedule = types.ObjectUnknown(
+ ScheduleValue{}.AttributeTypes(ctx),
+ )
+ }
+
+ if !v.Schedule.IsNull() && !v.Schedule.IsUnknown() {
+ schedule = types.ObjectValueMust(
+ ScheduleValue{}.AttributeTypes(ctx),
+ v.Schedule.Attributes(),
+ )
+ }
+
+ objVal, diags := types.ObjectValue(
+ map[string]attr.Type{
+ "retention": basetypes.Int64Type{},
+ "schedule": basetypes.ObjectType{
+ AttrTypes: ScheduleValue{}.AttributeTypes(ctx),
+ },
+ },
+ map[string]attr.Value{
+ "retention": v.Retention,
+ "schedule": schedule,
+ })
+
+ return objVal, diags
+}
+
+func (v ScheduledBackupsValue) Equal(o attr.Value) bool {
+ other, ok := o.(ScheduledBackupsValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Retention.Equal(other.Retention) {
+ return false
+ }
+
+ if !v.Schedule.Equal(other.Schedule) {
+ return false
+ }
+
+ return true
+}
+
+func (v ScheduledBackupsValue) Type(ctx context.Context) attr.Type {
+ return ScheduledBackupsType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v ScheduledBackupsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "retention": basetypes.Int64Type{},
+ "schedule": basetypes.ObjectType{
+ AttrTypes: ScheduleValue{}.AttributeTypes(ctx),
+ },
+ }
+}
+
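+// ToDBaaSSdkObject converts the Terraform scheduled_backups value into its SDK
+// representation, passing nil for retention when the value is null or unknown.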
+func (v ScheduledBackupsValue) ToDBaaSSdkObject(ctx context.Context) (*sys11dbaassdk.PSQLScheduledBackups, diag.Diagnostics) {
+ var diags diag.Diagnostics
+ scheduleObj, d := NewScheduleValue(v.Schedule.AttributeTypes(ctx), v.Schedule.Attributes())
+ diags.Append(d...)
+ schedule, d := scheduleObj.ToDBaaSSdkObject(ctx)
+ diags.Append(d...)
+
+ var retention *int
+ if !v.Retention.IsNull() && !v.Retention.IsUnknown() {
+ retention = sys11dbaassdk.Int64ToIntPtr(v.Retention.ValueInt64())
+ }
+
+ return &sys11dbaassdk.PSQLScheduledBackups{
+ Retention: retention,
+ Schedule: schedule,
+ }, diags
+}
+
+var _ basetypes.ObjectTypable = ScheduleType{}
+
+type ScheduleType struct {
+ basetypes.ObjectType
+}
+
+func (t ScheduleType) Equal(o attr.Type) bool {
+ other, ok := o.(ScheduleType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t ScheduleType) String() string {
+ return "ScheduleType"
+}
+
+func (t ScheduleType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ hourAttribute, ok := attributes["hour"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `hour is missing from object`)
+
+ return nil, diags
+ }
+
+ hourVal, ok := hourAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`hour expected to be basetypes.Int64Value, was: %T`, hourAttribute))
+ }
+
+ minuteAttribute, ok := attributes["minute"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `minute is missing from object`)
+
+ return nil, diags
+ }
+
+ minuteVal, ok := minuteAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`minute expected to be basetypes.Int64Value, was: %T`, minuteAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return ScheduleValue{
+ Hour: hourVal,
+ Minute: minuteVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewScheduleValueNull() ScheduleValue {
+ return ScheduleValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewScheduleValueUnknown() ScheduleValue {
+ return ScheduleValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewScheduleValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (ScheduleValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing ScheduleValue Attribute Value",
+ "While creating a ScheduleValue value, a missing attribute value was detected. "+
+ "A ScheduleValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("ScheduleValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid ScheduleValue Attribute Type",
+ "While creating a ScheduleValue value, an invalid attribute value was detected. "+
+ "A ScheduleValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("ScheduleValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("ScheduleValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra ScheduleValue Attribute Value",
+ "While creating a ScheduleValue value, an extra attribute value was detected. "+
+ "A ScheduleValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra ScheduleValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewScheduleValueUnknown(), diags
+ }
+
+ hourAttribute, ok := attributes["hour"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `hour is missing from object`)
+
+ return NewScheduleValueUnknown(), diags
+ }
+
+ hourVal, ok := hourAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`hour expected to be basetypes.Int64Value, was: %T`, hourAttribute))
+ }
+
+ minuteAttribute, ok := attributes["minute"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `minute is missing from object`)
+
+ return NewScheduleValueUnknown(), diags
+ }
+
+ minuteVal, ok := minuteAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`minute expected to be basetypes.Int64Value, was: %T`, minuteAttribute))
+ }
+
+ if diags.HasError() {
+ return NewScheduleValueUnknown(), diags
+ }
+
+ return ScheduleValue{
+ Hour: hourVal,
+ Minute: minuteVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewScheduleValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) ScheduleValue {
+ object, diags := NewScheduleValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewScheduleValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t ScheduleType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewScheduleValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewScheduleValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewScheduleValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewScheduleValueMust(ScheduleValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t ScheduleType) ValueType(ctx context.Context) attr.Value {
+ return ScheduleValue{}
+}
+
+var _ basetypes.ObjectValuable = ScheduleValue{}
+
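+// ScheduleValue represents the schedule object of scheduled_backups: the hour and
+// minute at which the full backup starts.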
+type ScheduleValue struct {
+ Hour basetypes.Int64Value `tfsdk:"hour"`
+ Minute basetypes.Int64Value `tfsdk:"minute"`
+ state attr.ValueState
+}
+
+func (v ScheduleValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 2)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["hour"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["minute"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 2)
+
+ val, err = v.Hour.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["hour"] = val
+
+ val, err = v.Minute.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["minute"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v ScheduleValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v ScheduleValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v ScheduleValue) String() string {
+ return "ScheduleValue"
+}
+
+func (v ScheduleValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ objVal, diags := types.ObjectValue(
+ map[string]attr.Type{
+ "hour": basetypes.Int64Type{},
+ "minute": basetypes.Int64Type{},
+ },
+ map[string]attr.Value{
+ "hour": v.Hour,
+ "minute": v.Minute,
+ })
+
+ return objVal, diags
+}
+
+func (v ScheduleValue) Equal(o attr.Value) bool {
+ other, ok := o.(ScheduleValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Hour.Equal(other.Hour) {
+ return false
+ }
+
+ if !v.Minute.Equal(other.Minute) {
+ return false
+ }
+
+ return true
+}
+
+func (v ScheduleValue) Type(ctx context.Context) attr.Type {
+ return ScheduleType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v ScheduleValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "hour": basetypes.Int64Type{},
+ "minute": basetypes.Int64Type{},
+ }
+}
+
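+// ToDBaaSSdkObject converts the Terraform schedule value into its SDK representation,
+// passing nil for hour and minute when they are null or unknown.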
+func (v ScheduleValue) ToDBaaSSdkObject(ctx context.Context) (*sys11dbaassdk.PSQLScheduledBackupsSchedule, diag.Diagnostics) {
+
+ var hour *int
+ if !v.Hour.IsNull() && !v.Hour.IsUnknown() {
+ hour = sys11dbaassdk.Int64ToIntPtr(v.Hour.ValueInt64())
+ }
+
+ var minute *int
+ if !v.Minute.IsNull() && !v.Minute.IsUnknown() {
+ minute = sys11dbaassdk.Int64ToIntPtr(v.Minute.ValueInt64())
+ }
+
+ return &sys11dbaassdk.PSQLScheduledBackupsSchedule{
+ Hour: hour,
+ Minute: minute,
+ }, diag.Diagnostics{}
+}
diff --git a/internal/provider/database_resource.go b/internal/provider/database_resource.go
new file mode 100644
index 0000000..d3155a2
--- /dev/null
+++ b/internal/provider/database_resource.go
@@ -0,0 +1,586 @@
+package provider
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "strings"
+ "time"
+
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ sys11dbaassdk "github.com/syseleven/sys11dbaas-sdk"
+)
+
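+// CREATE_RETRY_LIMIT is the maximum time Create waits for a database to become ready
+// when wait_for_creation is enabled.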
+const CREATE_RETRY_LIMIT = 30 * time.Minute
+
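+// DatabaseModel describes the Terraform plan and state data of the sys11dbaas_database resource.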
+type DatabaseModel struct {
+ ApplicationConfig types.Object `tfsdk:"application_config"`
+ CreatedAt types.String `tfsdk:"created_at"`
+ CreatedBy types.String `tfsdk:"created_by"`
+ Description types.String `tfsdk:"description"`
+ LastModifiedAt types.String `tfsdk:"last_modified_at"`
+ LastModifiedBy types.String `tfsdk:"last_modified_by"`
+ Name types.String `tfsdk:"name"`
+ ServiceConfig types.Object `tfsdk:"service_config"`
+ Status types.String `tfsdk:"status"`
+ Phase types.String `tfsdk:"phase"`
+ ResourceStatus types.String `tfsdk:"resource_status"`
+ Uuid types.String `tfsdk:"uuid"`
+}
+
+// resource
+
+type DatabaseResource struct {
+ client *sys11dbaassdk.Client
+ project types.String
+ organization types.String
+ waitForCreation types.Bool
+}
+
+func NewDatabaseResource() resource.Resource {
+ return &DatabaseResource{}
+}
+
+// Metadata returns the resource type name.
+func (r *DatabaseResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_database"
+}
+
+// Configure adds the provider configured client to the resource.
+func (r *DatabaseResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+ if req.ProviderData == nil {
+ return
+ }
+
+ providerData, ok := req.ProviderData.(*sys11DBaaSProviderData)
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected Data Source Configure Type",
+ fmt.Sprintf("Expected *sys11DBaaSProviderData, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = providerData.client
+ r.organization = providerData.organization
+ r.project = providerData.project
+ r.waitForCreation = providerData.waitForCreation
+}
+
+// Read resource information.
+func (r *DatabaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ // Get current state
+ var state DatabaseModel
+ diags := req.State.Get(ctx, &state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ psqlRequest := &sys11dbaassdk.GetPostgreSQLRequest{
+ UUID: state.Uuid.ValueString(),
+ Organization: r.organization.ValueString(),
+ Project: r.project.ValueString(),
+ }
+
+ psqlDB, err := r.client.GetPostgreSQLDB(psqlRequest)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Unable to Read database",
+ err.Error(),
+ )
+ return
+ }
+
+ diags = psqlGetResponseToModel(ctx, psqlDB, &state, state)
+ ctx = tflog.SetField(ctx, "read_target_state", state)
+ tflog.Debug(ctx, "Reading database", nil)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Set refreshed state
+ diags = resp.State.Set(ctx, &state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+// Create resource.
+func (r *DatabaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ // Retrieve values from plan
+ var plan DatabaseModel
+ diags := req.Plan.Get(ctx, &plan)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ serviceConfig, diags := NewServiceConfigValue(plan.ServiceConfig.AttributeTypes(ctx), plan.ServiceConfig.Attributes())
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var ipList []string
+
+ for _, e := range serviceConfig.RemoteIps.Elements() {
+ ipList = append(ipList, strings.Trim(e.String(), "\""))
+ }
+
+ var maintenanceWindow *sys11dbaassdk.MaintenanceWindow
+ if !serviceConfig.MaintenanceWindow.IsUnknown() {
+ maintenanceWindowObj, diags := NewMaintenanceWindowValue(serviceConfig.MaintenanceWindow.AttributeTypes(ctx), serviceConfig.MaintenanceWindow.Attributes())
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ maintenanceWindow, diags = maintenanceWindowObj.ToDBaaSSdkObject(ctx)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ applicationConfig, diags := NewApplicationConfigValue(plan.ApplicationConfig.AttributeTypes(ctx), plan.ApplicationConfig.Attributes())
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var backupSchedule *sys11dbaassdk.PSQLScheduledBackups
+ if !applicationConfig.ScheduledBackups.IsUnknown() {
+ scheduledBackupsObj, diags := NewScheduledBackupsValue(applicationConfig.ScheduledBackups.AttributeTypes(ctx), applicationConfig.ScheduledBackups.Attributes())
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ backupSchedule, diags = scheduledBackupsObj.ToDBaaSSdkObject(ctx)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ createRequest := &sys11dbaassdk.CreatePostgreSQLRequest{
+ Organization: r.organization.ValueString(),
+ Project: r.project.ValueString(),
+ Name: plan.Name.ValueString(),
+ Description: plan.Description.ValueString(),
+ ServiceConfig: &sys11dbaassdk.PSQLServiceConfigRequest{
+ Disksize: sys11dbaassdk.Int64ToIntPtr(serviceConfig.Disksize.ValueInt64()),
+ Type: serviceConfig.ServiceConfigType.ValueString(),
+ Flavor: serviceConfig.Flavor.ValueString(),
+ Region: serviceConfig.Region.ValueString(),
+ MaintenanceWindow: maintenanceWindow,
+ RemoteIPs: ipList,
+ },
+ ApplicationConfig: &sys11dbaassdk.PSQLApplicationConfigRequest{
+ Type: applicationConfig.ApplicationConfigType.ValueString(),
+ Password: applicationConfig.Password.ValueString(),
+ Instances: sys11dbaassdk.IntPtr(int(applicationConfig.Instances.ValueInt64())),
+ Version: applicationConfig.Version.ValueString(),
+ ScheduledBackups: backupSchedule,
+ },
+ }
+
+ d, _ := json.Marshal(createRequest)
+ tflog.Debug(ctx, string(d), nil)
+
+ // Create new db
+ createResponse, err := r.client.CreatePostgreSQLDB(createRequest)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error creating database",
+ "Could not create database, unexpected error: "+err.Error(),
+ )
+ return
+ }
+
+ // Map response body to schema and populate Computed attribute values
+ targetState := DatabaseModel{}
+ diags = psqlCreateResponseToModel(ctx, createResponse, plan, &targetState)
+
+ ctx = tflog.SetField(ctx, "create_target_state", &targetState)
+ tflog.Debug(ctx, "[CREATE] Created", nil)
+ resp.Diagnostics.Append(diags...)
+
+ tflog.Debug(ctx, "[CREATE] Wait for creation: "+r.waitForCreation.String(), nil)
+ if r.waitForCreation.ValueBool() {
+ getRequest := &sys11dbaassdk.GetPostgreSQLRequest{
+ Organization: r.organization.ValueString(),
+ Project: r.project.ValueString(),
+ UUID: createResponse.UUID,
+ }
+ sleepFor := 30 * time.Second
+ for retryLimit := 0; targetState.Status.ValueString() != sys11dbaassdk.STATE_READY; {
+ if retryLimit == int((CREATE_RETRY_LIMIT / sleepFor).Abs()) {
+ diags = resp.State.Set(ctx, &targetState)
+ resp.Diagnostics.Append(diags...)
+ resp.Diagnostics.AddError("RetryLimit reached during wait_for_creation", "The retry limit of "+CREATE_RETRY_LIMIT.String()+" was reached while waiting for creation of database")
+ return
+ }
+ time.Sleep(sleepFor)
+ getResponse, err := r.client.GetPostgreSQLDB(getRequest)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error read database during wait",
+ "Could not read database during wait, unexpected error: "+err.Error(),
+ )
+ return
+ }
+ diags = psqlGetResponseToModel(ctx, getResponse, &targetState, plan)
+
+ ctx = tflog.SetField(ctx, "create_target_state", &targetState)
+ tflog.Debug(ctx, "[CREATE] Current creation state", nil)
+ resp.Diagnostics.Append(diags...)
+ retryLimit++
+ }
+ }
+
+ // Set state to fully populated data
+ diags = resp.State.Set(ctx, &targetState)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+// Delete resource.
+func (r *DatabaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ // Get current state
+ var state DatabaseModel
+ diags := req.State.Get(ctx, &state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ psqlRequest := &sys11dbaassdk.DeletePostgreSQLRequest{
+ UUID: state.Uuid.ValueString(),
+ Organization: r.organization.ValueString(),
+ Project: r.project.ValueString(),
+ }
+
+ _, err := r.client.DeletePostgreSQLDB(psqlRequest)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Unable to Delete Database",
+ err.Error(),
+ )
+ return
+ }
+
+ // Set refreshed state
+ resp.State.RemoveResource(ctx)
+}
+
+// Update resource.
+func (r *DatabaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ // Get plan
+ var plan DatabaseModel
+ diags := req.Plan.Get(ctx, &plan)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ // Get current state
+ var state DatabaseModel
+ diags = req.State.Get(ctx, &state)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ serviceConfig, diags := NewServiceConfigValue(plan.ServiceConfig.AttributeTypes(ctx), plan.ServiceConfig.Attributes())
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var ipList []string
+
+ for _, e := range serviceConfig.RemoteIps.Elements() {
+ ipList = append(ipList, strings.Trim(e.String(), "\""))
+ }
+
+ var maintenanceWindow *sys11dbaassdk.MaintenanceWindow
+ if !serviceConfig.MaintenanceWindow.IsUnknown() {
+ maintenanceWindowObj, diags := NewMaintenanceWindowValue(serviceConfig.MaintenanceWindow.AttributeTypes(ctx), serviceConfig.MaintenanceWindow.Attributes())
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ maintenanceWindow, diags = maintenanceWindowObj.ToDBaaSSdkObject(ctx)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ applicationConfig, diags := NewApplicationConfigValue(plan.ApplicationConfig.AttributeTypes(ctx), plan.ApplicationConfig.Attributes())
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var backupSchedule *sys11dbaassdk.PSQLScheduledBackups
+ if !applicationConfig.ScheduledBackups.IsUnknown() {
+ scheduledBackupsObj, diags := NewScheduledBackupsValue(applicationConfig.ScheduledBackups.AttributeTypes(ctx), applicationConfig.ScheduledBackups.Attributes())
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ backupSchedule, diags = scheduledBackupsObj.ToDBaaSSdkObject(ctx)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ updateRequest := &sys11dbaassdk.UpdatePostgreSQLRequest{
+ UUID: state.Uuid.ValueString(),
+ Organization: r.organization.ValueString(),
+ Project: r.project.ValueString(),
+ Name: plan.Name.ValueString(),
+ Description: plan.Description.ValueString(),
+ ServiceConfig: &sys11dbaassdk.PSQLServiceConfigUpdateRequest{
+ Disksize: sys11dbaassdk.Int64ToIntPtr(serviceConfig.Disksize.ValueInt64()),
+ Type: serviceConfig.ServiceConfigType.ValueString(),
+ Flavor: serviceConfig.Flavor.ValueString(),
+ MaintenanceWindow: maintenanceWindow,
+ RemoteIPs: ipList,
+ },
+ ApplicationConfig: &sys11dbaassdk.PSQLApplicationConfigUpdateRequest{
+ Password: applicationConfig.Password.ValueString(),
+ Instances: sys11dbaassdk.IntPtr(int(applicationConfig.Instances.ValueInt64())),
+ Version: applicationConfig.Version.ValueString(),
+ ScheduledBackups: backupSchedule,
+ },
+ }
+
+ d, _ := json.Marshal(updateRequest)
+ tflog.Debug(ctx, string(d), nil)
+
+ // Update psql
+ _, err := r.client.UpdatePostgreSQLDB(updateRequest)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error updating database",
+ "Could not update database, unexpected error: "+err.Error(),
+ )
+ resp.Diagnostics.Append(diags...)
+ return
+ }
+
+ for i := 0; i < 10; i++ {
+ time.Sleep(2 * time.Second) // give DBaaS time to propagate changes
+ tflog.Debug(ctx, "wait 2 seconds to give DBaaS time to propagate", nil)
+
+ // Read the database again to pick up the propagated changes
+ getRequest := &sys11dbaassdk.GetPostgreSQLRequest{
+ Organization: r.organization.ValueString(),
+ Project: r.project.ValueString(),
+ UUID: state.Uuid.ValueString(),
+ }
+ getResponse, err := r.client.GetPostgreSQLDB(getRequest)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Error reading updated database",
+ "Could not read updated database, unexpected error: "+err.Error(),
+ )
+ return
+ }
+
+ // Map response body to schema and populate Computed attribute values
+ targetState := DatabaseModel{}
+ diags = psqlGetResponseToModel(ctx, getResponse, &targetState, plan)
+
+ ctx = tflog.SetField(ctx, "update_target_state", &targetState)
+ tflog.Debug(ctx, "Updated State", nil)
+ resp.Diagnostics.Append(diags...)
+
+ // Set state to fully populated data
+ diags = resp.State.Set(ctx, &targetState)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ continue
+ }
+ }
+}
+
+// Schema defines the schema for the resource.
+func (r *DatabaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = DatabaseResourceSchema(ctx)
+}
+
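+// psqlCreateResponseToModel maps a create API response onto the Terraform state model.
+// The admin password is taken from the plan because the API does not return it.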
+func psqlCreateResponseToModel(ctx context.Context, db *sys11dbaassdk.CreatePostgreSQLResponse, plan DatabaseModel, targetState *DatabaseModel) diag.Diagnostics {
+
+ var diags diag.Diagnostics
+
+ ctx = tflog.SetField(ctx, "conversion_create_source_response", db)
+ tflog.Debug(ctx, "Converting create api response")
+
+ ipList, d := types.ListValueFrom(ctx, types.StringType, db.ServiceConfig.RemoteIPs)
+
+ var recoveryObjValue basetypes.ObjectValue
+ if db.ApplicationConfig.Recovery != nil {
+ recoveryObjValue, _ = RecoveryValue{
+ Exclusive: types.BoolValue(db.ApplicationConfig.Recovery.Exclusive),
+ Source: types.StringValue(db.ApplicationConfig.Recovery.Source),
+ TargetLsn: types.StringValue(db.ApplicationConfig.Recovery.TargetLSN),
+ TargetName: types.StringValue(db.ApplicationConfig.Recovery.TargetName),
+ TargetXid: types.StringValue(db.ApplicationConfig.Recovery.TargetXID),
+ TargetTime: types.StringValue(db.ApplicationConfig.Recovery.TargetTime.Format(time.RFC3339)),
+ }.ToObjectValue(ctx)
+ }
+
+ scheduleObjVal, _ := ScheduleValue{
+ Hour: types.Int64Value(int64(*db.ApplicationConfig.ScheduledBackups.Schedule.Hour)),
+ Minute: types.Int64Value(int64(*db.ApplicationConfig.ScheduledBackups.Schedule.Minute)),
+ }.ToObjectValue(ctx)
+
+ scheduledBackupsObjVal, _ := ScheduledBackupsValue{
+ Schedule: scheduleObjVal,
+ Retention: types.Int64Value(int64(*db.ApplicationConfig.ScheduledBackups.Retention)),
+ }.ToObjectValue(ctx)
+
+ maintenanceWindowObjVal, _ := MaintenanceWindowValue{
+ DayOfWeek: types.Int64Value(int64(*db.ServiceConfig.MaintenanceWindow.DayOfWeek)),
+ StartHour: types.Int64Value(int64(*db.ServiceConfig.MaintenanceWindow.StartHour)),
+ StartMinute: types.Int64Value(int64(*db.ServiceConfig.MaintenanceWindow.StartMinute)),
+ }.ToObjectValue(ctx)
+
+ diags.Append(d...)
+
+ var targetServiceConfig ServiceConfigValue
+ targetServiceConfig.Disksize = types.Int64Value(int64(*db.ServiceConfig.Disksize))
+ targetServiceConfig.ServiceConfigType = types.StringValue(db.ServiceConfig.Type)
+ targetServiceConfig.Flavor = types.StringValue(db.ServiceConfig.Flavor)
+ targetServiceConfig.Region = types.StringValue(db.ServiceConfig.Region)
+ targetServiceConfig.MaintenanceWindow = maintenanceWindowObjVal
+ targetServiceConfig.RemoteIps = ipList
+
+ targetServiceConfigObj, d := targetServiceConfig.ToObjectValue(ctx)
+ diags.Append(d...)
+
+ var targetApplicationConfig ApplicationConfigValue
+ targetApplicationConfig.ApplicationConfigType = types.StringValue(db.ApplicationConfig.Type)
+ targetApplicationConfig.Password = types.StringValue(strings.Trim(plan.ApplicationConfig.Attributes()["password"].String(), "\"")) // take this from the plan, since it is not included in the response
+ targetApplicationConfig.Instances = types.Int64Value(int64(*db.ApplicationConfig.Instances))
+ targetApplicationConfig.Version = types.StringValue(db.ApplicationConfig.Version)
+ targetApplicationConfig.Hostname = types.StringValue(db.ApplicationConfig.Hostname)
+ targetApplicationConfig.IpAddress = types.StringValue(db.ApplicationConfig.IPAddress)
+ targetApplicationConfig.ScheduledBackups = scheduledBackupsObjVal
+ targetApplicationConfig.Recovery = recoveryObjValue
+
+ targetApplicationConfigObj, d := targetApplicationConfig.ToObjectValue(ctx)
+ diags.Append(d...)
+
+ targetState.Uuid = types.StringValue(db.UUID)
+ targetState.Name = types.StringValue(db.Name)
+ targetState.Description = types.StringValue(db.Description)
+ targetState.Status = types.StringValue(db.Status)
+ targetState.Phase = types.StringValue(db.Phase)
+ targetState.ResourceStatus = types.StringValue(db.ResourceStatus)
+ targetState.CreatedBy = types.StringValue(db.CreatedBy)
+ targetState.CreatedAt = types.StringValue(db.CreatedAt)
+ targetState.LastModifiedBy = types.StringValue(db.LastModifiedBy)
+ targetState.LastModifiedAt = types.StringValue(db.LastModifiedAt)
+ targetState.ApplicationConfig = targetApplicationConfigObj
+ targetState.ServiceConfig = targetServiceConfigObj
+
+ ctx = tflog.SetField(ctx, "conversion_create_target_state", &targetState)
+ tflog.Debug(ctx, "Converted api create response to state")
+
+ return diags
+
+}
+
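+// psqlGetResponseToModel maps a get API response onto the Terraform state model,
+// carrying the admin password over from the previous state because the API does not return it.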
+func psqlGetResponseToModel(ctx context.Context, db *sys11dbaassdk.GetPostgreSQLResponse, targetState *DatabaseModel, previousState DatabaseModel) diag.Diagnostics {
+
+ var diags diag.Diagnostics
+
+ ctx = tflog.SetField(ctx, "conversion_read_source_response", db)
+ tflog.Debug(ctx, "Converting read api response")
+
+ ipList, d := types.ListValueFrom(ctx, types.StringType, db.ServiceConfig.RemoteIPs)
+
+ var recoveryObjValue basetypes.ObjectValue
+ if db.ApplicationConfig.Recovery != nil {
+ recoveryObjValue, _ = RecoveryValue{
+ Exclusive: types.BoolValue(db.ApplicationConfig.Recovery.Exclusive),
+ Source: types.StringValue(db.ApplicationConfig.Recovery.Source),
+ TargetLsn: types.StringValue(db.ApplicationConfig.Recovery.TargetLSN),
+ TargetName: types.StringValue(db.ApplicationConfig.Recovery.TargetName),
+ TargetXid: types.StringValue(db.ApplicationConfig.Recovery.TargetXID),
+ TargetTime: types.StringValue(db.ApplicationConfig.Recovery.TargetTime.Format(time.RFC3339)),
+ }.ToObjectValue(ctx)
+ }
+
+ scheduleObjVal, _ := ScheduleValue{
+ Hour: types.Int64Value(int64(*db.ApplicationConfig.ScheduledBackups.Schedule.Hour)),
+ Minute: types.Int64Value(int64(*db.ApplicationConfig.ScheduledBackups.Schedule.Minute)),
+ }.ToObjectValue(ctx)
+
+ scheduledBackupsObjVal, _ := ScheduledBackupsValue{
+ Schedule: scheduleObjVal,
+ Retention: types.Int64Value(int64(*db.ApplicationConfig.ScheduledBackups.Retention)),
+ }.ToObjectValue(ctx)
+
+ maintenanceWindowObjVal, _ := MaintenanceWindowValue{
+ DayOfWeek: types.Int64Value(int64(*db.ServiceConfig.MaintenanceWindow.DayOfWeek)),
+ StartHour: types.Int64Value(int64(*db.ServiceConfig.MaintenanceWindow.StartHour)),
+ StartMinute: types.Int64Value(int64(*db.ServiceConfig.MaintenanceWindow.StartMinute)),
+ }.ToObjectValue(ctx)
+
+ diags.Append(d...)
+
+ var targetServiceConfig ServiceConfigValue
+ targetServiceConfig.Disksize = types.Int64Value(int64(*db.ServiceConfig.Disksize))
+ targetServiceConfig.ServiceConfigType = types.StringValue(db.ServiceConfig.Type)
+ targetServiceConfig.Flavor = types.StringValue(db.ServiceConfig.Flavor)
+ targetServiceConfig.Region = types.StringValue(db.ServiceConfig.Region)
+ targetServiceConfig.MaintenanceWindow = maintenanceWindowObjVal
+ targetServiceConfig.RemoteIps = ipList
+
+ targetServiceConfigObj, d := targetServiceConfig.ToObjectValue(ctx)
+ diags.Append(d...)
+
+ previousPassword := strings.Trim(previousState.ApplicationConfig.Attributes()["password"].String(), "\"")
+
+ var targetApplicationConfig ApplicationConfigValue
+ targetApplicationConfig.ApplicationConfigType = types.StringValue(db.ApplicationConfig.Type)
+ targetApplicationConfig.Password = types.StringValue(previousPassword)
+ targetApplicationConfig.Instances = types.Int64Value(int64(*db.ApplicationConfig.Instances))
+ targetApplicationConfig.Version = types.StringValue(db.ApplicationConfig.Version)
+ targetApplicationConfig.Hostname = types.StringValue(db.ApplicationConfig.Hostname)
+ targetApplicationConfig.IpAddress = types.StringValue(db.ApplicationConfig.IPAddress)
+ targetApplicationConfig.ScheduledBackups = scheduledBackupsObjVal
+ targetApplicationConfig.Recovery = recoveryObjValue
+
+ targetApplicationConfigObj, d := targetApplicationConfig.ToObjectValue(ctx)
+ diags.Append(d...)
+
+ targetState.Uuid = types.StringValue(db.UUID)
+ targetState.Name = types.StringValue(db.Name)
+ targetState.Description = types.StringValue(db.Description)
+ targetState.Status = types.StringValue(db.Status)
+ targetState.Phase = types.StringValue(db.Phase)
+ targetState.ResourceStatus = types.StringValue(db.ResourceStatus)
+ targetState.CreatedBy = types.StringValue(db.CreatedBy)
+ targetState.CreatedAt = types.StringValue(db.CreatedAt)
+ targetState.LastModifiedBy = types.StringValue(db.LastModifiedBy)
+ targetState.LastModifiedAt = types.StringValue(db.LastModifiedAt)
+ targetState.ApplicationConfig = targetApplicationConfigObj
+ targetState.ServiceConfig = targetServiceConfigObj
+
+ ctx = tflog.SetField(ctx, "conversion_read_target_state", targetState)
+ tflog.Debug(ctx, "Converted api read response to state")
+
+ return diags
+
+}
diff --git a/internal/provider/schema.go b/internal/provider/schema.go
new file mode 100644
index 0000000..1d4a41d
--- /dev/null
+++ b/internal/provider/schema.go
@@ -0,0 +1,320 @@
+package provider
+
+import (
+ "context"
+ "regexp"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/objectplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
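+// DatabaseResourceSchema returns the Terraform schema of the sys11dbaas_database resource.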
+func DatabaseResourceSchema(ctx context.Context) schema.Schema {
+ return schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "application_config": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "hostname": schema.StringAttribute{
+ Computed: true,
+ Description: "The dns name of the database in the format uuid.postgresql.syseleven.services.",
+ MarkdownDescription: "The dns name of the database in the format uuid.postgresql.syseleven.services.",
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "instances": schema.Int64Attribute{
+ Required: true,
+ Description: "How many nodes the cluster should have",
+ MarkdownDescription: "How many nodes the cluster should have",
+ Validators: []validator.Int64{
+ int64validator.AtMost(5),
+ },
+ },
+ "ip_address": schema.StringAttribute{
+ Computed: true,
+ Description: "The public IP address of the database. It will be pending if no address has been assigned yet.",
+ MarkdownDescription: "The public IP address of the database. It will be pending if no address has been assigned yet.",
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "password": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Sensitive: true,
+ Description: "The password for the admin user",
+ MarkdownDescription: "The password for the admin user",
+ Validators: []validator.String{
+ stringvalidator.LengthAtLeast(16),
+ },
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "recovery": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "exclusive": schema.BoolAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.Bool{
+ boolplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "source": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "target_lsn": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "target_name": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "target_time": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "target_xid": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ },
+ Optional: true,
+ PlanModifiers: []planmodifier.Object{
+ objectplanmodifier.UseStateForUnknown(),
+ objectplanmodifier.RequiresReplaceIfConfigured(),
+ },
+ },
+ "scheduled_backups": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "retention": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "How long Backups should be stored",
+ MarkdownDescription: "How long Backups should be stored",
+ Validators: []validator.Int64{
+ int64validator.Between(7, 90),
+ },
+ Default: int64default.StaticInt64(7),
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ "schedule": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "hour": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "The hour when the full backup should start. If this value is omitted, a random hour between 1am and 5am will be generated.",
+ MarkdownDescription: "The hour when the full backup should start. If this value is omitted, a random hour between 1am and 5am will be generated.",
+ Validators: []validator.Int64{
+ int64validator.Between(0, 23),
+ },
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ "minute": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "The minute when the full backup should start. If this value is omitted, a random minute will be generated.",
+ MarkdownDescription: "The minute when the full backup should start. If this value is omitted, a random minute will be generated.",
+ Validators: []validator.Int64{
+ int64validator.Between(0, 59),
+ },
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ },
+ Optional: true,
+ Computed: true,
+ Description: "The schedules for the backup policy.",
+ MarkdownDescription: "The schedules for the backup policy.",
+ },
+ },
+ Optional: true,
+ Computed: true,
+ Description: "The scheduled backup policy for the database.",
+ MarkdownDescription: "The scheduled backup policy for the database.",
+ },
+ "type": schema.StringAttribute{
+ Required: true,
+ },
+ "version": schema.StringAttribute{
+ Required: true,
+ Description: "minor version of postgresql",
+ MarkdownDescription: "minor version of postgresql",
+ },
+ },
+ Required: true,
+ },
+ "created_at": schema.StringAttribute{
+ Computed: true,
+ Description: "the date when the database was created",
+ MarkdownDescription: "the date when the database was created",
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "created_by": schema.StringAttribute{
+ Computed: true,
+ Description: "the initial creator of the database",
+ MarkdownDescription: "the initial creator of the database",
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "description": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ Description: "fulltext description of the database",
+ MarkdownDescription: "fulltext description of the database",
+ Validators: []validator.String{
+ stringvalidator.LengthBetween(0, 128),
+ },
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Default: stringdefault.StaticString(""),
+ },
+ "last_modified_at": schema.StringAttribute{
+ Computed: true,
+ Description: "the date when the database was last modified",
+ MarkdownDescription: "the date when the database was last modified",
+ },
+ "last_modified_by": schema.StringAttribute{
+ Computed: true,
+ Description: "the user who last changed of the database",
+ MarkdownDescription: "the user who last changed of the database",
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: "The name of the database.",
+ MarkdownDescription: "The name of the database.",
+ Validators: []validator.String{
+ stringvalidator.LengthBetween(1, 64),
+ stringvalidator.RegexMatches(regexp.MustCompile("^(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])?$"), ""),
+ },
+ },
+ "service_config": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "disksize": schema.Int64Attribute{
+ Required: true,
+ Description: "Disksize in GB",
+ MarkdownDescription: "Disksize in GB",
+ Validators: []validator.Int64{
+ int64validator.Between(5, 500),
+ },
+ },
+ "flavor": schema.StringAttribute{
+ Required: true,
+ Description: "vm flavor to use",
+ MarkdownDescription: "vm flavor to use",
+ },
+ "maintenance_window": schema.SingleNestedAttribute{
+ Attributes: map[string]schema.Attribute{
+ "day_of_week": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Day of week as a cron time (0=Sun, 1=Mon, ..., 6=Sat). If omitted, a random day will be used.",
+ MarkdownDescription: "Day of week as a cron time (0=Sun, 1=Mon, ..., 6=Sat). If omitted, a random day will be used.",
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ "start_hour": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Hour when the maintenance window starts. If omitted, a random hour between 20 and 4 will be used.",
+ MarkdownDescription: "Hour when the maintenance window starts. If omitted, a random hour between 20 and 4 will be used.",
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ "start_minute": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ Description: "Minute when the maintenance window starts. If omitted, a random minute will be used.",
+ MarkdownDescription: "Minute when the maintenance window starts. If omitted, a random minute will be used.",
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ },
+ Optional: true,
+ Computed: true,
+ Description: "The maintenance window. This will be a time window for updates and maintenance. If omitted, a random window will be generated.",
+ MarkdownDescription: "The maintenance window. This will be a time window for updates and maintenance. If omitted, a random window will be generated.",
+ PlanModifiers: []planmodifier.Object{
+ objectplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "region": schema.StringAttribute{
+ Required: true,
+ Description: "the region for the database",
+ MarkdownDescription: "the region for the database",
+ },
+ "remote_ips": schema.ListAttribute{
+ ElementType: types.StringType,
+ Optional: true,
+ Computed: true,
+ Description: "List of IP addresses, that should be allowed to connect to the database",
+ MarkdownDescription: "List of IP addresses, that should be allowed to connect to the database",
+ PlanModifiers: []planmodifier.List{
+ listplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "type": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ Required: true,
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ },
+ "phase": schema.StringAttribute{
+ Computed: true,
+ },
+ "resource_status": schema.StringAttribute{
+ Computed: true,
+ },
+ "uuid": schema.StringAttribute{
+ Computed: true,
+ Description: "The UUID of the database.",
+ MarkdownDescription: "The UUID of the database.",
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ },
+ }
+}
diff --git a/internal/provider/service_config.go b/internal/provider/service_config.go
new file mode 100644
index 0000000..6fa289f
--- /dev/null
+++ b/internal/provider/service_config.go
@@ -0,0 +1,1106 @@
+package provider
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-go/tftypes"
+ sys11dbaassdk "github.com/syseleven/sys11dbaas-sdk"
+)
+
+var _ basetypes.ObjectTypable = ServiceConfigType{}
+
+type ServiceConfigType struct {
+ basetypes.ObjectType
+}
+
+func (t ServiceConfigType) Equal(o attr.Type) bool {
+ other, ok := o.(ServiceConfigType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t ServiceConfigType) String() string {
+ return "ServiceConfigType"
+}
+
+func (t ServiceConfigType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ disksizeAttribute, ok := attributes["disksize"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `disksize is missing from object`)
+
+ return nil, diags
+ }
+
+ disksizeVal, ok := disksizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`disksize expected to be basetypes.Int64Value, was: %T`, disksizeAttribute))
+ }
+
+ flavorAttribute, ok := attributes["flavor"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `flavor is missing from object`)
+
+ return nil, diags
+ }
+
+ flavorVal, ok := flavorAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`flavor expected to be basetypes.StringValue, was: %T`, flavorAttribute))
+ }
+
+ maintenanceWindowAttribute, ok := attributes["maintenance_window"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `maintenance_window is missing from object`)
+
+ return nil, diags
+ }
+
+ maintenanceWindowVal, ok := maintenanceWindowAttribute.(basetypes.ObjectValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`maintenance_window expected to be basetypes.ObjectValue, was: %T`, maintenanceWindowAttribute))
+ }
+
+ regionAttribute, ok := attributes["region"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `region is missing from object`)
+
+ return nil, diags
+ }
+
+ regionVal, ok := regionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`region expected to be basetypes.StringValue, was: %T`, regionAttribute))
+ }
+
+ remoteIpsAttribute, ok := attributes["remote_ips"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `remote_ips is missing from object`)
+
+ return nil, diags
+ }
+
+ remoteIpsVal, ok := remoteIpsAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`remote_ips expected to be basetypes.ListValue, was: %T`, remoteIpsAttribute))
+ }
+
+ typeAttribute, ok := attributes["type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `type is missing from object`)
+
+ return nil, diags
+ }
+
+ typeVal, ok := typeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`type expected to be basetypes.StringValue, was: %T`, typeAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return ServiceConfigValue{
+ Disksize: disksizeVal,
+ Flavor: flavorVal,
+ MaintenanceWindow: maintenanceWindowVal,
+ Region: regionVal,
+ RemoteIps: remoteIpsVal,
+ ServiceConfigType: typeVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewServiceConfigValueNull() ServiceConfigValue {
+ return ServiceConfigValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewServiceConfigValueUnknown() ServiceConfigValue {
+ return ServiceConfigValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewServiceConfigValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (ServiceConfigValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing ServiceConfigValue Attribute Value",
+ "While creating a ServiceConfigValue value, a missing attribute value was detected. "+
+ "A ServiceConfigValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("ServiceConfigValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid ServiceConfigValue Attribute Type",
+ "While creating a ServiceConfigValue value, an invalid attribute value was detected. "+
+ "A ServiceConfigValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("ServiceConfigValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("ServiceConfigValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra ServiceConfigValue Attribute Value",
+ "While creating a ServiceConfigValue value, an extra attribute value was detected. "+
+ "A ServiceConfigValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra ServiceConfigValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewServiceConfigValueUnknown(), diags
+ }
+
+ disksizeAttribute, ok := attributes["disksize"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `disksize is missing from object`)
+
+ return NewServiceConfigValueUnknown(), diags
+ }
+
+ disksizeVal, ok := disksizeAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`disksize expected to be basetypes.Int64Value, was: %T`, disksizeAttribute))
+ }
+
+ flavorAttribute, ok := attributes["flavor"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `flavor is missing from object`)
+
+ return NewServiceConfigValueUnknown(), diags
+ }
+
+ flavorVal, ok := flavorAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`flavor expected to be basetypes.StringValue, was: %T`, flavorAttribute))
+ }
+
+ maintenanceWindowAttribute, ok := attributes["maintenance_window"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `maintenance_window is missing from object`)
+
+ return NewServiceConfigValueUnknown(), diags
+ }
+
+ maintenanceWindowVal, ok := maintenanceWindowAttribute.(basetypes.ObjectValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`maintenance_window expected to be basetypes.ObjectValue, was: %T`, maintenanceWindowAttribute))
+ }
+
+ regionAttribute, ok := attributes["region"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `region is missing from object`)
+
+ return NewServiceConfigValueUnknown(), diags
+ }
+
+ regionVal, ok := regionAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`region expected to be basetypes.StringValue, was: %T`, regionAttribute))
+ }
+
+ remoteIpsAttribute, ok := attributes["remote_ips"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `remote_ips is missing from object`)
+
+ return NewServiceConfigValueUnknown(), diags
+ }
+
+ remoteIpsVal, ok := remoteIpsAttribute.(basetypes.ListValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`remote_ips expected to be basetypes.ListValue, was: %T`, remoteIpsAttribute))
+ }
+
+ typeAttribute, ok := attributes["type"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `type is missing from object`)
+
+ return NewServiceConfigValueUnknown(), diags
+ }
+
+ typeVal, ok := typeAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`type expected to be basetypes.StringValue, was: %T`, typeAttribute))
+ }
+
+ if diags.HasError() {
+ return NewServiceConfigValueUnknown(), diags
+ }
+
+ return ServiceConfigValue{
+ Disksize: disksizeVal,
+ Flavor: flavorVal,
+ MaintenanceWindow: maintenanceWindowVal,
+ Region: regionVal,
+ RemoteIps: remoteIpsVal,
+ ServiceConfigType: typeVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewServiceConfigValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) ServiceConfigValue {
+ object, diags := NewServiceConfigValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewServiceConfigValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t ServiceConfigType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewServiceConfigValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewServiceConfigValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewServiceConfigValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewServiceConfigValueMust(ServiceConfigValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t ServiceConfigType) ValueType(ctx context.Context) attr.Value {
+ return ServiceConfigValue{}
+}
+
+var _ basetypes.ObjectValuable = ServiceConfigValue{}
+
+type ServiceConfigValue struct {
+ Disksize basetypes.Int64Value `tfsdk:"disksize"`
+ Flavor basetypes.StringValue `tfsdk:"flavor"`
+ MaintenanceWindow basetypes.ObjectValue `tfsdk:"maintenance_window"`
+ Region basetypes.StringValue `tfsdk:"region"`
+ RemoteIps basetypes.ListValue `tfsdk:"remote_ips"`
+ ServiceConfigType basetypes.StringValue `tfsdk:"type"`
+ state attr.ValueState
+}
+
+func (v ServiceConfigValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 6)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["disksize"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["flavor"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["maintenance_window"] = basetypes.ObjectType{
+ AttrTypes: MaintenanceWindowValue{}.AttributeTypes(ctx),
+ }.TerraformType(ctx)
+ attrTypes["region"] = basetypes.StringType{}.TerraformType(ctx)
+ attrTypes["remote_ips"] = basetypes.ListType{
+ ElemType: types.StringType,
+ }.TerraformType(ctx)
+ attrTypes["type"] = basetypes.StringType{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 6)
+
+ val, err = v.Disksize.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["disksize"] = val
+
+ val, err = v.Flavor.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["flavor"] = val
+
+ val, err = v.MaintenanceWindow.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["maintenance_window"] = val
+
+ val, err = v.Region.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["region"] = val
+
+ val, err = v.RemoteIps.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["remote_ips"] = val
+
+ val, err = v.ServiceConfigType.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["type"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v ServiceConfigValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v ServiceConfigValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v ServiceConfigValue) String() string {
+ return "ServiceConfigValue"
+}
+
+func (v ServiceConfigValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ var maintenanceWindow basetypes.ObjectValue
+
+ if v.MaintenanceWindow.IsNull() {
+ maintenanceWindow = types.ObjectNull(
+ MaintenanceWindowValue{}.AttributeTypes(ctx),
+ )
+ }
+
+ if v.MaintenanceWindow.IsUnknown() {
+ maintenanceWindow = types.ObjectUnknown(
+ MaintenanceWindowValue{}.AttributeTypes(ctx),
+ )
+ }
+
+ if !v.MaintenanceWindow.IsNull() && !v.MaintenanceWindow.IsUnknown() {
+ maintenanceWindow = types.ObjectValueMust(
+ MaintenanceWindowValue{}.AttributeTypes(ctx),
+ v.MaintenanceWindow.Attributes(),
+ )
+ }
+
+ remoteIpsVal, d := types.ListValue(types.StringType, v.RemoteIps.Elements())
+
+ diags.Append(d...)
+
+ if d.HasError() {
+ return types.ObjectUnknown(map[string]attr.Type{
+ "disksize": basetypes.Int64Type{},
+ "flavor": basetypes.StringType{},
+ "maintenance_window": basetypes.ObjectType{
+ AttrTypes: MaintenanceWindowValue{}.AttributeTypes(ctx),
+ },
+ "region": basetypes.StringType{},
+ "remote_ips": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "type": basetypes.StringType{},
+ }), diags
+ }
+
+ objVal, diags := types.ObjectValue(
+ map[string]attr.Type{
+ "disksize": basetypes.Int64Type{},
+ "flavor": basetypes.StringType{},
+ "maintenance_window": basetypes.ObjectType{
+ AttrTypes: MaintenanceWindowValue{}.AttributeTypes(ctx),
+ },
+ "region": basetypes.StringType{},
+ "remote_ips": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "type": basetypes.StringType{},
+ },
+ map[string]attr.Value{
+ "disksize": v.Disksize,
+ "flavor": v.Flavor,
+ "maintenance_window": maintenanceWindow,
+ "region": v.Region,
+ "remote_ips": remoteIpsVal,
+ "type": v.ServiceConfigType,
+ })
+
+ return objVal, diags
+}
+
+func (v ServiceConfigValue) Equal(o attr.Value) bool {
+ other, ok := o.(ServiceConfigValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.Disksize.Equal(other.Disksize) {
+ return false
+ }
+
+ if !v.Flavor.Equal(other.Flavor) {
+ return false
+ }
+
+ if !v.MaintenanceWindow.Equal(other.MaintenanceWindow) {
+ return false
+ }
+
+ if !v.Region.Equal(other.Region) {
+ return false
+ }
+
+ if !v.RemoteIps.Equal(other.RemoteIps) {
+ return false
+ }
+
+ if !v.ServiceConfigType.Equal(other.ServiceConfigType) {
+ return false
+ }
+
+ return true
+}
+
+func (v ServiceConfigValue) Type(ctx context.Context) attr.Type {
+ return ServiceConfigType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v ServiceConfigValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "disksize": basetypes.Int64Type{},
+ "flavor": basetypes.StringType{},
+ "maintenance_window": basetypes.ObjectType{
+ AttrTypes: MaintenanceWindowValue{}.AttributeTypes(ctx),
+ },
+ "region": basetypes.StringType{},
+ "remote_ips": basetypes.ListType{
+ ElemType: types.StringType,
+ },
+ "type": basetypes.StringType{},
+ }
+}
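+
+// NOTE: Illustrative usage sketch (not generated code). It shows one way a known
+// ServiceConfigValue could be assembled from its attribute types; the flavor,
+// region, and type strings below are placeholders, not values the API is known
+// to accept.
+//
+//	ctx := context.Background()
+//	remoteIps, _ := types.ListValue(types.StringType, []attr.Value{types.StringValue("203.0.113.0/24")})
+//	sc := NewServiceConfigValueMust(
+//		ServiceConfigValue{}.AttributeTypes(ctx),
+//		map[string]attr.Value{
+//			"disksize":           types.Int64Value(25),
+//			"flavor":             types.StringValue("example-flavor"),
+//			"maintenance_window": types.ObjectNull(MaintenanceWindowValue{}.AttributeTypes(ctx)),
+//			"region":             types.StringValue("example-region"),
+//			"remote_ips":         remoteIps,
+//			"type":               types.StringValue("postgresql"),
+//		},
+//	)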
+
+var _ basetypes.ObjectTypable = MaintenanceWindowType{}
+
+type MaintenanceWindowType struct {
+ basetypes.ObjectType
+}
+
+func (t MaintenanceWindowType) Equal(o attr.Type) bool {
+ other, ok := o.(MaintenanceWindowType)
+
+ if !ok {
+ return false
+ }
+
+ return t.ObjectType.Equal(other.ObjectType)
+}
+
+func (t MaintenanceWindowType) String() string {
+ return "MaintenanceWindowType"
+}
+
+func (t MaintenanceWindowType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ attributes := in.Attributes()
+
+ dayOfWeekAttribute, ok := attributes["day_of_week"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `day_of_week is missing from object`)
+
+ return nil, diags
+ }
+
+ dayOfWeekVal, ok := dayOfWeekAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`day_of_week expected to be basetypes.Int64Value, was: %T`, dayOfWeekAttribute))
+ }
+
+ startHourAttribute, ok := attributes["start_hour"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `start_hour is missing from object`)
+
+ return nil, diags
+ }
+
+ startHourVal, ok := startHourAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`start_hour expected to be basetypes.Int64Value, was: %T`, startHourAttribute))
+ }
+
+ startMinuteAttribute, ok := attributes["start_minute"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `start_minute is missing from object`)
+
+ return nil, diags
+ }
+
+ startMinuteVal, ok := startMinuteAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`start_minute expected to be basetypes.Int64Value, was: %T`, startMinuteAttribute))
+ }
+
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return MaintenanceWindowValue{
+ DayOfWeek: dayOfWeekVal,
+ StartHour: startHourVal,
+ StartMinute: startMinuteVal,
+ state: attr.ValueStateKnown,
+ }.ToObjectValue(ctx)
+}
+
+func NewMaintenanceWindowValueNull() MaintenanceWindowValue {
+ return MaintenanceWindowValue{
+ state: attr.ValueStateNull,
+ }
+}
+
+func NewMaintenanceWindowValueUnknown() MaintenanceWindowValue {
+ return MaintenanceWindowValue{
+ state: attr.ValueStateUnknown,
+ }
+}
+
+func NewMaintenanceWindowValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (MaintenanceWindowValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
+ ctx := context.Background()
+
+ for name, attributeType := range attributeTypes {
+ attribute, ok := attributes[name]
+
+ if !ok {
+ diags.AddError(
+ "Missing MaintenanceWindowValue Attribute Value",
+ "While creating a MaintenanceWindowValue value, a missing attribute value was detected. "+
+ "A MaintenanceWindowValue must contain values for all attributes, even if null or unknown. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("MaintenanceWindowValue Attribute Name (%s) Expected Type: %s, attributes: %v", name, attributeType.String(), attributes),
+ )
+
+ continue
+ }
+
+ if !attributeType.Equal(attribute.Type(ctx)) {
+ diags.AddError(
+ "Invalid MaintenanceWindowValue Attribute Type",
+ "While creating a MaintenanceWindowValue value, an invalid attribute value was detected. "+
+ "A MaintenanceWindowValue must use a matching attribute type for the value. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("MaintenanceWindowValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
+ fmt.Sprintf("MaintenanceWindowValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
+ )
+ }
+ }
+
+ for name := range attributes {
+ _, ok := attributeTypes[name]
+
+ if !ok {
+ diags.AddError(
+ "Extra MaintenanceWindowValue Attribute Value",
+ "While creating a MaintenanceWindowValue value, an extra attribute value was detected. "+
+ "A MaintenanceWindowValue must not contain values beyond the expected attribute types. "+
+ "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
+ fmt.Sprintf("Extra MaintenanceWindowValue Attribute Name: %s", name),
+ )
+ }
+ }
+
+ if diags.HasError() {
+ return NewMaintenanceWindowValueUnknown(), diags
+ }
+
+ dayOfWeekAttribute, ok := attributes["day_of_week"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `day_of_week is missing from object`)
+
+ return NewMaintenanceWindowValueUnknown(), diags
+ }
+
+ dayOfWeekVal, ok := dayOfWeekAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`day_of_week expected to be basetypes.Int64Value, was: %T`, dayOfWeekAttribute))
+ }
+
+ startHourAttribute, ok := attributes["start_hour"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `start_hour is missing from object`)
+
+ return NewMaintenanceWindowValueUnknown(), diags
+ }
+
+ startHourVal, ok := startHourAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`start_hour expected to be basetypes.Int64Value, was: %T`, startHourAttribute))
+ }
+
+ startMinuteAttribute, ok := attributes["start_minute"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `start_minute is missing from object`)
+
+ return NewMaintenanceWindowValueUnknown(), diags
+ }
+
+ startMinuteVal, ok := startMinuteAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`start_minute expected to be basetypes.Int64Value, was: %T`, startMinuteAttribute))
+ }
+
+ if diags.HasError() {
+ return NewMaintenanceWindowValueUnknown(), diags
+ }
+
+ return MaintenanceWindowValue{
+ DayOfWeek: dayOfWeekVal,
+ StartHour: startHourVal,
+ StartMinute: startMinuteVal,
+ state: attr.ValueStateKnown,
+ }, diags
+}
+
+func NewMaintenanceWindowValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) MaintenanceWindowValue {
+ object, diags := NewMaintenanceWindowValue(attributeTypes, attributes)
+
+ if diags.HasError() {
+ // This could potentially be added to the diag package.
+ diagsStrings := make([]string, 0, len(diags))
+
+ for _, diagnostic := range diags {
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "%s | %s | %s",
+ diagnostic.Severity(),
+ diagnostic.Summary(),
+ diagnostic.Detail()))
+ }
+
+ panic("NewMaintenanceWindowValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
+ }
+
+ return object
+}
+
+func (t MaintenanceWindowType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
+ if in.Type() == nil {
+ return NewMaintenanceWindowValueNull(), nil
+ }
+
+ if !in.Type().Equal(t.TerraformType(ctx)) {
+ return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
+ }
+
+ if !in.IsKnown() {
+ return NewMaintenanceWindowValueUnknown(), nil
+ }
+
+ if in.IsNull() {
+ return NewMaintenanceWindowValueNull(), nil
+ }
+
+ attributes := map[string]attr.Value{}
+
+ val := map[string]tftypes.Value{}
+
+ err := in.As(&val)
+
+ if err != nil {
+ return nil, err
+ }
+
+ for k, v := range val {
+ a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
+
+ if err != nil {
+ return nil, err
+ }
+
+ attributes[k] = a
+ }
+
+ return NewMaintenanceWindowValueMust(MaintenanceWindowValue{}.AttributeTypes(ctx), attributes), nil
+}
+
+func (t MaintenanceWindowType) ValueType(ctx context.Context) attr.Value {
+ return MaintenanceWindowValue{}
+}
+
+var _ basetypes.ObjectValuable = MaintenanceWindowValue{}
+
+type MaintenanceWindowValue struct {
+ DayOfWeek basetypes.Int64Value `tfsdk:"day_of_week"`
+ StartHour basetypes.Int64Value `tfsdk:"start_hour"`
+ StartMinute basetypes.Int64Value `tfsdk:"start_minute"`
+ state attr.ValueState
+}
+
+func (v MaintenanceWindowValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+ attrTypes := make(map[string]tftypes.Type, 3)
+
+ var val tftypes.Value
+ var err error
+
+ attrTypes["day_of_week"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["start_hour"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["start_minute"] = basetypes.Int64Type{}.TerraformType(ctx)
+
+ objectType := tftypes.Object{AttributeTypes: attrTypes}
+
+ switch v.state {
+ case attr.ValueStateKnown:
+ vals := make(map[string]tftypes.Value, 3)
+
+ val, err = v.DayOfWeek.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["day_of_week"] = val
+
+ val, err = v.StartHour.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["start_hour"] = val
+
+ val, err = v.StartMinute.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["start_minute"] = val
+
+ if err := tftypes.ValidateValue(objectType, vals); err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ return tftypes.NewValue(objectType, vals), nil
+ case attr.ValueStateNull:
+ return tftypes.NewValue(objectType, nil), nil
+ case attr.ValueStateUnknown:
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
+ default:
+ panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
+ }
+}
+
+func (v MaintenanceWindowValue) IsNull() bool {
+ return v.state == attr.ValueStateNull
+}
+
+func (v MaintenanceWindowValue) IsUnknown() bool {
+ return v.state == attr.ValueStateUnknown
+}
+
+func (v MaintenanceWindowValue) String() string {
+ return "MaintenanceWindowValue"
+}
+
+func (v MaintenanceWindowValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+ var diags diag.Diagnostics
+
+ objVal, diags := types.ObjectValue(
+ map[string]attr.Type{
+ "day_of_week": basetypes.Int64Type{},
+ "start_hour": basetypes.Int64Type{},
+ "start_minute": basetypes.Int64Type{},
+ },
+ map[string]attr.Value{
+ "day_of_week": v.DayOfWeek,
+ "start_hour": v.StartHour,
+ "start_minute": v.StartMinute,
+ })
+
+ return objVal, diags
+}
+
+func (v MaintenanceWindowValue) Equal(o attr.Value) bool {
+ other, ok := o.(MaintenanceWindowValue)
+
+ if !ok {
+ return false
+ }
+
+ if v.state != other.state {
+ return false
+ }
+
+ if v.state != attr.ValueStateKnown {
+ return true
+ }
+
+ if !v.DayOfWeek.Equal(other.DayOfWeek) {
+ return false
+ }
+
+ if !v.StartHour.Equal(other.StartHour) {
+ return false
+ }
+
+ if !v.StartMinute.Equal(other.StartMinute) {
+ return false
+ }
+
+ return true
+}
+
+func (v MaintenanceWindowValue) Type(ctx context.Context) attr.Type {
+ return MaintenanceWindowType{
+ basetypes.ObjectType{
+ AttrTypes: v.AttributeTypes(ctx),
+ },
+ }
+}
+
+func (v MaintenanceWindowValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+ return map[string]attr.Type{
+ "day_of_week": basetypes.Int64Type{},
+ "start_hour": basetypes.Int64Type{},
+ "start_minute": basetypes.Int64Type{},
+ }
+}
+
+func (v MaintenanceWindowValue) ToDBaaSSdkObject(ctx context.Context) (*sys11dbaassdk.MaintenanceWindow, diag.Diagnostics) {
+
+ var dayOfWeek *int
+ if !v.DayOfWeek.IsNull() && !v.DayOfWeek.IsUnknown() {
+ dayOfWeek = sys11dbaassdk.Int64ToIntPtr(v.DayOfWeek.ValueInt64())
+ }
+
+ var startHour *int
+ if !v.StartHour.IsNull() && !v.StartHour.IsUnknown() {
+ startHour = sys11dbaassdk.Int64ToIntPtr(v.StartHour.ValueInt64())
+ }
+
+ var startMinute *int
+ if !v.StartMinute.IsNull() && !v.StartMinute.IsUnknown() {
+ startMinute = sys11dbaassdk.Int64ToIntPtr(v.StartMinute.ValueInt64())
+ }
+
+ return &sys11dbaassdk.MaintenanceWindow{
+ DayOfWeek: dayOfWeek,
+ StartHour: startHour,
+ StartMinute: startMinute,
+ }, diag.Diagnostics{}
+}
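+
+// NOTE: Illustrative usage sketch (not generated code). It shows how a known
+// MaintenanceWindowValue could be built and handed to the SDK; the numbers are
+// arbitrary placeholders.
+//
+//	ctx := context.Background()
+//	mw := NewMaintenanceWindowValueMust(
+//		MaintenanceWindowValue{}.AttributeTypes(ctx),
+//		map[string]attr.Value{
+//			"day_of_week":  types.Int64Value(1),
+//			"start_hour":   types.Int64Value(2),
+//			"start_minute": types.Int64Value(30),
+//		},
+//	)
+//	sdkWindow, _ := mw.ToDBaaSSdkObject(ctx) // *sys11dbaassdk.MaintenanceWindow with nil-able int pointers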
diff --git a/internal/provider/sys11dbaas_provider.go b/internal/provider/sys11dbaas_provider.go
new file mode 100644
index 0000000..9dcaaf6
--- /dev/null
+++ b/internal/provider/sys11dbaas_provider.go
@@ -0,0 +1,266 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package provider
+
+import (
+ "context"
+ "os"
+ "strconv"
+
+ sys11dbaassdk "github.com/syseleven/sys11dbaas-sdk"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/provider"
+ "github.com/hashicorp/terraform-plugin-framework/provider/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+)
+
+// Ensure Sys11DBaaSProvider satisfies various provider interfaces.
+var _ provider.Provider = &Sys11DBaaSProvider{}
+
+// Sys11DBaaSProvider defines the provider implementation.
+type Sys11DBaaSProvider struct {
+ // version is set to the provider version on release, "dev" when the
+ // provider is built and run locally, and "test" when running acceptance
+ // testing.
+ version string
+}
+
+// Sys11DBaaSProviderModel maps provider schema data to a Go type.
+type Sys11DBaaSProviderModel struct {
+ URL types.String `tfsdk:"url"`
+ ApiKey types.String `tfsdk:"api_key"`
+ Project types.String `tfsdk:"project"`
+ Organization types.String `tfsdk:"organization"`
+ WaitForCreation types.Bool `tfsdk:"wait_for_creation"`
+}
+
+type sys11DBaaSProviderData struct {
+ client *sys11dbaassdk.Client
+ project types.String `tfsdk:"project"`
+ organization types.String `tfsdk:"organization"`
+ waitForCreation types.Bool `tfsdk:"wait_for_creation"`
+}
+
+func (p *Sys11DBaaSProvider) Metadata(ctx context.Context, req provider.MetadataRequest, resp *provider.MetadataResponse) {
+ resp.TypeName = "sys11dbaas"
+ resp.Version = p.version
+}
+
+// Schema defines the provider-level schema for configuration data.
+func (p *Sys11DBaaSProvider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ Attributes: map[string]schema.Attribute{
+ "url": schema.StringAttribute{
+ Required: true,
+ },
+ "api_key": schema.StringAttribute{
+ Required: true,
+ },
+ "organization": schema.StringAttribute{
+ Required: true,
+ },
+ "project": schema.StringAttribute{
+ Required: true,
+ },
+ "wait_for_creation": schema.BoolAttribute{
+ Required: true,
+ Optional: false,
+ Description: "Wait until databases are fully deployed and usable",
+ },
+ },
+ }
+}
+
+func (p *Sys11DBaaSProvider) Configure(ctx context.Context, req provider.ConfigureRequest, resp *provider.ConfigureResponse) {
+ tflog.Info(ctx, "Configuring Sys11DBaaS client")
+
+ // Retrieve provider data from configuration
+ var config Sys11DBaaSProviderModel
+ diags := req.Config.Get(ctx, &config)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // If practitioner provided a configuration value for any of the
+ // attributes, it must be a known value.
+
+ if config.URL.IsUnknown() {
+ resp.Diagnostics.AddAttributeError(
+ path.Root("url"),
+ "Unknown Sys11DBaaS API Url",
+ "The provider cannot create the Sys11DBaaS API client as there is an unknown configuration value for the Sys11DBaaS API url. "+
+ "Either target apply the source of the value first, set the value statically in the configuration, or use the SYS11DBAAS_URL environment variable.",
+ )
+ }
+
+ if config.ApiKey.IsUnknown() {
+ resp.Diagnostics.AddAttributeError(
+ path.Root("api_key"),
+ "Unknown Sys11DBaaS API ApiKey",
+ "The provider cannot create the Sys11DBaaS API client as there is an unknown configuration value for the Sys11DBaaS API username. "+
+ "Either target apply the source of the value first, set the value statically in the configuration, or use the SYS11DBAAS_USERNAME environment variable.",
+ )
+ }
+
+ if config.Organization.IsUnknown() {
+ resp.Diagnostics.AddAttributeError(
+ path.Root("organization"),
+ "Unknown Sys11DBaaS API org",
+ "The provider cannot create the Sys11DBaaS API client as there is an unknown configuration value for the Sys11DBaaS API username. "+
+ "Either target apply the source of the value first, set the value statically in the configuration, or use the SYS11DBAAS_USERNAME environment variable.",
+ )
+ }
+
+ if config.Project.IsUnknown() {
+ resp.Diagnostics.AddAttributeError(
+ path.Root("project"),
+ "Unknown Sys11DBaaS API project",
+ "The provider cannot create the Sys11DBaaS API client as there is an unknown configuration value for the Sys11DBaaS API username. "+
+ "Either target apply the source of the value first, set the value statically in the configuration, or use the SYS11DBAAS_USERNAME environment variable.",
+ )
+ }
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Default values to environment variables, but override
+ // with Terraform configuration value if set.
+
+ url := os.Getenv("SYS11DBAAS_URL")
+ apikey := os.Getenv("SYS11DBAAS_API_KEY")
+ organization := os.Getenv("SYS11DBAAS_ORGANIZATION")
+ project := os.Getenv("SYS11DBAAS_PROJECT")
+ waitForCreation, _ := strconv.ParseBool(os.Getenv("SYS11DBAAS_WAIT_FOR_CREATION"))
+
+ if !config.URL.IsNull() {
+ url = config.URL.ValueString()
+ }
+
+ if !config.ApiKey.IsNull() {
+ apikey = config.ApiKey.ValueString()
+ }
+
+ if !config.Organization.IsNull() {
+ organization = config.Organization.ValueString()
+ }
+
+ if !config.Project.IsNull() {
+ project = config.Project.ValueString()
+ }
+
+ if !config.WaitForCreation.IsNull() {
+ waitForCreation = config.WaitForCreation.ValueBool()
+ }
+
+ // If any of the expected configurations are missing, return
+ // errors with provider-specific guidance.
+
+ if url == "" {
+ resp.Diagnostics.AddAttributeError(
+ path.Root("url"),
+ "Missing Sys11DBaaS API Url",
+ "The provider cannot create the Sys11DBaaS API client as there is a missing or empty value for the Sys11DBaaS API url. "+
+ "Set the url value in the configuration or use the SYS11DBAAS_URL environment variable. "+
+ "If either is already set, ensure the value is not empty.",
+ )
+ }
+
+ if apikey == "" {
+ resp.Diagnostics.AddAttributeError(
+ path.Root("api_key"),
+ "Missing Sys11DBaaS API ApiKey",
+ "The provider cannot create the Sys11DBaaS API client as there is a missing or empty value for the Sys11DBaaS API username. "+
+ "Set the username value in the configuration or use the SYS11DBAAS_USERNAME environment variable. "+
+ "If either is already set, ensure the value is not empty.",
+ )
+ }
+
+ if organization == "" {
+ resp.Diagnostics.AddAttributeError(
+ path.Root("organization"),
+ "Missing Sys11DBaaS API ApiKey",
+ "The provider cannot create the Sys11DBaaS API client as there is a missing or empty value for the Sys11DBaaS API username. "+
+ "Set the username value in the configuration or use the SYS11DBAAS_USERNAME environment variable. "+
+ "If either is already set, ensure the value is not empty.",
+ )
+ }
+
+ if project == "" {
+ resp.Diagnostics.AddAttributeError(
+ path.Root("project"),
+ "Missing Sys11DBaaS API ApiKey",
+ "The provider cannot create the Sys11DBaaS API client as there is a missing or empty value for the Sys11DBaaS API username. "+
+ "Set the username value in the configuration or use the SYS11DBAAS_USERNAME environment variable. "+
+ "If either is already set, ensure the value is not empty.",
+ )
+ }
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx = tflog.SetField(ctx, "sys11dbaas_url", url)
+ ctx = tflog.SetField(ctx, "sys11dbaas_api_key", apikey)
+ ctx = tflog.SetField(ctx, "sys11dbaas_organization", organization)
+ ctx = tflog.SetField(ctx, "sys11dbaas_project", project)
+ ctx = tflog.SetField(ctx, "sys11dbaas_wait_for_creation", waitForCreation)
+ ctx = tflog.MaskFieldValuesWithFieldKeys(ctx, "sys11dbaas_api_key")
+
+ tflog.Debug(ctx, "Creating Sys11DBaaS client")
+
+ agent := "sys11dbaas-terraform/" + p.version
+
+ // Create a new Sys11DBaaS client using the configuration values
+ client, err := sys11dbaassdk.NewClient(url, apikey, agent, 60, sys11dbaassdk.AuthModeApiKey)
+ if err != nil {
+ resp.Diagnostics.AddError(
+ "Unable to Create Sys11DBaaS API Client",
+ "An unexpected error occurred when creating the Sys11DBaaS API client. "+
+ "If the error is not clear, please contact the provider developers.\n\n"+
+ "Sys11DBaaS Client Error: "+err.Error(),
+ )
+ return
+ }
+
+ // Make the Sys11DBaaS client available during DataSource and Resource
+ // type Configure methods.
+ resp.DataSourceData = &sys11DBaaSProviderData{
+ client: client,
+ project: config.Project,
+ organization: config.Organization,
+ waitForCreation: config.WaitForCreation,
+ }
+ resp.ResourceData = &sys11DBaaSProviderData{
+ client: client,
+ project: config.Project,
+ organization: config.Organization,
+ waitForCreation: config.WaitForCreation,
+ }
+
+ tflog.Info(ctx, "Configured Sys11DBaaS client", map[string]any{"success": true})
+}
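+
+// NOTE: Illustrative sketch (not part of this file). The database resource defined
+// elsewhere in this change receives the *sys11DBaaSProviderData set above through the
+// usual framework pattern; the resource type and field names here are assumptions:
+//
+//	func (r *databaseResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+//		if req.ProviderData == nil {
+//			return // provider not configured yet (e.g. during validation)
+//		}
+//		data, ok := req.ProviderData.(*sys11DBaaSProviderData)
+//		if !ok {
+//			resp.Diagnostics.AddError("Unexpected Resource Configure Type",
+//				fmt.Sprintf("Expected *sys11DBaaSProviderData, got: %T", req.ProviderData))
+//			return
+//		}
+//		r.client = data.client
+//	}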
+
+func (p *Sys11DBaaSProvider) Resources(ctx context.Context) []func() resource.Resource {
+ return []func() resource.Resource{
+ NewDatabaseResource,
+ }
+}
+
+func (p *Sys11DBaaSProvider) DataSources(ctx context.Context) []func() datasource.DataSource {
+ return []func() datasource.DataSource{}
+}
+
+func New(version string) func() provider.Provider {
+ return func() provider.Provider {
+ return &Sys11DBaaSProvider{
+ version: version,
+ }
+ }
+}
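+
+// NOTE: Illustrative sketch (not part of this change). The factory above can be wired
+// into plugin-framework acceptance tests with the standard helper; the map variable
+// name is an assumption:
+//
+//	var testAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
+//		"sys11dbaas": providerserver.NewProtocol6WithError(New("test")()),
+//	}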
diff --git a/main.go b/main.go
new file mode 100644
index 0000000..499b09d
--- /dev/null
+++ b/main.go
@@ -0,0 +1,52 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package main
+
+import (
+ "context"
+ "flag"
+ "log"
+
+ "terraform-provider-sys11dbaas/internal/provider"
+
+ "github.com/hashicorp/terraform-plugin-framework/providerserver"
+)
+
+// Run "go generate" to format example terraform files and generate the docs for the registry/website
+
+// If you do not have terraform installed, you can remove the formatting command, but it's suggested to
+// ensure the documentation is formatted properly.
+//go:generate terraform fmt -recursive ./examples/
+
+// Run the docs generation tool, check its repository for more information on how it works and how docs
+// can be customized.
+//go:generate go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs
+
+var (
+ // these will be set by the goreleaser configuration
+ // to appropriate values for the compiled binary.
+ version string = "dev"
+
+ // goreleaser can pass other information to the main package, such as the specific commit
+ // https://goreleaser.com/cookbooks/using-main.version/
+)
+
+func main() {
+ var debug bool
+
+ flag.BoolVar(&debug, "debug", false, "set to true to run the provider with support for debuggers like delve")
+ flag.Parse()
+
+ opts := providerserver.ServeOpts{
+ // TODO: Update this string with the published name of your provider.
+ Address: "github.com/syseleven/terraform-provider-sys11dbaas",
+ Debug: debug,
+ }
+
+ err := providerserver.Serve(context.Background(), provider.New(version), opts)
+
+ if err != nil {
+ log.Fatal(err.Error())
+ }
+}
diff --git a/provider-spec.json b/provider-spec.json
new file mode 100644
index 0000000..aeeee23
--- /dev/null
+++ b/provider-spec.json
@@ -0,0 +1,447 @@
+{
+ "provider": {
+ "name": "sys11dbaas"
+ },
+ "resources": [
+ {
+ "name": "database",
+ "schema": {
+ "attributes": [
+ {
+ "name": "application_config",
+ "single_nested": {
+ "computed_optional_required": "required",
+ "attributes": [
+ {
+ "name": "instances",
+ "int64": {
+ "computed_optional_required": "required",
+ "description": "How many nodes the cluster should have",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ }
+ ],
+ "schema_definition": "int64validator.AtMost(5)"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "password",
+ "string": {
+ "computed_optional_required": "computed_optional",
+ "description": "The password for the admin user",
+ "sensitive": true,
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ }
+ ],
+ "schema_definition": "stringvalidator.LengthAtLeast(16)"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "recovery",
+ "single_nested": {
+ "computed_optional_required": "computed_optional",
+ "attributes": [
+ {
+ "name": "exclusive",
+ "bool": {
+ "computed_optional_required": "computed_optional"
+ }
+ },
+ {
+ "name": "source",
+ "string": {
+ "computed_optional_required": "computed_optional"
+ }
+ },
+ {
+ "name": "target_lsn",
+ "string": {
+ "computed_optional_required": "computed_optional"
+ }
+ },
+ {
+ "name": "target_name",
+ "string": {
+ "computed_optional_required": "computed_optional"
+ }
+ },
+ {
+ "name": "target_time",
+ "string": {
+ "computed_optional_required": "computed_optional"
+ }
+ },
+ {
+ "name": "target_xid",
+ "string": {
+ "computed_optional_required": "computed_optional"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "scheduled_backups",
+ "single_nested": {
+ "computed_optional_required": "computed_optional",
+ "attributes": [
+ {
+ "name": "retention",
+ "int64": {
+ "computed_optional_required": "computed_optional",
+ "default": {
+ "static": 7
+ },
+ "description": "How long Backups should be stored",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ }
+ ],
+ "schema_definition": "int64validator.Between(7, 90)"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "schedule",
+ "single_nested": {
+ "computed_optional_required": "computed_optional",
+ "attributes": [
+ {
+ "name": "hour",
+ "int64": {
+ "computed_optional_required": "computed_optional",
+ "description": "The hour when the full backup should start. If this value is omitted, a random hour between 1am and 5am will be generated.",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ }
+ ],
+ "schema_definition": "int64validator.Between(0, 23)"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "minute",
+ "int64": {
+ "computed_optional_required": "computed_optional",
+ "description": "The minute when the full backup should start. If this value is omitted, a random minute will be generated.",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ }
+ ],
+ "schema_definition": "int64validator.Between(0, 59)"
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "description": "The schedules for the backup policy."
+ }
+ }
+ ],
+ "description": "The scheduled backup policy for the database."
+ }
+ },
+ {
+ "name": "type",
+ "string": {
+ "computed_optional_required": "required"
+ }
+ },
+ {
+ "name": "version",
+ "string": {
+ "computed_optional_required": "required",
+ "description": "minor version of postgresql"
+ }
+ },
+ {
+ "name": "hostname",
+ "string": {
+ "computed_optional_required": "computed",
+ "description": "The dns name of the database in the format uuid.postgresql.syseleven.services."
+ }
+ },
+ {
+ "name": "ip_address",
+ "string": {
+ "computed_optional_required": "computed",
+ "description": "The public IP address of the database. It will be pending if no address has been assigned yet."
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "description",
+ "string": {
+ "computed_optional_required": "computed_optional",
+ "default": {
+ "static": ""
+ },
+ "description": "fulltext description of the database",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ }
+ ],
+ "schema_definition": "stringvalidator.LengthBetween(0, 128)"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "name",
+ "string": {
+ "computed_optional_required": "required",
+ "description": "The name of the database.",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ }
+ ],
+ "schema_definition": "stringvalidator.LengthBetween(1, 64)"
+ }
+ },
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "regexp"
+ },
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ }
+ ],
+ "schema_definition": "stringvalidator.RegexMatches(regexp.MustCompile(\"^(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])?$\"), \"\")"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "service_config",
+ "single_nested": {
+ "computed_optional_required": "required",
+ "attributes": [
+ {
+ "name": "disksize",
+ "int64": {
+ "computed_optional_required": "required",
+ "description": "Disksize in GB",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ }
+ ],
+ "schema_definition": "int64validator.Between(5, 500)"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "flavor",
+ "string": {
+ "computed_optional_required": "required",
+ "description": "vm flavor to use"
+ }
+ },
+ {
+ "name": "maintenance_window",
+ "single_nested": {
+ "computed_optional_required": "computed_optional",
+ "attributes": [
+ {
+ "name": "day_of_week",
+ "int64": {
+ "computed_optional_required": "computed_optional",
+ "description": "Day of week as a cron time (0=Sun, 1=Mon, ..., 7=Sun). If omitted, a random day will be used.",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ }
+ ],
+ "schema_definition": "int64validator.Between(0, 6)"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "start_hour",
+ "int64": {
+ "computed_optional_required": "computed_optional",
+ "description": "Hour when the maintenance window starts. If omitted, a random hour between 20 and 4 will be used.",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ }
+ ],
+ "schema_definition": "int64validator.Between(0, 23)"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "start_minute",
+ "int64": {
+ "computed_optional_required": "computed_optional",
+ "description": "Minute when the maintenance window starts. If omitted, a random minute will be used.",
+ "validators": [
+ {
+ "custom": {
+ "imports": [
+ {
+ "path": "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ }
+ ],
+ "schema_definition": "int64validator.Between(0, 59)"
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "description": "The maintenance window. This will be a time window for updates and maintenance. If omitted, a random window will be generated."
+ }
+ },
+ {
+ "name": "region",
+ "string": {
+ "computed_optional_required": "required",
+ "description": "the region for the database"
+ }
+ },
+ {
+ "name": "remote_ips",
+ "list": {
+ "computed_optional_required": "computed_optional",
+ "element_type": {
+ "string": {}
+ },
+ "description": "List of IP addresses, that should be allowed to connect to the database"
+ }
+ },
+ {
+ "name": "type",
+ "string": {
+ "computed_optional_required": "required"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "name": "created_at",
+ "string": {
+ "computed_optional_required": "computed",
+ "description": "the date when the database was created"
+ }
+ },
+ {
+ "name": "created_by",
+ "string": {
+ "computed_optional_required": "computed",
+ "description": "the initial creator of the database"
+ }
+ },
+ {
+ "name": "last_modified_at",
+ "string": {
+ "computed_optional_required": "computed",
+ "description": "the date when the database was last modified"
+ }
+ },
+ {
+ "name": "last_modified_by",
+ "string": {
+ "computed_optional_required": "computed",
+ "description": "the user who last changed of the database"
+ }
+ },
+ {
+ "name": "phase",
+ "string": {
+ "computed_optional_required": "computed"
+ }
+ },
+ {
+ "name": "resource_status",
+ "string": {
+ "computed_optional_required": "computed"
+ }
+ },
+ {
+ "name": "status",
+ "string": {
+ "computed_optional_required": "computed"
+ }
+ },
+ {
+ "name": "uuid",
+ "string": {
+ "computed_optional_required": "computed",
+ "description": "The UUID of the database."
+ }
+ },
+ {
+ "name": "db_uuid",
+ "string": {
+ "computed_optional_required": "computed_optional"
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "version": "0.1"
+}