From ab68bdd9ce4330802382b1d3c6aaed85515bdc23 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev <125960514+fivetran-aleksandrboldyrev@users.noreply.github.com> Date: Fri, 3 Nov 2023 09:18:50 +0100 Subject: [PATCH] Add ADLS destination support (#216) --- CHANGELOG.md | 8 +++ docs/data-sources/connector.md | 44 +++++++++++++ docs/data-sources/destination.md | 5 ++ docs/resources/connector.md | 44 +++++++++++++ docs/resources/destination.md | 5 ++ fivetran/data_source_destination.go | 33 +++++++++- fivetran/resource_destination.go | 61 +++++++++++++++++++ fivetran/resource_external_logging_test.go | 3 +- .../tests/mock/datasource_destination_test.go | 5 ++ .../tests/mock/resource_destination_test.go | 18 +++++- go.mod | 2 +- go.sum | 2 + 12 files changed, 226 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2d1b19c5..bbc72ba9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased](https://github.com/fivetran/terraform-provider-fivetran/compare/v1.1.1...HEAD) +## Added +- New fields have been added to the resource `fivetran_destination.config` and data source `fivetran_destination` to support Azure Data Lake Storage: + - Added field `storage_account_name` + - Added field `container_name` + - Added field `tenant_id` + - Added field `client_id` + - Added field `secret_value` + ## [1.1.1](https://github.com/fivetran/terraform-provider-fivetran/compare/v1.1.0...v1.1.1) ## Added diff --git a/docs/data-sources/connector.md b/docs/data-sources/connector.md index fd73b470..825cc45a 100644 --- a/docs/data-sources/connector.md +++ b/docs/data-sources/connector.md @@ -65,11 +65,13 @@ Read-Only: - Service `adobe_analytics_data_feed`: Azure Blob Storage public key - `access_key` (String) Field usage depends on `service` value: - Service `gainsight_customer_success`: The access key for API authentication. + - Service `gongio`: Your Gongio Access key. - Service `retailnext`: Your RetailNext access key. - `access_key_id` (String) Field usage depends on `service` value: - Service `appsflyer`: Your AWS access key ID. - Service `s3`: Access Key ID - `access_key_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `gongio`: Your Gongio Access Key Secret. - Service `s3`: Access Key Secret - `access_token` (String, Sensitive) Field usage depends on `service` value: - Service `big_commerce`: API access token of your store. @@ -211,6 +213,7 @@ Read-Only: - Service `oracle_sap_hva_netweaver`: Require TLS. - Service `postgres`: Require TLS through Tunnel - Service `postgres_rds`: Require TLS through Tunnel + - Service `sap_hana_db`: - Service `sql_server`: Require TLS. - Service `sql_server_hva`: Require TLS. - Service `sql_server_rds`: Require TLS. @@ -235,7 +238,9 @@ Read-Only: - Service `clickup`: Your ClickUp API key. - Service `confluent_cloud`: API Key - Service `coupa`: Your Coupa API key. + - Service `dcl_logistics`: Your DCL Logistics API key. - Service `delighted`: API Key for your Delighted account + - Service `destini`: Your Destini API Key. - Service `easypost`: Your EasyPost API Key. - Service `everhour`: Your Everhour API Token. - Service `freshdesk`: Your Freshdesk API Key. @@ -250,6 +255,7 @@ Read-Only: - Service `iterable`: Your Iterable API key. - Service `klaviyo`: Your Klaviyo API key. - Service `lever`: Your Lever API key. + - Service `luma`: Your Luma API key. - Service `mailgun`: Your Mailgun API key.
- Service `mandrill`: Your Mandrill API key. - Service `ortto`: Your Ortto API key. @@ -259,8 +265,10 @@ Read-Only: - Service `recurly`: The Recurly API key. - Service `rootly`: Your Rootly API key. - Service `sailthru`: The Sailthru API key. + - Service `salsify`: Your Salsify API Key. - Service `sendgrid`: The SendGrid API key. - Service `sendinblue`: Your Sendinblue API key. + - Service `simplesat`: Your Simplesat API key. - Service `sonarqube`: Your Sonarqube API key. - Service `squarespace`: Your Squarespace API key. - Service `stackadapt`: Your StackAdapt API key. @@ -383,6 +391,7 @@ Read-Only: - Service `webhooks`: The authentication mechanism you want to use - `auth_mode` (String) Field usage depends on `service` value: - Service `anaplan`: The Anaplan authentication method. + - Service `concur`: The Authentication Mode used by SAP Concur. It can be PasswordGrant or CompanyLevel auth mode - Service `github`: Authorization type. - `auth_type` (String) Field usage depends on `service` value: - Service `airtable`: Type of authentication being used by connector @@ -403,6 +412,7 @@ Read-Only: - `base_url` (String) Field usage depends on `service` value: - Service `brex`: Your Brex Base URL - Service `financial_force`: (Optional) The custom Salesforce domain. Make sure that the `base_url` starts with `https://`. + - Service `gongio`: Your Gong API Base URL. - Service `ironclad`: Your Ironclad base url. - Service `mailgun`: Your Mailgun base URL. - Service `ortto`: Your Ortto base URL. Possible values: `api`, `api.au`, `api.eu`. @@ -444,6 +454,8 @@ Read-Only: - `click_attribution_window` (String) Field usage depends on `service` value: - Service `facebook`: Time period to attribute conversions based on clicks. [Possible click_attribution_window values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#clickattributionwindow). - Service `pinterest_ads`: The number of days to use as the conversion attribution window for a 'click' action. +- `client` (String) Field usage depends on `service` value: + - Service `sap_hana_db`: - `client_cert` (String, Sensitive) Field usage depends on `service` value: - Service `apache_kafka`: Kafka client certificate. - Service `heroku_kafka`: Heroku Kafka client certificate. Required for `TLS` security protocol. @@ -475,6 +487,7 @@ Read-Only: - `client_key` (String, Sensitive) Field usage depends on `service` value: - Service `appfigures`: Your Appfigures Client Key. - `client_name` (String) Field usage depends on `service` value: + - Service `destini`: Your Destini Client Name. - Service `medallia`: Medallia company name - `client_secret` (String, Sensitive) Field usage depends on `service` value: - Service `adobe_analytics`: Client Secret from the Service Account (JWT) credentials of your Adobe Project. @@ -504,6 +517,11 @@ Read-Only: - Service `sage_intacct`: Company ID - `company_key` (String, Sensitive) Field usage depends on `service` value: - Service `khoros_care`: Your Khoros Care companyKey. + - Service `upland`: Your Upland Software Company Key. +- `company_request_token` (String, Sensitive) Field usage depends on `service` value: + - Service `concur`: The SAP Concur Company Request Token +- `company_uuid` (String) Field usage depends on `service` value: + - Service `concur`: The SAP Concur Company UUID - `compression` (String) Field usage depends on `service` value: - Service `azure_blob_storage`: The secrets that should be passed to the function at runtime. 
- Service `box`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. @@ -584,6 +602,7 @@ Read-Only: - Service `postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - Service `postgres_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - Service `s3`: Connection method. Default value: `Directly`. + - Service `sap_hana_db`: - Service `snowflake_db`: Directly or Private Link - Service `sql_server`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - Service `sql_server_hva`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. @@ -680,6 +699,7 @@ Read-Only: - Service `oracle_sap_hva`: The database name. - Service `postgres`: The database name. - Service `postgres_rds`: The database name. + - Service `sap_hana_db`: - Service `snowflake_db`: The database name: Snowflake - Service `sql_server`: The database name. - Service `sql_server_hva`: The database name. @@ -898,6 +918,7 @@ Read-Only: - Service `oracle_sap_hva_netweaver`: DB instance host or IP address. - Service `postgres`: DB instance host or IP address. - Service `postgres_rds`: DB instance host or IP address. + - Service `sap_hana_db`: - Service `sftp`: SFTP host address. - Service `snowflake_db`: Host name - Service `splunk`: The Splunk service host address. @@ -1148,6 +1169,7 @@ Read-Only: - Service `salesforce`: - Service `salesforce_sandbox`: - Service `sap_business_by_design`: The SAP Business ByDesign password. + - Service `sap_hana_db`: - Service `scorm`: Your Scorm Secret Key. - Service `servicenow`: Your account password. - Service `sftp`: SFTP password. @@ -1161,11 +1183,17 @@ Read-Only: - Service `sql_server_rds`: The user's password. - Service `teamwork`: Your Teamwork password. - Service `the_trade_desk`: The Trade Desk password. It is a part of the login credentials. + - Service `upland`: Your Upland Software Password. - Service `when_i_work`: Your When I Work password. + - Service `wherefour`: Your Wherefour password. - Service `workday`: Workday password. - Service `workday_hcm`: Workday password. - `pat` (String, Sensitive) Field usage depends on `service` value: - Service `github`: The `Personal Access Token` generated in Github. +- `pat_name` (String) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source PAT Name. +- `pat_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source PAT Secret. 
- `path` (String) Field usage depends on `service` value: - Service `jira`: A URL subdirectory where the Jira instance is working. - `pattern` (String) Field usage depends on `service` value: @@ -1241,6 +1269,7 @@ Read-Only: - Service `oracle_sap_hva`: The port number. - Service `postgres`: The port number. - Service `postgres_rds`: The port number. + - Service `sap_hana_db`: - Service `sftp`: SFTP port. - Service `snowflake_db`: The Snowflake optional port number. - Service `splunk`: The Splunk service host port. @@ -1328,6 +1357,7 @@ Read-Only: - Service `oracle_sap_hva`: Public Key - Service `postgres`: Public Key - Service `postgres_rds`: Public Key + - Service `sap_hana_db`: - Service `sftp`: Public Key - Service `sql_server`: Public Key. - Service `sql_server_hva`: Public Key. @@ -1522,6 +1552,8 @@ Read-Only: - Service `sage_intacct`: Your Sender ID - `sender_password` (String, Sensitive) Field usage depends on `service` value: - Service `sage_intacct`: Your Sender Password +- `server_address` (String) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source server address. - `server_url` (String) Field usage depends on `service` value: - Service `oracle_fusion_cloud_apps_crm`: The Oracle Fusion Cloud Instance URL. - Service `oracle_fusion_cloud_apps_fscm`: The Oracle Fusion Cloud Instance URL. @@ -1573,6 +1605,7 @@ Read-Only: - Service `salesforce_commerce_cloud`: The name of the site from which you want to sync data. - `site_name` (String) Field usage depends on `service` value: - Service `microsoft_lists`: The Name of the SharePoint site. The Site Name is the `name` field in the Graph API response for sites. + - Service `tableau_source`: Your Tableau Source site name. - `site_urls` (Set of String) Field usage depends on `service` value: - Service `google_search_console`: Specific Site URLs to sync. Must be populated if `sync_mode` is set to `SpecificSites`. - `skip_after` (String) Field usage depends on `service` value: @@ -1626,8 +1659,10 @@ Read-Only: - Service `posthog`: Your PostHog data region (`app` or `eu`). - Service `recurly`: Your company's Recurly subdomain. - Service `salesforce_marketing_cloud`: Your Salesforce Marketing Cloud subdomain. + - Service `salsify`: Your Salsify Organization ID. - Service `sonarqube`: Your Sonarqube subdomain. - Service `tempo`: Your Tempo subdomain. + - Service `upland`: Your Upland Software subDomain. - Service `workable`: Your Workable Subdomain. - Service `wrike`: Your Wrike Subdomain. - `subdomain` (String) Field usage depends on `service` value: @@ -1710,6 +1745,8 @@ Read-Only: - Service `confluent_cloud`: Kafka sync type. Unpacked messages must be valid JSON. - Service `heroku_kafka`: Heroku Kafka sync type. Unpacked messages must be valid JSON. - Service `segment`: The Segment connector sync type. +- `sysnr` (String) Field usage depends on `service` value: + - Service `sap_hana_db`: - `table_name` (String) Field usage depends on `service` value: - Service `airtable`: Name of table in Airtable - `tde_certificate` (String, Sensitive) Field usage depends on `service` value: @@ -1823,6 +1860,7 @@ Read-Only: - Service `oracle_sap_hva_netweaver`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - Service `postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - Service `postgres_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). 
+ - Service `sap_hana_db`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - Service `sftp`: Tunnel host address, specify only to connect via SSH tunnel. - Service `sql_server`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - Service `sql_server_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). @@ -1863,6 +1901,7 @@ Read-Only: - Service `oracle_sap_hva_netweaver`: SSH port, specify only to connect via an SSH tunnel. - Service `postgres`: SSH port, specify only to connect via an SSH tunnel. - Service `postgres_rds`: SSH port, specify only to connect via an SSH tunnel. + - Service `sap_hana_db`: SSH port, specify only to connect via an SSH tunnel. - Service `sftp`: Tunnel port, specify only to connect via SSH tunnel. - Service `sql_server`: SSH port, specify only to connect via an SSH tunnel. - Service `sql_server_hva`: SSH port, specify only to connect via an SSH tunnel. @@ -1903,6 +1942,7 @@ Read-Only: - Service `oracle_sap_hva_netweaver`: SSH user, specify only to connect via an SSH tunnel. - Service `postgres`: SSH user, specify only to connect via an SSH tunnel. - Service `postgres_rds`: SSH user, specify only to connect via an SSH tunnel. + - Service `sap_hana_db`: SSH user, specify only to connect via an SSH tunnel. - Service `sftp`: Tunnel user, specify only to connect via SSH tunnel. - Service `sql_server`: SSH user, specify only to connect via an SSH tunnel. - Service `sql_server_hva`: SSH user, specify only to connect via an SSH tunnel. @@ -2004,6 +2044,7 @@ Read-Only: - Service `outbrain`: The username or email of the Outbrain user. - Service `postgres`: The user name. - Service `postgres_rds`: The user name. + - Service `sap_hana_db`: - Service `sftp`: SFTP user. - Service `snowflake_db`: The Snowflake username. - Service `splunk`: The Splunk username. @@ -2026,6 +2067,7 @@ Read-Only: - Service `cin7`: Your Cin7 API Username. - Service `collibra`: Your collibra username. - Service `concur`: The SAP Concur username. + - Service `dcl_logistics`: Your DCL Logistics username. - Service `financial_force`: - Service `github`: `Login` of your GitHub profile. - Service `gladly`: Your Gladly Username. @@ -2049,7 +2091,9 @@ Read-Only: - Service `shiphero`: Your ShipHero username. - Service `shipstation`: Your ShipStation username. - Service `teamwork`: Your Teamwork username. + - Service `upland`: Your Upland Software Username. - Service `when_i_work`: Your When I Work username. + - Service `wherefour`: Your Wherefour username. - Service `workday_hcm`: Username of your Workday Integration System User account - `view_attribution_window` (String) Field usage depends on `service` value: - Service `facebook`: Time period to attribute conversions based on views. [Possible view_attribution_window values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#viewattributionwindow). diff --git a/docs/data-sources/destination.md b/docs/data-sources/destination.md index 9e85c600..67934b55 100644 --- a/docs/data-sources/destination.md +++ b/docs/data-sources/destination.md @@ -42,9 +42,11 @@ Optional: - `auth_type` (String) Authentication type. Default value: `PASSWORD`. - `bucket` (String) Customer bucket. If specified, your GCS bucket will be used to process the data instead of a Fivetran-managed bucket. The bucket must be present in the same location as the dataset location. 
- `catalog` (String) Catalog name +- `client_id` (String) ClientId of your Azure Data Lake Storage - `cluster_id` (String) Cluster ID. Must be populated if `connection_type` is set to `SshTunnel` and `auth_type` is set to `IAM`. - `cluster_region` (String) Cluster region. Must be populated if `connection_type` is set to `SshTunnel` and `auth_type` is set to `IAM`. - `connection_type` (String) Connection method. Default value: `Directly`. +- `container_name` (String) Container Name of your Azure Data Lake Storage - `create_external_tables` (String) Whether to create external tables - `data_set_location` (String) Data location. Datasets will reside in this location. - `database` (String) Database name @@ -64,7 +66,10 @@ Optional: - `role` (String) The group role that you would like to assign this new user to. Supported group roles: ‘Destination Administrator‘, ‘Destination Reviewer‘, ‘Destination Analyst‘, ‘Connector Creator‘, or a custom destination role - `role_arn` (String, Sensitive) Role ARN with Redshift permissions. Required if authentication type is `IAM`. - `secret_key` (String, Sensitive) Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. +- `secret_value` (String, Sensitive) Secret Value of your Azure Data Lake Storage - `server_host_name` (String) Server name +- `storage_account_name` (String) Storage Account Name of your Azure Data Lake Storage +- `tenant_id` (String) TenantId of your Azure Data Lake Storage - `tunnel_host` (String) SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - `tunnel_port` (String) SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - `tunnel_user` (String) SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. diff --git a/docs/resources/connector.md b/docs/resources/connector.md index 546f4411..998be0e3 100644 --- a/docs/resources/connector.md +++ b/docs/resources/connector.md @@ -140,11 +140,13 @@ Optional: - Service `adobe_analytics_data_feed`: Azure Blob Storage public key - `access_key` (String) Field usage depends on `service` value: - Service `gainsight_customer_success`: The access key for API authentication. + - Service `gongio`: Your Gongio Access key. - Service `retailnext`: Your RetailNext access key. - `access_key_id` (String) Field usage depends on `service` value: - Service `appsflyer`: Your AWS access key ID. - Service `s3`: Access Key ID - `access_key_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `gongio`: Your Gongio Access Key Secret. - Service `s3`: Access Key Secret - `access_token` (String, Sensitive) Field usage depends on `service` value: - Service `big_commerce`: API access token of your store. @@ -286,6 +288,7 @@ Optional: - Service `oracle_sap_hva_netweaver`: Require TLS. - Service `postgres`: Require TLS through Tunnel - Service `postgres_rds`: Require TLS through Tunnel + - Service `sap_hana_db`: - Service `sql_server`: Require TLS. - Service `sql_server_hva`: Require TLS. - Service `sql_server_rds`: Require TLS. @@ -310,7 +313,9 @@ Optional: - Service `clickup`: Your ClickUp API key. - Service `confluent_cloud`: API Key - Service `coupa`: Your Coupa API key. + - Service `dcl_logistics`: Your DCL Logistics API key. - Service `delighted`: API Key for your Delighted account + - Service `destini`: Your Destini API Key. - Service `easypost`: Your EasyPost API Key. 
- Service `everhour`: Your Everhour API Token. - Service `freshdesk`: Your Freshdesk API Key. @@ -325,6 +330,7 @@ Optional: - Service `iterable`: Your Iterable API key. - Service `klaviyo`: Your Klaviyo API key. - Service `lever`: Your Lever API key. + - Service `luma`: Your Luma API key. - Service `mailgun`: Your Mailgun API key. - Service `mandrill`: Your Mandrill API key. - Service `ortto`: Your Ortto API key. @@ -334,8 +340,10 @@ Optional: - Service `recurly`: The Recurly API key. - Service `rootly`: Your Rootly API key. - Service `sailthru`: The Sailthru API key. + - Service `salsify`: Your Salsify API Key. - Service `sendgrid`: The SendGrid API key. - Service `sendinblue`: Your Sendinblue API key. + - Service `simplesat`: Your Simplesat API key. - Service `sonarqube`: Your Sonarqube API key. - Service `squarespace`: Your Squarespace API key. - Service `stackadapt`: Your StackAdapt API key. @@ -458,6 +466,7 @@ Optional: - Service `webhooks`: The authentication mechanism you want to use - `auth_mode` (String) Field usage depends on `service` value: - Service `anaplan`: The Anaplan authentication method. + - Service `concur`: The Authentication Mode used by SAP Concur. It can be PasswordGrant or CompanyLevel auth mode - Service `github`: Authorization type. - `auth_type` (String) Field usage depends on `service` value: - Service `airtable`: Type of authentication being used by connector @@ -477,6 +486,7 @@ Optional: - `base_url` (String) Field usage depends on `service` value: - Service `brex`: Your Brex Base URL - Service `financial_force`: (Optional) The custom Salesforce domain. Make sure that the `base_url` starts with `https://`. + - Service `gongio`: Your Gong API Base URL. - Service `ironclad`: Your Ironclad base url. - Service `mailgun`: Your Mailgun base URL. - Service `ortto`: Your Ortto base URL. Possible values: `api`, `api.au`, `api.eu`. @@ -518,6 +528,8 @@ Optional: - `click_attribution_window` (String) Field usage depends on `service` value: - Service `facebook`: Time period to attribute conversions based on clicks. [Possible click_attribution_window values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#clickattributionwindow). - Service `pinterest_ads`: The number of days to use as the conversion attribution window for a 'click' action. +- `client` (String) Field usage depends on `service` value: + - Service `sap_hana_db`: - `client_cert` (String, Sensitive) Field usage depends on `service` value: - Service `apache_kafka`: Kafka client certificate. - Service `heroku_kafka`: Heroku Kafka client certificate. Required for `TLS` security protocol. @@ -549,6 +561,7 @@ Optional: - `client_key` (String, Sensitive) Field usage depends on `service` value: - Service `appfigures`: Your Appfigures Client Key. - `client_name` (String) Field usage depends on `service` value: + - Service `destini`: Your Destini Client Name. - Service `medallia`: Medallia company name - `client_secret` (String, Sensitive) Field usage depends on `service` value: - Service `adobe_analytics`: Client Secret from the Service Account (JWT) credentials of your Adobe Project. @@ -578,6 +591,11 @@ Optional: - Service `sage_intacct`: Company ID - `company_key` (String, Sensitive) Field usage depends on `service` value: - Service `khoros_care`: Your Khoros Care companyKey. + - Service `upland`: Your Upland Software Company Key. 
+- `company_request_token` (String, Sensitive) Field usage depends on `service` value: + - Service `concur`: The SAP Concur Company Request Token +- `company_uuid` (String) Field usage depends on `service` value: + - Service `concur`: The SAP Concur Company UUID - `compression` (String) Field usage depends on `service` value: - Service `azure_blob_storage`: The secrets that should be passed to the function at runtime. - Service `box`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. @@ -658,6 +676,7 @@ Optional: - Service `postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - Service `postgres_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - Service `s3`: Connection method. Default value: `Directly`. + - Service `sap_hana_db`: - Service `snowflake_db`: Directly or Private Link - Service `sql_server`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - Service `sql_server_hva`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. @@ -754,6 +773,7 @@ Optional: - Service `oracle_sap_hva`: The database name. - Service `postgres`: The database name. - Service `postgres_rds`: The database name. + - Service `sap_hana_db`: - Service `snowflake_db`: The database name: Snowflake - Service `sql_server`: The database name. - Service `sql_server_hva`: The database name. @@ -972,6 +992,7 @@ Optional: - Service `oracle_sap_hva_netweaver`: DB instance host or IP address. - Service `postgres`: DB instance host or IP address. - Service `postgres_rds`: DB instance host or IP address. + - Service `sap_hana_db`: - Service `sftp`: SFTP host address. - Service `snowflake_db`: Host name - Service `splunk`: The Splunk service host address. @@ -1220,6 +1241,7 @@ Optional: - Service `salesforce`: - Service `salesforce_sandbox`: - Service `sap_business_by_design`: The SAP Business ByDesign password. + - Service `sap_hana_db`: - Service `scorm`: Your Scorm Secret Key. - Service `servicenow`: Your account password. - Service `sftp`: SFTP password. @@ -1233,11 +1255,17 @@ Optional: - Service `sql_server_rds`: The user's password. - Service `teamwork`: Your Teamwork password. - Service `the_trade_desk`: The Trade Desk password. It is a part of the login credentials. + - Service `upland`: Your Upland Software Password. - Service `when_i_work`: Your When I Work password. + - Service `wherefour`: Your Wherefour password. - Service `workday`: Workday password. 
- Service `workday_hcm`: Workday password. - `pat` (String, Sensitive) Field usage depends on `service` value: - Service `github`: The `Personal Access Token` generated in Github. +- `pat_name` (String) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source PAT Name. +- `pat_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source PAT Secret. - `path` (String) Field usage depends on `service` value: - Service `jira`: A URL subdirectory where the Jira instance is working. - `pattern` (String) Field usage depends on `service` value: @@ -1313,6 +1341,7 @@ Optional: - Service `oracle_sap_hva`: The port number. - Service `postgres`: The port number. - Service `postgres_rds`: The port number. + - Service `sap_hana_db`: - Service `sftp`: SFTP port. - Service `snowflake_db`: The Snowflake optional port number. - Service `splunk`: The Splunk service host port. @@ -1400,6 +1429,7 @@ Optional: - Service `oracle_sap_hva`: Public Key - Service `postgres`: Public Key - Service `postgres_rds`: Public Key + - Service `sap_hana_db`: - Service `sftp`: Public Key - Service `sql_server`: Public Key. - Service `sql_server_hva`: Public Key. @@ -1594,6 +1624,8 @@ Optional: - Service `sage_intacct`: Your Sender ID - `sender_password` (String, Sensitive) Field usage depends on `service` value: - Service `sage_intacct`: Your Sender Password +- `server_address` (String) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source server address. - `server_url` (String) Field usage depends on `service` value: - Service `oracle_fusion_cloud_apps_crm`: The Oracle Fusion Cloud Instance URL. - Service `oracle_fusion_cloud_apps_fscm`: The Oracle Fusion Cloud Instance URL. @@ -1644,6 +1676,7 @@ Optional: - Service `salesforce_commerce_cloud`: The name of the site from which you want to sync data. - `site_name` (String) Field usage depends on `service` value: - Service `microsoft_lists`: The Name of the SharePoint site. The Site Name is the `name` field in the Graph API response for sites. + - Service `tableau_source`: Your Tableau Source site name. - `site_urls` (Set of String) Field usage depends on `service` value: - Service `google_search_console`: Specific Site URLs to sync. Must be populated if `sync_mode` is set to `SpecificSites`. - `skip_after` (String) Field usage depends on `service` value: @@ -1697,8 +1730,10 @@ Optional: - Service `posthog`: Your PostHog data region (`app` or `eu`). - Service `recurly`: Your company's Recurly subdomain. - Service `salesforce_marketing_cloud`: Your Salesforce Marketing Cloud subdomain. + - Service `salsify`: Your Salsify Organization ID. - Service `sonarqube`: Your Sonarqube subdomain. - Service `tempo`: Your Tempo subdomain. + - Service `upland`: Your Upland Software subDomain. - Service `workable`: Your Workable Subdomain. - Service `wrike`: Your Wrike Subdomain. - `subdomain` (String) Field usage depends on `service` value: @@ -1781,6 +1816,8 @@ Optional: - Service `confluent_cloud`: Kafka sync type. Unpacked messages must be valid JSON. - Service `heroku_kafka`: Heroku Kafka sync type. Unpacked messages must be valid JSON. - Service `segment`: The Segment connector sync type. 
+- `sysnr` (String) Field usage depends on `service` value: + - Service `sap_hana_db`: - `table_name` (String) Field usage depends on `service` value: - Service `airtable`: Name of table in Airtable - `tde_certificate` (String, Sensitive) Field usage depends on `service` value: @@ -1894,6 +1931,7 @@ Optional: - Service `oracle_sap_hva_netweaver`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - Service `postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - Service `postgres_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `sap_hana_db`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - Service `sftp`: Tunnel host address, specify only to connect via SSH tunnel. - Service `sql_server`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - Service `sql_server_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). @@ -1934,6 +1972,7 @@ Optional: - Service `oracle_sap_hva_netweaver`: SSH port, specify only to connect via an SSH tunnel. - Service `postgres`: SSH port, specify only to connect via an SSH tunnel. - Service `postgres_rds`: SSH port, specify only to connect via an SSH tunnel. + - Service `sap_hana_db`: SSH port, specify only to connect via an SSH tunnel. - Service `sftp`: Tunnel port, specify only to connect via SSH tunnel. - Service `sql_server`: SSH port, specify only to connect via an SSH tunnel. - Service `sql_server_hva`: SSH port, specify only to connect via an SSH tunnel. @@ -1974,6 +2013,7 @@ Optional: - Service `oracle_sap_hva_netweaver`: SSH user, specify only to connect via an SSH tunnel. - Service `postgres`: SSH user, specify only to connect via an SSH tunnel. - Service `postgres_rds`: SSH user, specify only to connect via an SSH tunnel. + - Service `sap_hana_db`: SSH user, specify only to connect via an SSH tunnel. - Service `sftp`: Tunnel user, specify only to connect via SSH tunnel. - Service `sql_server`: SSH user, specify only to connect via an SSH tunnel. - Service `sql_server_hva`: SSH user, specify only to connect via an SSH tunnel. @@ -2075,6 +2115,7 @@ Optional: - Service `outbrain`: The username or email of the Outbrain user. - Service `postgres`: The user name. - Service `postgres_rds`: The user name. + - Service `sap_hana_db`: - Service `sftp`: SFTP user. - Service `snowflake_db`: The Snowflake username. - Service `splunk`: The Splunk username. @@ -2097,6 +2138,7 @@ Optional: - Service `cin7`: Your Cin7 API Username. - Service `collibra`: Your collibra username. - Service `concur`: The SAP Concur username. + - Service `dcl_logistics`: Your DCL Logistics username. - Service `financial_force`: - Service `github`: `Login` of your GitHub profile. - Service `gladly`: Your Gladly Username. @@ -2120,7 +2162,9 @@ Optional: - Service `shiphero`: Your ShipHero username. - Service `shipstation`: Your ShipStation username. - Service `teamwork`: Your Teamwork username. + - Service `upland`: Your Upland Software Username. - Service `when_i_work`: Your When I Work username. + - Service `wherefour`: Your Wherefour username. - Service `workday_hcm`: Username of your Workday Integration System User account - `view_attribution_window` (String) Field usage depends on `service` value: - Service `facebook`: Time period to attribute conversions based on views. 
[Possible view_attribution_window values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#viewattributionwindow). diff --git a/docs/resources/destination.md b/docs/resources/destination.md index 4cc39580..fb38c2f9 100644 --- a/docs/resources/destination.md +++ b/docs/resources/destination.md @@ -62,9 +62,11 @@ Optional: - `auth_type` (String) Authentication type. Default value: `PASSWORD`. - `bucket` (String) Customer bucket. If specified, your GCS bucket will be used to process the data instead of a Fivetran-managed bucket. The bucket must be present in the same location as the dataset location. - `catalog` (String) Catalog name +- `client_id` (String) ClientId of your Azure Data Lake Storage - `cluster_id` (String) Cluster ID. Must be populated if `connection_type` is set to `SshTunnel` and `auth_type` is set to `IAM`. - `cluster_region` (String) Cluster region. Must be populated if `connection_type` is set to `SshTunnel` and `auth_type` is set to `IAM`. - `connection_type` (String) Connection method. Default value: `Directly`. +- `container_name` (String) Container Name of your Azure Data Lake Storage - `create_external_tables` (String) Whether to create external tables - `data_set_location` (String) Data location. Datasets will reside in this location. - `database` (String) Database name @@ -84,7 +86,10 @@ Optional: - `role` (String) The group role that you would like to assign this new user to. Supported group roles: ‘Destination Administrator‘, ‘Destination Reviewer‘, ‘Destination Analyst‘, ‘Connector Creator‘, or a custom destination role - `role_arn` (String, Sensitive) Role ARN with Redshift permissions. Required if authentication type is `IAM`. - `secret_key` (String, Sensitive) Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. +- `secret_value` (String, Sensitive) Secret Value of your Azure Data Lake Storage - `server_host_name` (String) Server name +- `storage_account_name` (String) Storage Account Name of your Azure Data Lake Storage +- `tenant_id` (String) TenantId of your Azure Data Lake Storage - `tunnel_host` (String) SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - `tunnel_port` (String) SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - `tunnel_user` (String) SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. 
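As a usage illustration for the new Azure Data Lake Storage fields documented above, here is a minimal sketch of a `fivetran_destination` resource configured for ADLS. This example is not part of the patch: the `service` value `adls`, the region, and every identifier below are placeholders or assumptions, and only the five config field names come from the schema added in this change.

```hcl
variable "adls_client_secret" {
  type      = string
  sensitive = true
}

resource "fivetran_destination" "adls" {
  group_id         = "target_group_id" # placeholder Fivetran group ID
  service          = "adls"            # assumed service name for Azure Data Lake Storage
  region           = "AZURE_EASTUS2"   # placeholder Fivetran processing region
  time_zone_offset = "0"

  config {
    storage_account_name = "mystorageaccount"                            # ADLS storage account
    container_name       = "mycontainer"                                 # ADLS container
    tenant_id            = "00000000-0000-0000-0000-000000000000"        # placeholder Azure AD tenant ID
    client_id            = "11111111-1111-1111-1111-111111111111"        # placeholder application (client) ID
    secret_value         = var.adls_client_secret                        # client secret, marked Sensitive in the schema
  }
}
```

Because `secret_value` is declared `Sensitive` and `resourceDestinationReadConfig` below keeps the locally configured value on read, sourcing it from a sensitive variable keeps the secret out of plain-text configuration and avoids spurious diffs.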
diff --git a/fivetran/data_source_destination.go b/fivetran/data_source_destination.go index ad6a144c..0f93174d 100644 --- a/fivetran/data_source_destination.go +++ b/fivetran/data_source_destination.go @@ -224,6 +224,32 @@ func dataSourceDestinationSchemaConfig() *schema.Schema { Optional: true, Description: "Region of your AWS S3 bucket", }, + "storage_account_name": { + Type: schema.TypeString, + Optional: true, + Description: "Storage Account Name of your Azure Data Lake Storage", + }, + "container_name": { + Type: schema.TypeString, + Optional: true, + Description: "Container Name of your Azure Data Lake Storage", + }, + "tenant_id": { + Type: schema.TypeString, + Optional: true, + Description: "TenantId of your Azure Data Lake Storage", + }, + "client_id": { + Type: schema.TypeString, + Optional: true, + Description: "ClientId of your Azure Data Lake Storage", + }, + "secret_value": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + Description: "Secret Value of your Azure Data Lake Storage", + }, }, }, } @@ -314,7 +340,12 @@ func dataSourceDestinationConfig(resp *destinations.DestinationDetailsResponse) c["fivetran_role_arn"] = resp.Data.Config.FivetranRoleArn c["prefix_path"] = resp.Data.Config.PrefixPath c["region"] = resp.Data.Config.Region - + c["storage_account_name"] = resp.Data.Config.StorageAccountName + c["container_name"] = resp.Data.Config.ContainerName + c["tenant_id"] = resp.Data.Config.TenantId + c["client_id"] = resp.Data.Config.ClientId + c["secret_value"] = resp.Data.Config.SecretValue + config = append(config, c) return config, nil diff --git a/fivetran/resource_destination.go b/fivetran/resource_destination.go index 31b26bfc..fce3a111 100644 --- a/fivetran/resource_destination.go +++ b/fivetran/resource_destination.go @@ -253,6 +253,32 @@ func resourceDestinationSchemaConfig() *schema.Schema { Optional: true, Description: "Region of your AWS S3 bucket", }, + "storage_account_name": { + Type: schema.TypeString, + Optional: true, + Description: "Storage Account Name of your Azure Data Lake Storage", + }, + "container_name": { + Type: schema.TypeString, + Optional: true, + Description: "Container Name of your Azure Data Lake Storage", + }, + "tenant_id": { + Type: schema.TypeString, + Optional: true, + Description: "TenantId of your Azure Data Lake Storage", + }, + "client_id": { + Type: schema.TypeString, + Optional: true, + Description: "ClientId of your Azure Data Lake Storage", + }, + "secret_value": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + Description: "Secret Value of your Azure Data Lake Storage", + }, }, }, } @@ -439,6 +465,7 @@ func resourceDestinationReadConfig(resp *destinations.DestinationDetailsResponse c["personal_access_token"] = currentConfigMap["personal_access_token"].(string) c["role_arn"] = currentConfigMap["role_arn"].(string) c["passphrase"] = currentConfigMap["passphrase"].(string) + c["secret_value"] = currentConfigMap["secret_value"].(string) if _, ok := currentConfigMap["is_private_key_encrypted"]; ok { // if `is_private_key_encrypted` is configured locally we should read upstream value @@ -479,6 +506,10 @@ func resourceDestinationReadConfig(resp *destinations.DestinationDetailsResponse c["fivetran_role_arn"] = resp.Data.Config.FivetranRoleArn c["prefix_path"] = resp.Data.Config.PrefixPath c["region"] = resp.Data.Config.Region + c["storage_account_name"] = resp.Data.Config.StorageAccountName + c["container_name"] = resp.Data.Config.ContainerName + c["tenant_id"] = resp.Data.Config.TenantId + 
c["client_id"] = resp.Data.Config.ClientId config = append(config, c) @@ -623,5 +654,35 @@ func resourceDestinationCreateConfig(config []interface{}) (*destinations.Destin hasConfig = true } + if v := c["region"].(string); v != "" { + fivetranConfig.Region(v) + hasConfig = true + } + + if v := c["storage_account_name"].(string); v != "" { + fivetranConfig.StorageAccountName(v) + hasConfig = true + } + + if v := c["container_name"].(string); v != "" { + fivetranConfig.ContainerName(v) + hasConfig = true + } + + if v := c["tenant_id"].(string); v != "" { + fivetranConfig.TenantId(v) + hasConfig = true + } + + if v := c["client_id"].(string); v != "" { + fivetranConfig.ClientId(v) + hasConfig = true + } + + if v := c["secret_value"].(string); v != "" { + fivetranConfig.SecretValue(v) + hasConfig = true + } + return fivetranConfig, hasConfig } diff --git a/fivetran/resource_external_logging_test.go b/fivetran/resource_external_logging_test.go index 2222ffa9..e2ef6b0d 100644 --- a/fivetran/resource_external_logging_test.go +++ b/fivetran/resource_external_logging_test.go @@ -5,6 +5,7 @@ import ( "errors" "testing" "fmt" + "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -115,7 +116,7 @@ func testFivetranExternalLoggingResourceDestroy(s *terraform.State) error { if err.Error() != "status code: 404; expected: 200" { return err } - if response.Code != "NotFound" { + if !strings.HasPrefix(response.Code, "NotFound") { return errors.New("External Logging " + rs.Primary.ID + " still exists. Response code: " + response.Code) } diff --git a/fivetran/tests/mock/datasource_destination_test.go b/fivetran/tests/mock/datasource_destination_test.go index b88afef7..93458d85 100644 --- a/fivetran/tests/mock/datasource_destination_test.go +++ b/fivetran/tests/mock/datasource_destination_test.go @@ -78,6 +78,11 @@ func TestDataSourceDestinationConfigMappingMock(t *testing.T) { resource.TestCheckResourceAttr("data.fivetran_destination.test_destintion", "config.0.fivetran_role_arn", "fivetran_role_arn"), resource.TestCheckResourceAttr("data.fivetran_destination.test_destintion", "config.0.prefix_path", "prefix_path"), resource.TestCheckResourceAttr("data.fivetran_destination.test_destintion", "config.0.region", "region"), + resource.TestCheckResourceAttr("data.fivetran_destination.test_destintion", "config.0.storage_account_name", "storage_account_name"), + resource.TestCheckResourceAttr("data.fivetran_destination.test_destintion", "config.0.container_name", "container_name"), + resource.TestCheckResourceAttr("data.fivetran_destination.test_destintion", "config.0.tenant_id", "tenant_id"), + resource.TestCheckResourceAttr("data.fivetran_destination.test_destintion", "config.0.client_id", "client_id"), + resource.TestCheckResourceAttr("data.fivetran_destination.test_destintion", "config.0.secret_value", "******"), ), } diff --git a/fivetran/tests/mock/resource_destination_test.go b/fivetran/tests/mock/resource_destination_test.go index 89330959..5ae134ae 100644 --- a/fivetran/tests/mock/resource_destination_test.go +++ b/fivetran/tests/mock/resource_destination_test.go @@ -81,7 +81,13 @@ const ( "catalog": "catalog", "fivetran_role_arn": "fivetran_role_arn", "prefix_path": "prefix_path", - "region": "region" + "region": "region", + "storage_account_name": "storage_account_name", + "container_name": "container_name", + "tenant_id": "tenant_id", + "client_id": "client_id", + "secret_value": "******" + } } ` @@ -136,6 +142,11 @@ func 
setupMockClientDestinationConfigMapping(t *testing.T) { assertKeyExistsAndHasValue(t, config, "fivetran_role_arn", "fivetran_role_arn") assertKeyExistsAndHasValue(t, config, "prefix_path", "prefix_path") assertKeyExistsAndHasValue(t, config, "region", "region") + assertKeyExistsAndHasValue(t, config, "storage_account_name", "storage_account_name") + assertKeyExistsAndHasValue(t, config, "container_name", "container_name") + assertKeyExistsAndHasValue(t, config, "tenant_id", "tenant_id") + assertKeyExistsAndHasValue(t, config, "client_id", "client_id") + assertKeyExistsAndHasValue(t, config, "secret_value", "secret_value") assertKeyExistsAndHasValue(t, config, "tunnel_port", "123") @@ -200,6 +211,11 @@ func TestResourceDestinationMappingMock(t *testing.T) { fivetran_role_arn = "fivetran_role_arn" prefix_path = "prefix_path" region = "region" + storage_account_name = "storage_account_name" + container_name = "container_name" + tenant_id = "tenant_id" + client_id = "client_id" + secret_value = "secret_value" } }`, diff --git a/go.mod b/go.mod index 162f3b4b..868fd65d 100644 --- a/go.mod +++ b/go.mod @@ -1,7 +1,7 @@ module github.com/fivetran/terraform-provider-fivetran require ( - github.com/fivetran/go-fivetran v0.7.12 + github.com/fivetran/go-fivetran v0.7.13 github.com/hashicorp/terraform-plugin-sdk/v2 v2.10.0 ) diff --git a/go.sum b/go.sum index 13b01349..0a9ba428 100644 --- a/go.sum +++ b/go.sum @@ -91,6 +91,8 @@ github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fivetran/go-fivetran v0.7.12 h1:XX4ZqQUgiWCZQeFWxUYCmgY2MowPPVHKhf1CMw2+bvI= github.com/fivetran/go-fivetran v0.7.12/go.mod h1:EIy5Uwn1zylQCr/7O+8rrwvmjvhW3PPpzHkQj26ON7Y= +github.com/fivetran/go-fivetran v0.7.13 h1:FYb4+5OzAFGnUnrTLDElyA8c0ZmI3WGYv+KuKWsaVPw= +github.com/fivetran/go-fivetran v0.7.13/go.mod h1:EIy5Uwn1zylQCr/7O+8rrwvmjvhW3PPpzHkQj26ON7Y= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
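For completeness, a companion sketch of reading the new attributes back through the `fivetran_destination` data source. The destination ID is a placeholder, and indexing `config` as a single-element list is an assumption based on the `config.0.*` paths used in the mock tests; note that those tests expect the API to return `secret_value` masked as `******`, so the secret is not readable this way.

```hcl
data "fivetran_destination" "dest" {
  id = "destination_group_id" # placeholder destination ID
}

output "adls_storage_account_name" {
  value = data.fivetran_destination.dest.config[0].storage_account_name
}

output "adls_container_name" {
  value = data.fivetran_destination.dest.config[0].container_name
}
```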