Skip to content

Commit

Permalink
feat(dataarts): add new datasource to get factory jobs (#5286)
Browse files Browse the repository at this point in the history
  • Loading branch information
wuzhuanhong authored Jul 29, 2024
1 parent a6e01e2 commit 12e01e2
Show file tree
Hide file tree
Showing 4 changed files with 570 additions and 0 deletions.
93 changes: 93 additions & 0 deletions docs/data-sources/dataarts_factory_jobs.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
---
subcategory: "DataArts Studio"
layout: "huaweicloud"
page_title: "HuaweiCloud: huaweicloud_dataarts_factory_jobs"
description: |-
Use this data source to get the list of the Factory jobs within HuaweiCloud.
---
# huaweicloud_dataarts_factory_jobs

Use this data source to get the list of the Factory jobs within HuaweiCloud.

## Example Usage

```hcl
variable "workspace_id" {}
data "huaweicloud_dataarts_factory_jobs" "test" {
workspace_id = var.workspace_id
}
```

## Argument Reference

The following arguments are supported:

* `region` - (Optional, String) Specifies the region in which to query the data source.
If omitted, the provider-level region will be used.

* `workspace_id` - (Optional, String) Specifies the ID of the workspace to which the jobs belong.
If omitted, the default workspace will be used.

* `name` - (Optional, String) Specifies the job name to be queried. Fuzzy search is supported.

* `process_type` - (Optional, String) Specifies the job type to be queried.
If omitted, the default value is **BATCH**.
The valid values are as follows:
+ **REAL_TIME**: Real-time processing.
+ **BATCH**: Batch processing.

## Attribute Reference

In addition to all arguments above, the following attributes are exported:

* `id` - The data source ID.

* `jobs` - All jobs that match the filter parameters.
The [jobs](#factory_jobs) structure is documented below.

<a name="factory_jobs"></a>
The `jobs` block supports:

* `name` - The name of the job.

* `process_type` - The type of the job.

* `priority` - The priority of the job.
+ **0**: High priority.
+ **1**: Medium priority.
+ **2**: Low priority.

* `owner` - The owner of the job.

* `is_single_task_job` - Whether the job is single task.

* `directory` - The directory tree path of the job.

* `status` - The current status of the job.
+ **NORMAL**
+ **STOPPED**
+ **SCHEDULING**
+ **PAUSED**
+ **EXCEPTION**

* `start_time` - The start time of the job scheduling, in RFC3339 format.

* `end_time` - The end time of the job scheduling, in RFC3339 format.

* `created_by` - The creator of the job.

* `created_at` - The creation time of the job, in RFC3339 format.

* `updated_by` - The name of the user who last updated the job.

* `updated_at` - The latest update time of the job, in RFC3339 format.

* `last_instance_status` - The latest running status of the instance corresponding to the job.
+ **running**
+ **success**
+ **fail**
+ **running-exception**
+ **manual-stop**

* `last_instance_end_time` - The latest end time of the instance corresponding to the job, in RFC3339 format.
3 changes: 3 additions & 0 deletions huaweicloud/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -569,6 +569,9 @@ func Provider() *schema.Provider {
// DataArts DataService
"huaweicloud_dataarts_dataservice_apis": dataarts.DataSourceDataServiceApis(),

// DataArts Factory
"huaweicloud_dataarts_factory_jobs": dataarts.DataSourceFactoryJobs(),

"huaweicloud_dbss_flavors": dbss.DataSourceDbssFlavors(),

"huaweicloud_dc_connections": dc.DataSourceDcConnections(),
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,226 @@
package dataarts

import (
"fmt"
"regexp"
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"

"github.com/huaweicloud/terraform-provider-huaweicloud/huaweicloud/services/acceptance"
)

// TestAccDatasourceFactoryJobs_basic verifies the huaweicloud_dataarts_factory_jobs
// data source:
//   - a query with a non-existent workspace ID returns the expected API error;
//   - an unfiltered query exposes the documented job attributes;
//   - the name filter (exact, fuzzy and not-found) and the process_type filter
//     (BATCH and REAL_TIME) each behave as expected.
func TestAccDatasourceFactoryJobs_basic(t *testing.T) {
	var (
		rName      = acceptance.RandomAccResourceName()
		dataSource = "data.huaweicloud_dataarts_factory_jobs.test"
		dc         = acceptance.InitDataSourceCheck(dataSource)

		byName   = "data.huaweicloud_dataarts_factory_jobs.filter_by_name"
		dcByName = acceptance.InitDataSourceCheck(byName)

		byBatchType   = "data.huaweicloud_dataarts_factory_jobs.filter_by_batch_type"
		dcByBatchType = acceptance.InitDataSourceCheck(byBatchType)

		byRealTimeType   = "data.huaweicloud_dataarts_factory_jobs.filter_by_real_time_type"
		dcByRealTimeType = acceptance.InitDataSourceCheck(byRealTimeType)
	)

	// RFC3339 timestamp, e.g. "2024-07-29T10:00:00Z" or "2024-07-29T10:00:00+08:00".
	// The pattern previously contained a stray '?' after the seconds `\d{2}` — a
	// no-op lazy modifier on a fixed-count quantifier — which is removed here; the
	// matched language is unchanged. Compiled once and shared by both checks.
	timeRegex := regexp.MustCompile(`^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(Z|([+-]\d{2}:\d{2}))$`)

	resource.ParallelTest(t, resource.TestCase{
		PreCheck: func() {
			acceptance.TestAccPreCheck(t)
			// Skips the test when no DataArts workspace is configured for acceptance runs.
			acceptance.TestAccPreCheckDataArtsWorkSpaceID(t)
		},
		ProviderFactories: acceptance.TestAccProviderFactories,
		Steps: []resource.TestStep{
			{
				// Negative case: the service rejects an unknown workspace ID.
				Config:      testAccDatasourceFactoryJobs_workspace_id_not_found,
				ExpectError: regexp.MustCompile("detail msg Workspace does not exists"),
			},
			{
				Config: testAccDatasourceFactoryJobs_basic(rName),
				Check: resource.ComposeTestCheckFunc(
					dc.CheckResourceExists(),
					resource.TestCheckResourceAttrSet(dataSource, "jobs.0.name"),
					resource.TestCheckResourceAttrSet(dataSource, "jobs.0.process_type"),
					resource.TestCheckResourceAttrSet(dataSource, "jobs.0.priority"),
					resource.TestCheckResourceAttrSet(dataSource, "jobs.0.directory"),
					resource.TestCheckResourceAttrSet(dataSource, "jobs.0.status"),
					resource.TestCheckResourceAttrSet(dataSource, "jobs.0.created_by"),
					resource.TestMatchResourceAttr(dataSource, "jobs.0.created_at", timeRegex),
					resource.TestCheckResourceAttrSet(dataSource, "jobs.0.updated_by"),
					resource.TestMatchResourceAttr(dataSource, "jobs.0.updated_at", timeRegex),
					dcByName.CheckResourceExists(),
					dcByBatchType.CheckResourceExists(),
					dcByRealTimeType.CheckResourceExists(),
					// Outputs computed by the HCL in testAccDatasourceFactoryJobs_basic.
					resource.TestCheckOutput("is_name_filter_useful", "true"),
					resource.TestCheckOutput("is_fuzzy_name_filter_useful", "true"),
					resource.TestCheckOutput("not_found_name", "true"),
					resource.TestCheckOutput("is_batch_type_filter_useful", "true"),
					resource.TestCheckOutput("is_real_time_type_filter_useful", "true"),
				),
			},
		},
	})
}

// testAccDatasourceFactoryJobs_workspace_id_not_found queries the data source with a
// workspace ID that does not exist; the API is expected to reject the request
// (exercised via an ExpectError test step).
const testAccDatasourceFactoryJobs_workspace_id_not_found = `
data "huaweicloud_dataarts_factory_jobs" "test" {
workspace_id = "not_found"
}
`

// testAccDatasourceFactoryJobs_base builds the base HCL configuration for the data
// source tests: it includes the testFactoryJob_basic configuration (%[1]s) and
// additionally creates an SMN topic plus a batch-type Factory job ("<name>_batch_job",
// process_type = BATCH) that sends a notification to that topic, scheduled to execute
// once. %[2]s is the random resource name and %[3]s the workspace ID taken from the
// HW_DATAARTS_WORKSPACE_ID acceptance environment variable.
func testAccDatasourceFactoryJobs_base(name string) string {
	return fmt.Sprintf(`
%[1]s
resource "huaweicloud_smn_topic" "test" {
name = "%[2]s"
}
resource "huaweicloud_dataarts_factory_job" "batch_job" {
name = "%[2]s_batch_job"
workspace_id = "%[3]s"
process_type = "BATCH"
nodes {
name = "SMN_%[2]s_batch_job"
type = "SMN"
location {
x = 10
y = 11
}
properties {
name = "topic"
value = huaweicloud_smn_topic.test.topic_urn
}
properties {
name = "messageType"
value = "NORMAL"
}
properties {
name = "message"
value = "terraform acceptance test"
}
}
schedule {
type = "EXECUTE_ONCE"
}
}
`, testFactoryJob_basic(name), name, acceptance.HW_DATAARTS_WORKSPACE_ID)
}

// testAccDatasourceFactoryJobs_basic builds the HCL configuration that exercises the
// data source filters on top of testAccDatasourceFactoryJobs_base (%s): an unfiltered
// query, name filters (exact / fuzzy / not-found) and process_type filters (BATCH /
// REAL_TIME), each exposing a boolean output consumed by TestCheckOutput.
// NOTE(review): `huaweicloud_dataarts_factory_job.test` is not defined in this file —
// presumably it is the (REAL_TIME) job created by testFactoryJob_basic, which the base
// config includes; verify against that configuration.
func testAccDatasourceFactoryJobs_basic(name string) string {
	return fmt.Sprintf(`
%s
data "huaweicloud_dataarts_factory_jobs" "test" {
depends_on = [
huaweicloud_dataarts_factory_job.batch_job
]
workspace_id = "%[2]s"
}
locals {
job_name = huaweicloud_dataarts_factory_job.batch_job.name
batch_type = huaweicloud_dataarts_factory_job.batch_job.process_type
real_time_type = huaweicloud_dataarts_factory_job.test.process_type
}
# Filter by name (Exact match)
data "huaweicloud_dataarts_factory_jobs" "filter_by_name" {
depends_on = [
huaweicloud_dataarts_factory_job.batch_job
]
workspace_id = "%[2]s"
name = local.job_name
}
locals {
name_filter_result = [
for v in data.huaweicloud_dataarts_factory_jobs.filter_by_name.jobs[*].name : v == local.job_name
]
}
output "is_name_filter_useful" {
value = length(local.name_filter_result) > 0 && alltrue(local.name_filter_result)
}
# Filter by name (Fuzzy search)
data "huaweicloud_dataarts_factory_jobs" "filter_by_fuzzy_name" {
depends_on = [
huaweicloud_dataarts_factory_job.batch_job
]
workspace_id = "%[2]s"
name = "tf_test"
}
output "is_fuzzy_name_filter_useful" {
value = length(data.huaweicloud_dataarts_factory_jobs.filter_by_fuzzy_name.jobs) >= 1
}
# Filter by name (Not found)
data "huaweicloud_dataarts_factory_jobs" "not_found_name" {
depends_on = [
huaweicloud_dataarts_factory_job.batch_job
]
workspace_id = "%[2]s"
name = "not_found_name"
}
output "not_found_name" {
value = length(data.huaweicloud_dataarts_factory_jobs.not_found_name.jobs) == 0
}
# Filter by "BATCH" type
data "huaweicloud_dataarts_factory_jobs" "filter_by_batch_type" {
depends_on = [
huaweicloud_dataarts_factory_job.batch_job
]
workspace_id = "%[2]s"
process_type = local.batch_type
}
locals {
batch_type_filter_result = [
for v in data.huaweicloud_dataarts_factory_jobs.filter_by_batch_type.jobs[*].process_type : v == local.batch_type
]
}
output "is_batch_type_filter_useful" {
value = length(local.batch_type_filter_result) > 0 && alltrue(local.batch_type_filter_result)
}
# Filter by "REAL_TIME" type
data "huaweicloud_dataarts_factory_jobs" "filter_by_real_time_type" {
depends_on = [
huaweicloud_dataarts_factory_job.test
]
workspace_id = "%[2]s"
process_type = local.real_time_type
}
locals {
real_time_filter_result = [
for v in data.huaweicloud_dataarts_factory_jobs.filter_by_real_time_type.jobs[*].process_type : v == local.real_time_type
]
}
output "is_real_time_type_filter_useful" {
value = length(local.real_time_filter_result) > 0 && alltrue(local.real_time_filter_result)
}
`, testAccDatasourceFactoryJobs_base(name), acceptance.HW_DATAARTS_WORKSPACE_ID)
}
Loading

0 comments on commit 12e01e2

Please sign in to comment.