[CA 873] Modify the schema not to use interface pattern. (#29)
* create separate package for cloud-resource-schema

* move cloud resource uid test to right directory

* let publish workflow also generate openapi

* trying to publish a package

* trying to publish a package

* trying to publish a package

* update readme

* update readme

* address comment

* modify schema not to use oneOf

* new version

* update schema version

* fix tests

* address comment

* remove unnecessary assert method name
yonghaoy authored Jul 16, 2020
1 parent 94c9774 commit 5e1b913
Showing 13 changed files with 120 additions and 100 deletions.
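
Under the new schema, CloudResourceUid is a plain wrapper object with one optional field per resource type instead of a oneOf union keyed by a resourceType discriminator. Below is a minimal sketch of building and round-tripping the new type, assuming the OpenAPI-generated Java classes and the same Jackson ObjectMapper usage as the updated CloudResourceUidTest in this diff; CloudResourceUidExample is an illustrative class name, not part of this commit.

import bio.terra.cloudres.resources.CloudResourceUid;
import bio.terra.cloudres.resources.GoogleBucketUid;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CloudResourceUidExample {
  public static void main(String[] args) throws Exception {
    // Wrap the concrete resource UID in the CloudResourceUid container;
    // by convention exactly one of its fields is set.
    CloudResourceUid uid =
        new CloudResourceUid().googleBucketUid(new GoogleBucketUid().bucketName("my-bucket"));

    // Serialize and deserialize with Jackson, mirroring the updated tests.
    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(uid);
    CloudResourceUid roundTripped = mapper.readValue(json, CloudResourceUid.class);
    System.out.println(json);
    System.out.println("equal after round trip: " + uid.equals(roundTripped));
  }
}
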
1 change: 1 addition & 0 deletions cloud-resource-schema/build.gradle
@@ -1,3 +1,4 @@

apply plugin: 'org.openapi.generator'

dependencies {
2 changes: 1 addition & 1 deletion cloud-resource-schema/gradle.properties
@@ -1,2 +1,2 @@
group = bio.terra.cloud-resource-lib
version = 0.0.0-SNAPSHOT
version = 0.0.2-SNAPSHOT
55 changes: 17 additions & 38 deletions cloud-resource-schema/src/main/resources/cloud_resources_uid.yaml
@@ -3,7 +3,7 @@ info:
title: Terra Cloud Resource UIDs
description: |
Terra Cloud Resources Library definitions of unique identifiers for cloud resources.
version: 0.0.1
version: 0.0.2

# This file defines component models to be reused, not a service path.
paths: {}
@@ -13,28 +13,20 @@ components:
GoogleBigQueryDatasetUid:
type: object
required:
- resourceType
- projectId
- datasetId
properties:
resourceType:
type: string
default: googleBigQueryDatasetUid
projectId:
type: string
datasetId:
type: string
GoogleBigQueryTableUid:
type: object
required:
- resourceType
- projectId
- datasetId
- tableId
properties:
resourceType:
type: string
default: googleBigQueryTableUid
projectId:
type: string
datasetId:
@@ -44,54 +36,41 @@
GoogleBlobUid:
type: object
required:
- resourceType
- bucketName
- blobName
properties:
resourceType:
type: string
default: googleBlobUid
bucketName:
type: string
blobName:
type: string
GoogleBucketUid:
type: object
required:
- resourceType
- bucketName
properties:
resourceType:
type: string
default: googleBucketUid
bucketName:
type: string
GoogleProjectUid:
type: object
required:
- resourceType
- projectId
properties:
resourceType:
type: string
default: googleProjectUid
projectId:
type: string
# A cloud resource unique identifier. Each CloudResourceUid represents exactly one cloud resource.
# We are not using polymorphism for now because of the lack of support in swagger-codegen and openApiGenerator.
# Instead, CloudResourceUid contains a field for every possible cloud resource type, and we assume exactly one
# of them is set when it is used.
CloudResourceUid:
oneOf:
- $ref: '#/components/schemas/GoogleBigQueryDatasetUid'
- $ref: '#/components/schemas/GoogleBigQueryTableUid'
- $ref: '#/components/schemas/GoogleBlobUid'
- $ref: '#/components/schemas/GoogleBucketUid'
- $ref: '#/components/schemas/GoogleProjectUid'
discriminator:
# Use the resourceType property to figure out which schema is represented by the CloudResourceUid.
propertyName: resourceType
mapping:
# Mapping should match the default resourceType values. Default resourceType values allow created Java objects
# to have the same value as created -> serialized -> deserialized values.
googleBigQueryDatasetUid: '#/components/schemas/GoogleBigQueryDatasetUid'
googleBigQueryTableUid: '#/components/schemas/GoogleBigQueryTableUid'
googleBlobUid: '#/components/schemas/GoogleBlobUid'
googleBucketUid: '#/components/schemas/GoogleBucketUid'
googleProjectUid: '#/components/schemas/GoogleProjectUid'
type: object
properties:
googleProjectUid:
$ref: '#/components/schemas/GoogleProjectUid'
googleBigQueryDatasetUid:
$ref: '#/components/schemas/GoogleBigQueryDatasetUid'
googleBigQueryTableUid:
$ref: '#/components/schemas/GoogleBigQueryTableUid'
googleBlobUid:
$ref: '#/components/schemas/GoogleBlobUid'
googleBucketUid:
$ref: '#/components/schemas/GoogleBucketUid'
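
Because the discriminator is gone, callers can no longer dispatch on a resourceType property; as the schema comment above notes, the convention is that exactly one field of CloudResourceUid is populated. A hedged sketch of how consuming code might branch on the populated field follows; the describe helper is hypothetical and not part of this commit, and the getter names assume the usual getGoogleBucketUid()-style bean accessors produced by the generator.

import bio.terra.cloudres.resources.CloudResourceUid;

public class CloudResourceUidDescriber {
  /** Hypothetical helper: describes whichever resource field is set on the wrapper. */
  public static String describe(CloudResourceUid uid) {
    // Exactly one of these fields is expected to be non-null under the new convention.
    if (uid.getGoogleProjectUid() != null) {
      return "project " + uid.getGoogleProjectUid().getProjectId();
    } else if (uid.getGoogleBucketUid() != null) {
      return "bucket " + uid.getGoogleBucketUid().getBucketName();
    } else if (uid.getGoogleBlobUid() != null) {
      return "blob " + uid.getGoogleBlobUid().getBlobName()
          + " in bucket " + uid.getGoogleBlobUid().getBucketName();
    } else if (uid.getGoogleBigQueryDatasetUid() != null) {
      return "BigQuery dataset " + uid.getGoogleBigQueryDatasetUid().getDatasetId();
    } else if (uid.getGoogleBigQueryTableUid() != null) {
      return "BigQuery table " + uid.getGoogleBigQueryTableUid().getTableId();
    }
    return "no resource set";
  }
}
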
@@ -1,12 +1,9 @@
package bio.terra.clouders.resources;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;

import bio.terra.cloudres.resources.*;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

@@ -18,49 +15,55 @@ public class CloudResourceUidTest {
/**
* Asserts that serializing->deserializing a {@link CloudResourceUid} yields an "equals" instance.
*/
public void assertSerializationIdempotency(CloudResourceUid resource, Class<?> resourceClass)
throws JsonProcessingException {
public void assertSerializationIdempotency(CloudResourceUid resource) throws Exception {
String serialized = objectMapper.writeValueAsString(resource);
CloudResourceUid deserialized = objectMapper.readValue(serialized, resource.getClass());
// Asserts that serializing->deserializing a {@link CloudResourceUid} yields an "equals"
// instance.
assertEquals(resource, deserialized);
// Assert that the deserialized instance is an instance of the same class.
assertThat(resource, Matchers.instanceOf(resourceClass));
}

@Test
public void googleBigQueryDataset() throws Exception {
GoogleBigQueryDatasetUid dataset =
new GoogleBigQueryDatasetUid().projectId("my-project").datasetId("my-dataset");
assertSerializationIdempotency(dataset, GoogleBigQueryDatasetUid.class);
CloudResourceUid dataset =
new CloudResourceUid()
.googleBigQueryDatasetUid(
new GoogleBigQueryDatasetUid().projectId("my-project").datasetId("my-dataset"));

assertSerializationIdempotency(dataset);
}

@Test
public void googleBigQueryTable() throws Exception {
GoogleBigQueryTableUid table =
new GoogleBigQueryTableUid()
.projectId("my-project")
.datasetId("my-dataset")
.tableId("my-table");
assertSerializationIdempotency(table, GoogleBigQueryTableUid.class);
CloudResourceUid table =
new CloudResourceUid()
.googleBigQueryTableUid(
new GoogleBigQueryTableUid()
.projectId("my-project")
.datasetId("my-dataset")
.tableId("my-table"));
assertSerializationIdempotency(table);
}

@Test
public void googleBlob() throws Exception {
GoogleBlobUid blob = new GoogleBlobUid().bucketName("my-bucket").blobName("my-blob");
assertSerializationIdempotency(blob, GoogleBlobUid.class);
CloudResourceUid blob =
new CloudResourceUid()
.googleBlobUid(new GoogleBlobUid().bucketName("my-bucket").blobName("my-blob"));
assertSerializationIdempotency(blob);
}

@Test
public void googleBucket() throws Exception {
GoogleBucketUid bucket = new GoogleBucketUid().bucketName("my-bucket");
assertSerializationIdempotency(bucket, GoogleBucketUid.class);
CloudResourceUid bucket =
new CloudResourceUid().googleBucketUid(new GoogleBucketUid().bucketName("my-bucket"));
assertSerializationIdempotency(bucket);
}

@Test
public void googleProject() throws Exception {
GoogleProjectUid project = new GoogleProjectUid().projectId("my-project");
assertSerializationIdempotency(project, GoogleProjectUid.class);
CloudResourceUid project =
new CloudResourceUid().googleProjectUid(new GoogleProjectUid().projectId("my-project"));
assertSerializationIdempotency(project);
}
}
@@ -19,9 +19,12 @@ public class CleanupRecorderTest {
.setTimeToLive(Duration.ofMinutes(1))
.build();

private static final CloudResourceUid RESOURCE_1 = new GoogleBucketUid().bucketName("1");
private static final CloudResourceUid RESOURCE_2 = new GoogleBucketUid().bucketName("2");
private static final CloudResourceUid RESOURCE_3 = new GoogleBucketUid().bucketName("3");
private static final CloudResourceUid RESOURCE_1 =
new CloudResourceUid().googleBucketUid(new GoogleBucketUid().bucketName("1"));
private static final CloudResourceUid RESOURCE_2 =
new CloudResourceUid().googleBucketUid(new GoogleBucketUid().bucketName("2"));
private static final CloudResourceUid RESOURCE_3 =
new CloudResourceUid().googleBucketUid(new GoogleBucketUid().bucketName("3"));

@Test
public void recordsForTestingOnlyAfterStart() {
@@ -7,6 +7,7 @@
import bio.terra.cloudres.common.OperationAnnotator;
import bio.terra.cloudres.common.TransformPage;
import bio.terra.cloudres.common.cleanup.CleanupRecorder;
import bio.terra.cloudres.resources.CloudResourceUid;
import bio.terra.cloudres.resources.GoogleBigQueryDatasetUid;
import bio.terra.cloudres.resources.GoogleBigQueryTableUid;
import com.google.api.gax.paging.Page;
@@ -43,13 +44,15 @@ public BigQueryCow(ClientConfig clientConfig, BigQueryOptions bigQueryOptions) {

/** See {@link BigQuery#create(DatasetInfo, DatasetOption...)}. */
public DatasetCow create(DatasetInfo datasetInfo, DatasetOption... datasetOptions) {
GoogleBigQueryDatasetUid datasetUid =
new GoogleBigQueryDatasetUid()
.projectId(
datasetInfo.getDatasetId().getProject() == null
? defaultProjectId
: datasetInfo.getDatasetId().getProject())
.datasetId(datasetInfo.getDatasetId().getDataset());
CloudResourceUid datasetUid =
new CloudResourceUid()
.googleBigQueryDatasetUid(
new GoogleBigQueryDatasetUid()
.projectId(
datasetInfo.getDatasetId().getProject() == null
? defaultProjectId
: datasetInfo.getDatasetId().getProject())
.datasetId(datasetInfo.getDatasetId().getDataset()));
CleanupRecorder.record(datasetUid, clientConfig.getCleanupConfig());

return new DatasetCow(
@@ -91,11 +94,14 @@ public DatasetCow getDataSet(String datasetId, DatasetOption... datasetOptions)
/** See {@link BigQuery#create(TableInfo, TableOption...)}. */
public TableCow create(TableInfo tableInfo, TableOption... tableOptions) {
TableId tableId = tableInfo.getTableId();
GoogleBigQueryTableUid tableUid =
new GoogleBigQueryTableUid()
.projectId(tableId.getProject() == null ? defaultProjectId : tableId.getProject())
.datasetId(tableId.getDataset())
.tableId(tableId.getTable());
CloudResourceUid tableUid =
new CloudResourceUid()
.googleBigQueryTableUid(
new GoogleBigQueryTableUid()
.projectId(
tableId.getProject() == null ? defaultProjectId : tableId.getProject())
.datasetId(tableId.getDataset())
.tableId(tableId.getTable()));
CleanupRecorder.record(tableUid, clientConfig.getCleanupConfig());

return new TableCow(
@@ -6,6 +6,7 @@
import bio.terra.cloudres.common.CloudOperation;
import bio.terra.cloudres.common.OperationAnnotator;
import bio.terra.cloudres.common.cleanup.CleanupRecorder;
import bio.terra.cloudres.resources.CloudResourceUid;
import bio.terra.cloudres.resources.GoogleBigQueryTableUid;
import com.google.cloud.bigquery.*;
import com.google.cloud.bigquery.BigQuery.TableOption;
@@ -63,10 +64,12 @@ public TableCow create(
String tableId, TableDefinition tableDefinition, TableOption... tableOptions) {
DatasetId datasetId = dataset.getDatasetId();
CleanupRecorder.record(
new GoogleBigQueryTableUid()
.projectId(datasetId.getProject())
.datasetId(datasetId.getDataset())
.tableId(tableId),
new CloudResourceUid()
.googleBigQueryTableUid(
new GoogleBigQueryTableUid()
.projectId(datasetId.getProject())
.datasetId(datasetId.getDataset())
.tableId(tableId)),
clientConfig.getCleanupConfig());

return new TableCow(
@@ -62,9 +62,11 @@ public void createDataset() {
assertThat(
record,
Matchers.contains(
new GoogleBigQueryDatasetUid()
.projectId(datasetId.getProject())
.datasetId(datasetId.getDataset())));
new CloudResourceUid()
.googleBigQueryDatasetUid(
new GoogleBigQueryDatasetUid()
.projectId(datasetId.getProject())
.datasetId(datasetId.getDataset()))));
}

@Test
@@ -111,10 +113,12 @@ public void createTable() {
assertThat(
record,
Matchers.contains(
new GoogleBigQueryTableUid()
.projectId(tableId.getProject())
.datasetId(tableId.getDataset())
.tableId(tableId.getTable())));
new CloudResourceUid()
.googleBigQueryTableUid(
new GoogleBigQueryTableUid()
.projectId(tableId.getProject())
.datasetId(tableId.getDataset())
.tableId(tableId.getTable()))));
}

@Test
@@ -73,10 +73,12 @@ public void createThenGetTable() {
assertThat(
record,
Matchers.contains(
new GoogleBigQueryTableUid()
.projectId(datasetCow.getDatasetInfo().getDatasetId().getProject())
.datasetId(tableId.getDataset())
.tableId(tableId.getTable())));
new CloudResourceUid()
.googleBigQueryTableUid(
new GoogleBigQueryTableUid()
.projectId(datasetCow.getDatasetInfo().getDatasetId().getProject())
.datasetId(tableId.getDataset())
.tableId(tableId.getTable()))));

bigQueryCow.delete(tableId);
}
@@ -1,5 +1,6 @@
package bio.terra.cloudres.google.storage;

import bio.terra.cloudres.resources.CloudResourceUid;
import bio.terra.cloudres.resources.GoogleBlobUid;
import com.google.cloud.storage.Acl;
import com.google.cloud.storage.BlobId;
@@ -13,8 +14,10 @@
class SerializeUtils {
private SerializeUtils() {}

static GoogleBlobUid create(BlobId blobId) {
return new GoogleBlobUid().blobName(blobId.getName()).bucketName(blobId.getBucket());
static CloudResourceUid create(BlobId blobId) {
return new CloudResourceUid()
.googleBlobUid(
new GoogleBlobUid().blobName(blobId.getName()).bucketName(blobId.getBucket()));
}

static JsonObject convert(Acl acl) {
@@ -4,6 +4,7 @@
import bio.terra.cloudres.common.CloudOperation;
import bio.terra.cloudres.common.OperationAnnotator;
import bio.terra.cloudres.common.cleanup.CleanupRecorder;
import bio.terra.cloudres.resources.CloudResourceUid;
import bio.terra.cloudres.resources.GoogleBucketUid;
import com.google.cloud.WriteChannel;
import com.google.cloud.storage.*;
@@ -44,7 +45,9 @@ public BlobCow create(BlobInfo blobInfo) {
/** See {@link Storage#create(BucketInfo, Storage.BucketTargetOption...)}. */
public BucketCow create(BucketInfo bucketInfo) {
CleanupRecorder.record(
new GoogleBucketUid().bucketName(bucketInfo.getName()), clientConfig.getCleanupConfig());
new CloudResourceUid()
.googleBucketUid(new GoogleBucketUid().bucketName(bucketInfo.getName())),
clientConfig.getCleanupConfig());
Bucket bucket =
operationAnnotator.executeCowOperation(
CloudOperation.GOOGLE_CREATE_BUCKET,