Skip to content

Commit

Permalink
Merge master
Browse files Browse the repository at this point in the history
  • Loading branch information
davidmreed committed Apr 26, 2020
2 parents c7aba2a + dd126ca commit 4287a4b
Show file tree
Hide file tree
Showing 6 changed files with 86 additions and 11 deletions.
18 changes: 14 additions & 4 deletions amaxa/loader/extract_operation.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,9 @@ def _post_initialize_validate(self):
self._validate_lookup_behaviors()

def _get_field_scope(self, entry):
# Use the 'field-group' or 'field' items to derive the field scope
# Use the 'field-group', 'fields', and 'exclude-fields' items to derive the field scope

fields = set()

if "field-group" in entry:
# Don't include types we don't process: geolocations, addresses, and base64 fields.
Expand All @@ -105,12 +107,20 @@ def include(f):
"base64",
]

return set(
fields.update(
self.result.get_filtered_field_map(entry["sobject"], include).keys()
)

# Build the field scope, taking flat lists and maps into account.
return {f if isinstance(f, str) else f["field"] for f in entry["fields"]}
if "fields" in entry:
fields.update(
{f if isinstance(f, str) else f["field"] for f in entry["fields"]}
)

if "exclude-fields" in entry:
for f in entry["exclude-fields"]:
fields.discard(f)

return fields

def _open_files(self):
# Open all of the output files
Expand Down
20 changes: 15 additions & 5 deletions amaxa/loader/load_operation.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,9 @@ def _initialize(self):
self._open_files()

def _get_field_scope(self, entry):
# Use the 'field-group' or 'field' items to derive the field scope
# Use the 'field-group', 'fields', and 'exclude-fields' items to derive the field scope

fields = set()

if "field-group" in entry:
# Validation clamps acceptable values to 'writeable' or 'smart' by this point.
Expand All @@ -63,12 +65,20 @@ def _get_field_scope(self, entry):
def include(f):
return f["createable"] and f["type"] != "base64"

return set(
fields.update(
self.result.get_filtered_field_map(entry["sobject"], include).keys()
)
else:
# Build the field scope, taking flat lists and maps into account.
return {f if isinstance(f, str) else f["field"] for f in entry["fields"]}

if "fields" in entry:
fields.update(
{f if isinstance(f, str) else f["field"] for f in entry["fields"]}
)

if "exclude-fields" in entry:
for f in entry["exclude-fields"]:
fields.discard(f)

return fields

def _open_files(self):
# Open all of the input and output files
Expand Down
6 changes: 4 additions & 2 deletions amaxa/loader/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -412,11 +412,9 @@ def _validate_transform_options(field, value, error):
"field-group": {
"type": "string",
"allowed": ["readable", "writeable", "smart"],
"excludes": ["fields"],
},
"fields": {
"type": "list",
"excludes": ["field-group"],
"schema": {
"type": ["string", "dict"],
"schema": {
Expand Down Expand Up @@ -449,6 +447,10 @@ def _validate_transform_options(field, value, error):
},
},
},
"exclude-fields": {
"type": "list",
"schema": {"type": "string"},
},
},
},
},
Expand Down
15 changes: 15 additions & 0 deletions docs/operations.rst
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,21 @@ is an example of a simple field specification.
would extract the ``Description`` field, name the CSV column ``Desc``, and apply the transformations ``strip`` (remove leading and trailing whitespace) and ``lowercase`` (convert text to lower case) on extracted data. On load, Amaxa would look for a CSV column ``Desc``, map it to the ``Description`` field, and apply the same transformations to inbound data.

``fields`` and ``field-group`` can be combined if you wish to customize the behavior of ``field-group`` by adding column mappings or specifying additional fields that don't exist in the group. Additionally, the ``exclude-fields`` key can be used to suppress fields you don't want that might otherwise be included by the chosen ``field-group``.

.. code-block:: yaml

    field-group: smart
    fields:
        -
            field: Description
            column: Desc
            transforms:
                - strip
                - lowercase
    exclude-fields:
        - OwnerId

Where is the data going to or coming from?
******************************************

Expand Down
19 changes: 19 additions & 0 deletions test/test_unit/test_ExtractionOperationLoader.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,6 +242,25 @@ def test_load_extraction_operation_generates_field_list(self):

self.assertEqual({"Name", "Industry", "Id"}, result.steps[0].field_scope)

def test_ExtractionOperationLoader_generates_field_list__all_directives(self):
    # A single extraction step combining all three field-scope directives:
    # a field group, an explicit field list, and an exclusion list.
    step = {
        "sobject": "Account",
        "field-group": "writeable",
        "fields": ["IsDeleted"],
        "exclude-fields": ["OwnerId"],
        "extract": {"all": True},
        "input-validation": "none",
    }
    outcome = self._run_success_test({"version": 2, "operation": [step]})

    # The explicit field is included; the excluded field is suppressed
    # even though the 'writeable' group would otherwise pull it in.
    scope = outcome.steps[0].field_scope
    self.assertIn("IsDeleted", scope)
    self.assertNotIn("OwnerId", scope)

def test_load_extraction_operation_creates_export_mapper(self):
result = self._run_success_test(
{
Expand Down
19 changes: 19 additions & 0 deletions test/test_unit/test_LoadOperationLoader.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,25 @@ def test_LoadOperationLoader_generates_field_list(self):
result = self._run_success_test(ex)
self.assertEqual({"Name", "Industry"}, result.steps[0].field_scope)

def test_LoadOperationLoader_generates_field_list__all_directives(self):
    # Exercise 'field-group', 'fields' (with a mapped/transformed entry),
    # and 'exclude-fields' together within a single load step.
    step = {
        "sobject": "Account",
        "field-group": "writeable",
        "fields": [{"field": "Name", "transforms": ["lowercase"]}],
        "exclude-fields": ["Industry"],
        "extract": {"all": True},
        "input-validation": "none",
    }
    outcome = self._run_success_test({"version": 2, "operation": [step]})

    # The mapped field entry produces a data mapper for the sObject,
    # and the excluded field is dropped from the computed field scope.
    self.assertIn("Account", outcome.mappers)
    self.assertNotIn("Industry", outcome.steps[0].field_scope)

def test_LoadOperationLoader_populates_data_mappers(self):
ex = {
"version": 1,
Expand Down

0 comments on commit 4287a4b

Please sign in to comment.