Skip to content

Commit

Permalink
Merge branch 'staging' into prod
Browse files Browse the repository at this point in the history
  • Loading branch information
NotChristianGarcia committed May 3, 2023
2 parents ae2dd41 + d66d23c commit 4df11f3
Show file tree
Hide file tree
Showing 5 changed files with 83 additions and 37 deletions.
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,18 @@

All notable changes to this project will be documented in this file.

## 1.3.0 - 2023-05-03
### Breaking Changes:
- No Change.

### New features:
- No Change.

### Bug fixes:
- Row update was slightly broken. Users can now set row values to null with a JSON null value.
- Users can also now update any individual row value. Previously, columns declared as not nullable were marked "required" in the update schema, which meant every update also had to supply values for those columns.


## 1.3.0 - 2023-03-09
### Breaking Changes:
- No Change.
Expand Down
4 changes: 2 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,13 @@ build-core:
# @echo "Now migrating"
# @docker-compose run api python /home/tapis/manage.py migrate_schemas --shared

local-deploy: build-core
up: build-core
@echo "Running docker-compose up"
@docker-compose up -d api


# Running the pgrest/test.py file
test: local-deploy
test:
@docker-compose run api python /home/tapis/manage.py test -v 2


Expand Down
8 changes: 6 additions & 2 deletions pgrest/db_transactions/table_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,8 +246,10 @@ def update_row_with_pk(table_name, pk_id, data, tenant, primary_key, db_instance
logger.info(f"Updating row with pk \'{pk_id}\' in {tenant}.{table_name}...")
command = f"UPDATE {tenant}.{table_name} SET"
for col in data:
if type(data[col]) == str:
if isinstance(data[col], str):
command = f"{command} {col} = (\'{data[col]}\'), "
elif isinstance(data[col], type(None)):
command = f"{command} {col} = (NULL), "
else:
command = f"{command} {col} = ({data[col]}), "
if type(pk_id) == 'int' or type(pk_id) == 'float':
Expand Down Expand Up @@ -277,8 +279,10 @@ def update_rows_with_where(table_name, data, tenant, db_instance, search_params=
logger.info(f"Updating rows in {tenant}.{table_name}...")
command = f"UPDATE {tenant}.{table_name} SET"
for col in data:
if type(data[col]) == str:
if isinstance(data[col], str):
command = f"{command} {col} = (\'{data[col]}\'), "
elif isinstance(data[col], type(None)):
command = f"{command} {col} = (NULL), "
else:
command = f"{command} {col} = ({data[col]}), "

Expand Down
54 changes: 51 additions & 3 deletions pgrest/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -343,6 +343,52 @@ def test_update_row(self):
content_type='application/json')
self.assertEqual(response.status_code, 200)

def test_update_row_with_null_value(self):
# We should be able to change a row value from filled to null without issue, if it's nullable.
# first, we need to create row
root_url = self.init_resp_1["result"]["root_url"]
table_name = self.init_resp_1["result"]["table_name"]
data = {"col_one": "hello", "col_two": 100, "col_three": 90, "col_four": False, "col_five": "hehe"}
response = self.client.post(f'/v3/pgrest/data/{root_url}',
data=json.dumps({"data": data}),
**auth_headers,
content_type='application/json')
self.assertEqual(response.status_code, 200)
response = self.client.get(f'/v3/pgrest/data/{root_url}', **auth_headers)
self.assertEqual(len(response.json()["result"]), 1)
self.assertEqual(response.status_code, 200)

# now, update
data = {"col_two": None}
row_id = response.json()["result"][0]['_pkid']
response = self.client.put(f'/v3/pgrest/data/{root_url}/{row_id}',
**auth_headers,
data=json.dumps({"data": data}),
content_type='application/json')
self.assertEqual(response.status_code, 200)

def test_update_row_with_null_value_on_non_nullable_column_400(self):
root_url = self.init_resp_1["result"]["root_url"]
table_name = self.init_resp_1["result"]["table_name"]
data = {"col_one": "hello", "col_two": 100, "col_three": 90, "col_four": False, "col_five": "hehe"}
response = self.client.post(f'/v3/pgrest/data/{root_url}',
data=json.dumps({"data": data}),
**auth_headers,
content_type='application/json')
self.assertEqual(response.status_code, 200)
response = self.client.get(f'/v3/pgrest/data/{root_url}', **auth_headers)
self.assertEqual(len(response.json()["result"]), 1)
self.assertEqual(response.status_code, 200)

# now, update
data = {"col_four": None}
row_id = response.json()["result"][0]['_pkid']
response = self.client.put(f'/v3/pgrest/data/{root_url}/{row_id}',
**auth_headers,
data=json.dumps({"data": data}),
content_type='application/json')
self.assertEqual(response.status_code, 400)

def test_update_row_wrong_data_type_400(self):
# first, we need to create row
root_url = self.init_resp_1["result"]["root_url"]
Expand All @@ -356,6 +402,7 @@ def test_update_row_wrong_data_type_400(self):
response = self.client.get(f'/v3/pgrest/data/{root_url}', **auth_headers)
self.assertEqual(len(response.json()["result"]), 1)
self.assertEqual(response.status_code, 200)

# now, update
data = {"col_two": "haha"}
row_id = response.json()["result"][0]['_pkid']
Expand Down Expand Up @@ -388,6 +435,7 @@ def test_update_row_nonexistent_column_400(self):
response = self.client.get(f'/v3/pgrest/data/{root_url}', **auth_headers)
self.assertEqual(len(response.json()["result"]), 1)
self.assertEqual(response.status_code, 200)

# now, update
data = {"col_where": 30}
row_id = response.json()["result"][0]['_pkid']
Expand Down Expand Up @@ -1405,17 +1453,17 @@ def test_all_search_parameters(self):
# Nonexistence checks
def test_get_nonexistent_view(self):
response = self.client.get(f'/v3/pgrest/views/this_view_does_not_exist', **auth_headers)
print(response)
#print(response)
self.assertEqual(response.status_code, 404)

def test_get_nonexistent_manage_view(self):
response = self.client.get(f'/v3/pgrest/manage/views/22', **auth_headers)
print(response)
#print(response)
self.assertEqual(response.status_code, 404)

def test_delete_nonexistent_view(self):
response = self.client.delete(f'/v3/pgrest/manage/views/22', **auth_headers)
print(response)
#print(response)
self.assertEqual(response.status_code, 404)

def test_raw_sql_view_creation(self):
Expand Down
42 changes: 12 additions & 30 deletions pgrest/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,31 +73,9 @@ def create_validate_schema(columns, tenant, existing_enum_names):
schema_update = dict()
schema_create = dict()

for key, key_info in columns.items():
key_type = key_info["data_type"].lower()
info_dict = dict()
if key_type in ["varchar", "char"]:
try:
val_length = int(key_info["char_len"])
info_dict.update({"type": "string", "maxlength": val_length})
except KeyError:
raise KeyError(f"Unable to create table. {key_type} data types requires char_len field.")
elif key_type == "text":
info_dict.update({"type": "string"})
elif key_type == "serial":
info_dict.update({"type": "integer"})
elif key_type == "date":
info_dict.update({"type": "string"})
elif key_type == "timestamp":
info_dict.update({"type": "string"})
elif '[]' in key_type:
info_dict.update({"type": "list"})
elif key_type in existing_enum_names or f'{tenant}.{key_type}' in existing_enum_names:
info_dict.update({"type": "string"})
else:
info_dict.update({"type": key_type})
schema_update[key] = info_dict

# We once had different update and create schemas. I couldn't figure out a valid reason for this.
# Only difference is schema_update should never have "required" field. As users should always be able
# to update a row's value.
for key, key_info in columns.items():
key_type = key_info["data_type"]
info_dict = dict()
Expand All @@ -120,13 +98,17 @@ def create_validate_schema(columns, tenant, existing_enum_names):
info_dict.update({"type": key_type})

if "null" in key_info.keys():
if not key_info["null"]:
info_dict.update({"required": True,
"nullable": True})
else:
if key_info["null"]: #true
info_dict.update({"required": False,
"nullable": True})
schema_create[key] = info_dict
else: #false
info_dict.update({"required": True,
"nullable": False})
schema_create[key] = info_dict.copy()

# Update schema should never have required fields. Users should always be able to update a row's value.
info_dict.update({"required": False})
schema_update[key] = info_dict.copy()

return schema_create, schema_update

Expand Down

0 comments on commit 4df11f3

Please sign in to comment.