
Commit

fixed functions and tested after ruff formating
jrudz committed Oct 16, 2024
1 parent 4e62b29 commit 7d62c31
Showing 7 changed files with 190 additions and 160 deletions.
8 changes: 4 additions & 4 deletions docs/how_to/use_api_functions.md
@@ -342,7 +342,7 @@ For example:

```diff
 metadata_new = {'upload_name': 'Test Upload', 'comment': 'This is a test upload...'}
-edit_upload_metadata(upload_id, url='test', **metadata_new)
+edit_upload_metadata(upload_id, url='test', upload_metadata=metadata_new)
```

??? success "output"
@@ -742,7 +742,7 @@ The returned `dataset_id` can then be used to add individual entries (or all ent

```diff
 metadata_new = {'dataset_id': dataset_id}
-edit_upload_metadata(upload_id, url='test', **metadata_new)
+edit_upload_metadata(upload_id, url='test', upload_metadata=metadata_new)
```

??? success "output"
@@ -828,7 +828,7 @@ Alternatively, you can search for datasets, e.g., by `user_id` or `dataset_name`

```diff
 my_datasets = retrieve_datasets(
-    user_id=nomad_user_me.user_id, url='test', max_datasets=20
+    dataset_params={'user_id': nomad_user_me.user_id, 'max_datasets': 20}, url='test'
 )
 pprint(my_datasets)
```
@@ -851,7 +851,7 @@ To get the list of entries contained within a dataset, use `query_entries()`:

```diff
-dataset_entries = query_entries(dataset_id=dataset_id, url='test')
+dataset_entries = query_entries(query_params={'dataset_id': dataset_id}, url='test')
 for entry in dataset_entries:
     pprint(f'entry_id={entry.entry_id}, upload_id={entry.upload_id}')
```
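The four documentation edits above all reflect the same refactor applied in the source files below: individual keyword arguments are gathered into a single dict parameter. A minimal sketch of the new calling convention, assuming the import paths match the changed files and that `upload_id`, `dataset_id`, and `nomad_user_me` are already defined as in the surrounding docs:

```python
# Sketch only -- assumes the signatures shown in the diffs above.
from nomad_utility_workflows.utils.uploads import edit_upload_metadata
from nomad_utility_workflows.utils.datasets import retrieve_datasets
from nomad_utility_workflows.utils.entries import query_entries

# Metadata fields travel in one dict instead of **kwargs.
edit_upload_metadata(
    upload_id, url='test', upload_metadata={'dataset_id': dataset_id}
)

# Dataset filters and limits travel in dataset_params.
my_datasets = retrieve_datasets(
    dataset_params={'user_id': nomad_user_me.user_id, 'max_datasets': 20},
    url='test',
)

# Entry filters travel in query_params.
dataset_entries = query_entries(
    query_params={'dataset_id': dataset_id}, url='test'
)
```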
4 changes: 3 additions & 1 deletion src/nomad_utility_workflows/utils/datasets.py
@@ -61,7 +61,9 @@ def retrieve_datasets(

```diff
     url: str = None,
 ) -> list[NomadDataset]:
     parameters = []
-    max_datasets = dataset_params.pop('max_datasets')
+    max_datasets = dataset_params.pop(
+        'max_datasets', default_dataset_params['max_datasets']
+    )
     for key, value in dataset_params.items():
         parameters.append(f'{key}={value}')
     url = get_nomad_url(url)
```
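The change above matters because `dict.pop` without a default raises `KeyError` when the key is missing, so callers that omitted `max_datasets` from `dataset_params` would previously crash. A standalone sketch of the behavior; the default value of 10 is hypothetical, standing in for whatever `default_dataset_params['max_datasets']` actually is:

```python
default_dataset_params = {'max_datasets': 10}  # hypothetical default value

dataset_params = {'user_id': 'abc123'}  # caller omitted 'max_datasets'

# Old form: dataset_params.pop('max_datasets') would raise KeyError here.
# New form: falls back to the configured default instead of raising.
max_datasets = dataset_params.pop(
    'max_datasets', default_dataset_params['max_datasets']
)
assert max_datasets == 10
```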
4 changes: 3 additions & 1 deletion src/nomad_utility_workflows/utils/entries.py
@@ -213,7 +213,9 @@ def get_entries_of_my_uploads(

```diff
     ]


-@ttl_cache(maxsize=128, ttl=180)
+# @ttl_cache(maxsize=128, ttl=180)
+# ! Had to remove caching because of the use of dict as input
+# ! which was required to reduce the number of inputs for ruff
 def query_entries(
     query_params: QueryParams = default_query_params.copy(),
     url: str = None,
```
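The new comments explain why the decorator had to go: argument-keyed caches hash their inputs, and the `query_params` dict introduced by this refactor is unhashable, so `ttl_cache` can no longer wrap `query_entries`. A minimal reproduction of the constraint using the stdlib `functools.lru_cache` (the same rule applies to `cachetools`-style TTL caches):

```python
from functools import lru_cache

@lru_cache(maxsize=128)
def cached_query(query_params):
    # Results are keyed on query_params, so it must be hashable.
    return f'querying with {query_params}'

cached_query(('dataset_id', 'abc'))  # OK: tuples are hashable
cached_query({'dataset_id': 'abc'})  # TypeError: unhashable type: 'dict'
```

If caching is reintroduced later, a common workaround is to normalize the dict to a `frozenset` of its items before it reaches the cached function.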
3 changes: 2 additions & 1 deletion src/nomad_utility_workflows/utils/uploads.py
@@ -191,9 +191,10 @@ def edit_upload_metadata(

```diff
     url_name = get_nomad_url_name(url)
     logger.info('editing the metadata for upload %s on %s server', upload_id, url_name)
     metadata = {'metadata': {}}
+    if 'dataset_id' in upload_metadata.keys():
+        upload_metadata['datasets'] = upload_metadata.pop('dataset_id')
     for key, value in upload_metadata.items():
         metadata['metadata'][key] = value

     response = post_nomad_request(
         RequestOptions(
             section=f'/uploads/{upload_id}/edit',
```
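The added branch renames the user-facing `dataset_id` key before the payload is built, presumably because the NOMAD edit endpoint expects a `datasets` field. A small sketch of the resulting POST body under that assumption:

```python
upload_metadata = {'upload_name': 'Test Upload', 'dataset_id': 'my-dataset-id'}

# Rename 'dataset_id' to the 'datasets' key used in the request body.
if 'dataset_id' in upload_metadata:
    upload_metadata['datasets'] = upload_metadata.pop('dataset_id')

# Nest everything under the top-level 'metadata' key, as in the diff above.
metadata = {'metadata': {}}
for key, value in upload_metadata.items():
    metadata['metadata'][key] = value

print(metadata)
# {'metadata': {'upload_name': 'Test Upload', 'datasets': 'my-dataset-id'}}
```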
