diff --git a/broker_rest_api.md b/broker_rest_api.md
index dc4666b..3724807 100644
--- a/broker_rest_api.md
+++ b/broker_rest_api.md
@@ -4,11 +4,11 @@
1. [General remarks](#general_remarks)
2. [/retrieve_from_buffers](#retrieve_from_buffers)
3. [/take_pedestal](#take_pedestal)
-4. [/get_allowed_detectors_list](#get_allowed_detectors_list)
-5. [/get_running_detectors_list](#get_running_detectors_list)
+4. [/get_allowed_detectors](#get_allowed_detectors)
+5. [/get_running_detectors](#get_running_detectors)
6. [/power_on_detector](#power_on_detector)
-7. [/get_next_run_number](#get_next_run_number)
-8. [/get_last_run_number](#get_last_run_number)
+7. [/advance_run_number](#advance_run_number)
+8. [/get_current_run_number](#get_current_run_number)
9. [/get_pvlist](#get_pvlist)
10. [/set_pvlist](#set_pvlist)
11. [/close_pgroup_writing](#close_pgroup_writing)
@@ -41,7 +41,7 @@ When making requests to the broker, it's crucial to handle responses appropriate
import json
# Example API call to retrieve data
-api_call = "/get_allowed_detectors_list"
+api_call = "/get_allowed_detectors"
r = requests.get(f"{broker_address}{api_call}")
# Check if the request was successful (Status Code 200)
@@ -240,7 +240,7 @@ else:
Ensure detectors specified for the pedestal run belong to the respective beamline and are appropriately configured.
-
+
## Get Allowed Detectors List
Retrieve the list of detectors configured for the beamline along with their specific details.
@@ -252,7 +252,7 @@ import requests
import json
# Make the API call to get allowed detectors list
-allowed_detectors_url = f"{broker_address}/get_allowed_detectors_list"
+allowed_detectors_url = f"{broker_address}/get_allowed_detectors"
r = requests.get(allowed_detectors_url)
# Check for a successful response and handle accordingly
@@ -288,7 +288,7 @@ else:
* Successful responses contain a list of detectors configured for the beamline, their human-readable names, and visualisation addresses.
* Failed responses might occur if there's an issue with the request or if detectors aren't properly configured.
-
+
## Get Running Detectors List
Retrieve the list of detectors currently recording data to the DetectorBuffer.
@@ -299,7 +299,7 @@ import requests
import json
# Make the API call to get the list of running detectors
-running_detectors_url = f"{broker_address}/get_running_detectors_list"
+running_detectors_url = f"{broker_address}/get_running_detectors"
r = requests.get(running_detectors_url)
# Check for a successful response and handle accordingly
@@ -371,7 +371,7 @@ else:
Ensure the detector specified for powering on is properly configured and connected to necessary hardware elements such as cooling, power, network, and trigger systems before making this call.
-
+
## Get Next Acquisition Run Number
Generate a run number for the next data acquisition process to ensure sequential data organization.
@@ -386,7 +386,7 @@ import json
pgroup = "p17534"
# Make the API call to get the next run number
-next_run_number_url = f"{broker_address}/get_next_run_number"
+next_run_number_url = f"{broker_address}/advance_run_number"
next_run_number_request = {'pgroup': pgroup}
r = requests.get(next_run_number_url, json=next_run_number_request)
@@ -422,7 +422,7 @@ else:
Ensure the call for the next run number is made in a sequential manner to maintain data organization and prevent loss of acquired data due to non-usage of generated run numbers.
-
+
## Get Last Acquisition Run Number
Retrieve the most recently generated run number for data acquisition in a pgroup.
@@ -437,7 +437,7 @@ import json
pgroup = "p17534"
# Make the API call to get the last run number
-last_run_number_url = f"{broker_address}/get_last_run_number"
+last_run_number_url = f"{broker_address}/get_current_run_number"
last_run_number_request = {'pgroup': pgroup}
r = requests.get(last_run_number_url, json=last_run_number_request)
diff --git a/sf_daq_broker/broker.py b/sf_daq_broker/broker.py
index 43cf712..30c29b9 100644
--- a/sf_daq_broker/broker.py
+++ b/sf_daq_broker/broker.py
@@ -14,6 +14,7 @@
ENDPOINTS_POST = [
+ "advance_run_number",
"retrieve_from_buffers",
"take_pedestal",
"power_on_detector",
@@ -22,10 +23,9 @@
]
ENDPOINTS_GET = [
- "get_allowed_detectors_list",
- "get_running_detectors_list",
- "get_next_run_number",
- "get_last_run_number",
+ "get_allowed_detectors",
+ "get_running_detectors",
+ "get_current_run_number",
"get_pvlist"
]
diff --git a/sf_daq_broker/broker_manager.py b/sf_daq_broker/broker_manager.py
index 5a5a7cd..3154f82 100644
--- a/sf_daq_broker/broker_manager.py
+++ b/sf_daq_broker/broker_manager.py
@@ -63,7 +63,7 @@ def get_pvlist(self, request, remote_ip):
res = {
"status": "ok",
- "message": "successfully retrieved list of PVs",
+ "message": f"successfully retrieved list of PVs for {beamline}",
"pv_list": pv_list
}
return res
@@ -92,14 +92,23 @@ def set_pvlist(self, request, remote_ip):
config_file_timestamped = f"{config_file}.{date_now_str}"
copyfile(config_file, config_file_timestamped)
- return pv_list
+ res = {
+ "status": "ok",
+ "message": f"successfully changed list of PVs for {beamline}",
+ "pv_list": pv_list
+ }
+ return res
- def get_last_run_number(self, request, remote_ip, increment_run_number=False):
- return self.get_next_run_number(request=request, remote_ip=remote_ip, increment_run_number=increment_run_number)
+ def get_current_run_number(self, request, remote_ip):
+ return self._get_run_number(request, remote_ip, False)
- def get_next_run_number(self, request, remote_ip, increment_run_number=True):
+ def advance_run_number(self, request, remote_ip):
+ return self._get_run_number(request, remote_ip, True)
+
+
+ def _get_run_number(self, request, remote_ip, increment_run_number):
validate.request_has(request, "pgroup")
beamline = get_beamline(remote_ip)
@@ -114,8 +123,15 @@ def get_next_run_number(self, request, remote_ip, increment_run_number=True):
validate.pgroup_is_not_closed(daq_directory, path_to_pgroup)
- next_run = get_current_run_number(daq_directory, increment_run_number=increment_run_number)
- return next_run
+ run_number = get_run_number(daq_directory, increment_run_number=increment_run_number)
+ action = "advanced" if increment_run_number else "retrieved"
+
+ res = {
+ "status": "ok",
+ "message": f"successfully {action} run number for {beamline} {pgroup}",
+ "run_number": run_number
+ }
+ return res
def power_on_detector(self, request, remote_ip):
@@ -142,7 +158,7 @@ def power_on_detector(self, request, remote_ip):
return "request to power on detector sent, wait a few minutes"
- def get_running_detectors_list(self, request, remote_ip):
+ def get_running_detectors(self, request, remote_ip):
validate.request_is_empty(request)
beamline = get_beamline(remote_ip)
@@ -160,13 +176,13 @@ def get_running_detectors_list(self, request, remote_ip):
res = {
"status": "ok",
- "message": "successfully retrieved list of running detectors",
+ "message": f"successfully retrieved list of running detectors for {beamline}",
"detectors": running_detectors
}
return res
- def get_allowed_detectors_list(self, request, remote_ip):
+ def get_allowed_detectors(self, request, remote_ip):
validate.request_is_empty(request)
beamline = get_beamline(remote_ip)
@@ -259,15 +275,7 @@ def take_pedestal(self, request, remote_ip):
time_to_wait = PEDESTAL_FRAMES / 100 * rate_multiplicator + 10
- res = {
- "status": "ok",
- "message": f"request to take pedestal sent, wait at least {time_to_wait} seconds",
- #TODO: are these needed?
- "run_number": str(0),
- "acquisition_number": str(0),
- "unique_acquisition_number": str(0)
- }
- return res
+ return f"request to take pedestal sent, wait at least {time_to_wait} seconds"
def retrieve_from_buffers(self, request, remote_ip):
@@ -305,9 +313,9 @@ def retrieve_from_buffers(self, request, remote_ip):
validate.directory_exists(daq_directory)
if "run_number" not in request:
- request["run_number"] = get_current_run_number(daq_directory)
+ request["run_number"] = get_run_number(daq_directory)
else:
- current_known_run_number = get_current_run_number(daq_directory, increment_run_number=False)
+ current_known_run_number = get_run_number(daq_directory, increment_run_number=False)
run_number = request.get("run_number")
validate.allowed_run_number(run_number, current_known_run_number)
@@ -330,11 +338,7 @@ def retrieve_from_buffers(self, request, remote_ip):
write_data = "channels_list" in request or "camera_list" in request or "pv_list" in request or "detectors" in request
if not write_data:
- res = {
- "status": "pass",
- "message": "request did not contain any channels to be written to file"
- }
- return res
+ raise RuntimeError("request did not contain any channels to be written to file")
request_detectors = request.get("detectors", {})
validate.request_detectors_is_dict(request_detectors)
@@ -362,7 +366,7 @@ def retrieve_from_buffers(self, request, remote_ip):
validate.directory_exists(output_data_directory)
current_acq = get_current_step_in_scan(meta_directory)
- unique_acq = get_current_run_number(daq_directory, file_run="LAST_ARUN")
+ unique_acq = get_run_number(daq_directory, file_run="LAST_ARUN")
request["beamline"] = beamline
request["acquisition_number"] = current_acq
@@ -518,7 +522,7 @@ def send_write_request(tag, channels, filename_suffix):
res = {
"status": "ok",
- "message": "OK",
+ "message": "request(s) to retrieve data sent",
"run_number": str(run_number),
"acquisition_number": str(current_acq),
"unique_acquisition_number": str(unique_acq),
@@ -546,7 +550,7 @@ def clean_last_character_user_tag(user_tag, replacement_character="_"):
return user_tag
-def get_current_run_number(daq_directory=None, file_run="LAST_RUN", increment_run_number=True):
+def get_run_number(daq_directory=None, file_run="LAST_RUN", increment_run_number=True):
if daq_directory is None:
return None
diff --git a/sf_daq_broker/broker_manager_slow.py b/sf_daq_broker/broker_manager_slow.py
index e0248df..8262961 100644
--- a/sf_daq_broker/broker_manager_slow.py
+++ b/sf_daq_broker/broker_manager_slow.py
@@ -80,6 +80,7 @@ def get_detector_settings(self, request, remote_ip):
res = {
"status": "ok",
+ "message": f"successfully retrieved detector settings from {detector_name}",
"exptime": exptime,
"detector_mode": detector_mode,
"delay": delay,
@@ -148,7 +149,7 @@ def set_detector_settings(self, request, remote_ip):
event_code_pv.put(254)
event_code_pv.disconnect()
- return "detector settings changed successfully"
+    return f"successfully changed detector settings of {detector_name}"
def copy_user_files(self, request, remote_ip):
@@ -215,7 +216,13 @@ def get_dap_settings(self, request, remote_ip):
validate.dap_parameters_file_exists(dap_parameters_file)
dap_config = json_load(dap_parameters_file)
- return dap_config
+
+ res = {
+ "status": "ok",
+ "message": f"successfully retrieved DAP settings for {detector_name}",
+ "dap_settings": dap_config
+ }
+ return res
def set_dap_settings(self, request, remote_ip):
@@ -260,7 +267,12 @@ def set_dap_settings(self, request, remote_ip):
shutil.copyfile(f"{backup_directory}/pipeline_parameters.{detector_name}.json.{date_now_str}", dap_parameters_file)
raise RuntimeError(f"could not update DAP configuration {dueto(e)}") from e
- return changed_parameters
+ res = {
+ "status": "ok",
+ "message": f"successfully changed DAP settings for {detector_name}",
+ "changed_parameters": changed_parameters
+ }
+ return res
diff --git a/sf_daq_broker/broker_slow.py b/sf_daq_broker/broker_slow.py
index 7484a07..b7f6433 100644
--- a/sf_daq_broker/broker_slow.py
+++ b/sf_daq_broker/broker_slow.py
@@ -13,14 +13,14 @@
ENDPOINTS_POST = [
- "get_detector_settings",
"set_detector_settings",
"copy_user_files",
- "get_dap_settings",
"set_dap_settings"
]
ENDPOINTS_GET = [
+ "get_dap_settings",
+ "get_detector_settings",
"get_detector_temperatures"
]
diff --git a/sf_daq_broker/rest_api/error_handler.py b/sf_daq_broker/rest_api/error_handler.py
index 59febb7..cc3af96 100644
--- a/sf_daq_broker/rest_api/error_handler.py
+++ b/sf_daq_broker/rest_api/error_handler.py
@@ -16,13 +16,14 @@ def error_handler_500(error):
bottle.response.content_type = "application/json"
bottle.response.status = 200
- error_text = str(error.exception)
+ exc = error.exception
- _logger.error(error_text)
+ _logger.exception("Internal Server Error (500)", exc_info=exc)
return json_obj_to_str({
- "state": "error",
- "status": error_text
+ "status": "error",
+ "message": str(exc),
+ "exception": type(exc).__name__
})
diff --git a/sf_daq_broker/rest_api/return_status.py b/sf_daq_broker/rest_api/return_status.py
index 1727a8e..1b6b96a 100644
--- a/sf_daq_broker/rest_api/return_status.py
+++ b/sf_daq_broker/rest_api/return_status.py
@@ -8,7 +8,7 @@ def wrapper(*args, **kwargs):
res = func(*args, **kwargs)
except Exception as e:
return {
- "status": "failed",
+ "status": "error",
"message": str(e),
"exception": type(e).__name__
}
@@ -58,9 +58,9 @@ def test_dict_status_extra():
return {"status": "stat", "message": "already a dict", "other": "whatever"}
- assert test_str_works(123) == {"status": "ok", "message": "test works 123"}
- assert test_str_fails(123) == {"status": "failed", "message": "test fails 123", "exception": "ValueError"}
- assert test_str_works() == {"status": "failed", "message": "test_str_works() missing 1 required positional argument: 'x'", "exception": "TypeError"} # pylint: disable=no-value-for-parameter
+ assert test_str_works(123) == {"status": "ok", "message": "test works 123"}
+ assert test_str_fails(123) == {"status": "error", "message": "test fails 123", "exception": "ValueError"}
+ assert test_str_works() == {"status": "error", "message": "test_str_works() missing 1 required positional argument: 'x'", "exception": "TypeError"} # pylint: disable=no-value-for-parameter
assert test_dict_nostatus() == {"status": "ok", "message": "already a dict"}
assert test_dict_status() == {"status": "stat", "message": "already a dict"}
diff --git a/tests/test_broker.py b/tests/test_broker.py
index 7461ff1..e65e6f1 100644
--- a/tests/test_broker.py
+++ b/tests/test_broker.py
@@ -9,10 +9,9 @@
ENDPOINTS_GET = [
- "get_allowed_detectors_list",
- "get_running_detectors_list",
- "get_next_run_number",
- "get_last_run_number",
+ "get_allowed_detectors",
+ "get_running_detectors",
+ "get_current_run_number",
"get_pvlist"
]
@@ -70,7 +69,7 @@ def test_response_status_get_endpoints(self):
for ep in ENDPOINTS_GET:
response = requests.get(f"{self.address}/{ep}")
status = response.json()["status"]
- self.assertIn(status, ["ok", "failed"])
+ self.assertIn(status, ["ok", "error"])