# delete.ipynb — confirmation page for deleting a QE app job.
#
# Caution: deleting a job also removes all associated nodes, including every
# calculation initiated by the job and their results. This is irreversible.

import urllib.parse as urlparse

import ipywidgets as widgets
from aiida import load_profile
from aiida.orm import load_node
from aiida.tools import delete_nodes
from IPython.display import Markdown, display

# Load the AiiDA profile so ORM calls below can reach the database.
load_profile()

# The target node's primary key arrives in the notebook URL query string,
# e.g. .../delete.ipynb?pk=1234. ``jupyter_notebook_url`` is injected into
# the namespace by appmode, hence the noqa.
url = urlparse.urlsplit(jupyter_notebook_url)  # noqa: F821
query = urlparse.parse_qs(url.query)
pk = int(query["pk"][0])


def display_node_details(pk):
    """Print a short summary of node *pk*.

    Returns True on success, False if the node cannot be loaded (the error
    is printed rather than raised so the page degrades gracefully).
    """
    try:
        node = load_node(pk)
    except Exception as exc:
        # Broad on purpose: any load failure (missing pk, DB error) aborts
        # the page with a message instead of a traceback.
        print(f"Error loading node: {exc}")
        return False
    print(f"Node ID: {node.pk}")
    print(f"Node Type: {node.process_label}")
    print(f"Label: {node.label}")
    print(f"Description: {node.description}")
    print(f"Creation Time: {node.ctime}")
    return True


def delete_node(pk, dry_run=True):
    """Delete node *pk* (and its provenance) via ``aiida.tools.delete_nodes``.

    With ``dry_run=True`` nothing is removed; we only report whether the
    node could be deleted.
    """
    _, was_deleted = delete_nodes([pk], dry_run=dry_run)
    if was_deleted:
        if dry_run:
            print(f"Dry run: Node {pk} can be deleted.")
        else:
            print(f"Node {pk} deleted successfully.")


def confirm_deletion(b):
    """Button callback: delete the node only if the user typed 'y' or 'yes'.

    ``delete_confirmation`` is the Text widget defined further down this cell.
    Leading/trailing whitespace in the answer is tolerated.
    """
    if delete_confirmation.value.strip().lower() in ("y", "yes"):  # noqa: F821
        delete_node(pk, dry_run=False)
    else:
        print("Deletion aborted.")


def find_linked_qeapp_jobs(root_node_pk, process_label="QeAppWorkChain"):
    """Query all workchains linked to *root_node_pk* whose process label matches.

    Returns the list of matches, or ``None`` when there are none.
    """
    from aiida.orm import Node, QueryBuilder
    from aiida.orm.nodes.process.workflow.workchain import WorkChainNode

    qb = QueryBuilder()
    qb.append(WorkChainNode, filters={"id": root_node_pk}, tag="root")
    # NOTE: there seems to be a bug with ``with_ancestors`` in the
    # QueryBuilder, so we use ``with_incoming`` instead. For the moment this
    # is safe because we run this check every time a QE app job is deleted.
    qb.append(Node, with_incoming="root", tag="calcjob")
    # Use the ``process_label`` parameter instead of hard-coding the value,
    # so callers can look for other workchain types.
    qb.append(
        WorkChainNode,
        filters={"attributes.process_label": process_label},
        with_incoming="calcjob",
    )
    results = qb.all()
    return results if results else None


if display_node_details(pk):
    linked_qeapp_jobs = find_linked_qeapp_jobs(pk)
    # NOTE(review): the remainder of this cell (the warning HTML rendered when
    # linked QeAppWorkChain jobs are found, and the confirmation-widget wiring
    # that registers ``confirm_deletion``) is truncated in this view — confirm
    # against the full notebook before relying on behavior past this point.
Copyright (c) 2023 AiiDAlab team (EPFL) Version: {__version__}
'\n", ")\n", "\n", + "url = urlparse.urlsplit(jupyter_notebook_url) # noqa F821\n", + "query = urlparse.parse_qs(url.query)\n", + "\n", + "\n", "app_with_work_chain_selector = App(qe_auto_setup=True)\n", + "# if a pk is provided in the query string, set it as the value of the work_chain_selector\n", + "if 'pk' in query:\n", + " pk = int(query['pk'][0])\n", + " app_with_work_chain_selector.work_chain_selector.value = pk\n", "\n", "output = ipw.Output()\n", "install_create_github_issue_exception_handler(\n", diff --git a/src/aiidalab_qe/app/submission/__init__.py b/src/aiidalab_qe/app/submission/__init__.py index 0dd1af82d..9f38741d2 100644 --- a/src/aiidalab_qe/app/submission/__init__.py +++ b/src/aiidalab_qe/app/submission/__init__.py @@ -401,6 +401,7 @@ def submit(self, _=None): process.base.extras.set("ui_parameters", serialize(self.ui_parameters)) # store the workchain name in extras, this will help to filter the workchain in the future process.base.extras.set("workchain", self.ui_parameters["workchain"]) + process.base.extras.set("structure", self.input_structure.get_formula()) self.process = process self._update_state() diff --git a/src/aiidalab_qe/app/utils/search_jobs.py b/src/aiidalab_qe/app/utils/search_jobs.py new file mode 100644 index 000000000..59f9d297c --- /dev/null +++ b/src/aiidalab_qe/app/utils/search_jobs.py @@ -0,0 +1,179 @@ +import ipywidgets as ipw +import pandas as pd +from aiida.orm import QueryBuilder +from aiidalab_qe.workflows import QeAppWorkChain +from IPython.display import display + + +class QueryInterface: + def __init__(self): + self.df = self.load_data() + self.table = ipw.HTML() + self.setup_widgets() + + def load_data(self): + projections = [ + "id", + "extras.structure", + "ctime", + "attributes.process_state", + "label", + "extras.workchain.relax_type", + "extras.workchain.properties", + ] + headers = [ + "PK", + "Structure", + "ctime", + "State", + "Label", + "Relax_type", + "Properties", + ] + + qb = QueryBuilder() + 
qb.append(QeAppWorkChain, project=projections, tag="process") + qb.order_by({"process": {"ctime": "desc"}}) + results = qb.all() + + df = pd.DataFrame(results, columns=headers) + for index, row in df.iterrows(): + df.at[index, "Creation time"] = row["ctime"].strftime("%Y-%m-%d %H:%M:%S") + df.at[index, "Delete"] = ( + f"""Delete""" + ) + df.at[index, "Inspect"] = ( + f"""Inspect""" + ) + return df[ + [ + "PK", + "Creation time", + "Structure", + "State", + "Label", + "Relax_type", + "Delete", + "Inspect", + "Properties", + "ctime", + ] + ] + + def setup_widgets(self): + self.css_style = """ + + """ + + unique_properties = set(self.df["Properties"].explode()) + unique_properties.discard(None) + property_checkboxes = [ + ipw.Checkbox( + value=False, + description=prop, + Layout=ipw.Layout(description_width="initial"), + indent=False, + ) + for prop in unique_properties + ] + self.properties_box = ipw.HBox( + children=property_checkboxes, description="Properties:" + ) + # Replace 'None' in 'Properties' with an empty list + self.df["Properties"] = self.df["Properties"].apply( + lambda x: [] if x is None else x + ) + self.job_state_dropdown = ipw.Dropdown( + options=["finished", "except", "killed"], + value="finished", + description="Job State:", + ) + self.label_search_field = ipw.Text( + value="", + placeholder="Enter label to search", + description="Search Label:", + disabled=False, + style={"description_width": "initial"}, + ) + self.time_start = ipw.DatePicker(description="Start Time:") + self.time_end = ipw.DatePicker(description="End Time:") + self.time_box = ipw.HBox([self.time_start, self.time_end]) + # self.apply_filters_btn = ipw.Button(description='Apply Filters') + # self.apply_filters_btn.on_click(self.apply_filters) + for cb in property_checkboxes: + cb.observe(self.apply_filters, names="value") + self.time_start.observe(self.apply_filters, names="value") + self.time_end.observe(self.apply_filters, names="value") + 
self.job_state_dropdown.observe(self.apply_filters, names="value") + self.label_search_field.observe(self.apply_filters, names="value") + + self.filters_layout = ipw.VBox( + [ + ipw.HTML("Preferences | +Utils |
---|---|