
Commit

squash - Add tests for Delete Mode
alanking committed Oct 1, 2024
1 parent 5d7287d commit 7741dc5
Showing 1 changed file with 17 additions and 12 deletions.
29 changes: 17 additions & 12 deletions irods_capability_automated_ingest/test/test_delete_modes.py
@@ -20,15 +20,17 @@
from irods_capability_automated_ingest.utils import DeleteMode, Operation
import irods_capability_automated_ingest.examples

# TODO(#286): Derive from the environment?
# This must be set as an environment variable in order for the Celery workers to communicate with the broker.
# Update this value if the hostname, port, or database for the Redis service needs to change.
os.environ["CELERY_BROKER_URL"] = "redis://redis:6379/0"

DEFAULT_JOB_NAME = "test_irods_sync"

# These are useful to have as a global because delete modes only have an effect for sync operations.
non_sync_operations = [Operation.NO_OP, Operation.PUT]


def get_kwargs():
def get_test_irods_client_environment_dict():
# TODO(#286): Derive from the environment?
return {
"host": os.environ.get("IRODS_HOST"),
"port": os.environ.get("IRODS_PORT"),
@@ -38,9 +40,12 @@ def get_kwargs():
}


# This is a global in order to take advantage of "caching" the Redis configuration.
# Modify get_redis_config if changes are needed.
redis_config = {}


# TODO(#286): Derive from the environment?
def get_redis_config(host="redis", port=6379, db=0):
global redis_config
if redis_config:
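The TODO(#286) comments above point at deriving these connection settings from the environment rather than hard-coding them. A minimal sketch of what that could look like, using hypothetical REDIS_HOST/REDIS_PORT/REDIS_DB variable names that this repository does not define:

import os

def get_redis_config_from_environment():
    # Hypothetical variable names; not defined anywhere in this repository.
    host = os.environ.get("REDIS_HOST", "redis")
    port = int(os.environ.get("REDIS_PORT", "6379"))
    db = int(os.environ.get("REDIS_DB", "0"))
    return {"host": host, "port": port, "db": db}

# The Celery broker URL could then be derived from the same settings.
config = get_redis_config_from_environment()
os.environ.setdefault(
    "CELERY_BROKER_URL", f"redis://{config['host']}:{config['port']}/{config['db']}"
)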
@@ -70,7 +75,7 @@ def start_workers(n=2, args=[]):
return workers


def wait_for_job_to_finish(workers, job_name=DEFAULT_JOB_NAME, timeout=60):
def wait_for_job_to_finish(workers, job_name, timeout=60):
r = get_redis(get_redis_config())
t0 = time.time()
while timeout is None or time.time() - t0 < timeout:
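With this change, wait_for_job_to_finish no longer defaults the job name, so each test passes its own. A usage sketch under that assumption (worker count and timeout are illustrative):

workers = start_workers(n=2)
# The job name now always comes from the caller rather than DEFAULT_JOB_NAME.
wait_for_job_to_finish(workers, "test_delete_modes_job", timeout=60)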
@@ -94,7 +99,7 @@ def wait_for_job_to_finish(workers, job_name=DEFAULT_JOB_NAME, timeout=60):

def irmtrash():
# TODO(irods/python-irodsclient#182): Needs irmtrash endpoint
with iRODSSession(**get_kwargs()) as session:
with iRODSSession(**get_test_irods_client_environment_dict()) as session:
rods_trash_path = "/".join(
["", session.zone, "trash", "home", session.username]
)
@@ -104,7 +109,7 @@ def irmtrash():


def delete_collection_if_exists(coll, recurse=True, force=False):
with iRODSSession(**get_kwargs()) as session:
with iRODSSession(**get_test_irods_client_environment_dict()) as session:
if session.collections.exists(coll):
session.collections.remove(coll, recurse=recurse, force=force)

@@ -126,8 +131,8 @@ def setUpClass(cls):
]
)

cls.irods_session = iRODSSession(**get_kwargs())
cls.job_name = DEFAULT_JOB_NAME
cls.irods_session = iRODSSession(**get_test_irods_client_environment_dict())
cls.job_name = "test_delete_modes_job"

@classmethod
def tearDownClass(cls):
@@ -153,7 +158,7 @@ def create_directory(directory_dict, parent=None):
f.write(f"contents for {file}")

def setUp(self):
# TODO(#???): Derive /data mountpoint rather than hard-coding
# TODO(#286): Derive /data mountpoint rather than hard-coding
self.source_directory = tempfile.mkdtemp(dir="/data/ufs")
self.directory_tree = {
"name": self.source_directory,
@@ -338,7 +343,7 @@ def run_sync(

def assert_ingested_contents_exist_in_irods(self):
try:
# TODO(#???): There should be more assertions about the contents of the injested collection.
# TODO(#287): There should be more assertions about the contents of the ingested collection.
self.assertTrue(
self.irods_session.collections.exists(
self.target_subcollection_for_removal
Expand Down Expand Up @@ -425,7 +430,7 @@ def do_UNREGISTER_deletes_collections(self, operation):
self.source_directory, self.destination_collection, event_handler_path
)
self.assert_ingested_contents_exist_in_irods()
# TODO(#???): Run a query to get the physical path of the data object in the target subdirectory for removal.
# TODO(#287): Run a query to get the physical path of the data object in the target subdirectory for removal.
# Now delete a subdirectory from the source...
shutil.rmtree(self.target_subdirectory_for_removal)
# Run the job again (sync) and confirm that the deleted directory sync caused the collection to be deleted
@@ -436,7 +441,7 @@ def do_UNREGISTER_deletes_collections(self, operation):
self.assert_deleted_directory_resulted_in_deleted_collection(
collection_exists=False, collection_in_trash=False
)
# TODO(#???): Also confirm that the data remains in storage. The physical path is fetched beforehand.
# TODO(#287): Also confirm that the data remains in storage. The physical path is fetched beforehand.

def do_TRASH_or_NO_TRASH_deletes_collections(self, operation, delete_mode):
event_handler_contents = (
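For context, these tests generate event handler modules on the fly and select the delete mode through them. A minimal sketch in the style of irods_capability_automated_ingest.examples; the delete_mode hook name and signature are assumptions inferred from this test file, not confirmed by the diff:

from irods_capability_automated_ingest.core import Core
from irods_capability_automated_ingest.utils import DeleteMode, Operation

class event_handler(Core):
    @staticmethod
    def operation(session, meta, **options):
        # Delete modes only take effect for sync operations (see the note at the top of the file).
        return Operation.REGISTER_SYNC

    @staticmethod
    def delete_mode(meta):
        # Assumed hook for choosing how collections for removed source directories are handled.
        return DeleteMode.UNREGISTER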
