diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index b50d6f56..23d9b2ea 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -29,6 +29,7 @@ jobs:
     strategy:
       matrix:
         python-version: ["3.11", "3.12"]
+    timeout-minutes: 30
     steps:
       - uses: actions/checkout@v4
 
diff --git a/.github/workflows/min-deps.yml b/.github/workflows/min-deps.yml
index c236a9ef..c1e13c78 100644
--- a/.github/workflows/min-deps.yml
+++ b/.github/workflows/min-deps.yml
@@ -29,6 +29,7 @@ jobs:
     strategy:
       matrix:
         python-version: ["3.12"]
+    timeout-minutes: 30
     steps:
       - uses: actions/checkout@v4
 
diff --git a/.github/workflows/upstream.yml b/.github/workflows/upstream.yml
index 74867ea5..e07e5163 100644
--- a/.github/workflows/upstream.yml
+++ b/.github/workflows/upstream.yml
@@ -29,6 +29,7 @@ jobs:
     strategy:
       matrix:
         python-version: ["3.12"]
+    timeout-minutes: 30
     steps:
       - uses: actions/checkout@v4
 
diff --git a/conftest.py b/conftest.py
index 0781c37e..2e8fc73a 100644
--- a/conftest.py
+++ b/conftest.py
@@ -103,7 +103,11 @@ def netcdf4_virtual_dataset(netcdf4_file):
 def netcdf4_inlined_ref(netcdf4_file):
     from kerchunk.hdf import SingleHdf5ToZarr
 
-    return SingleHdf5ToZarr(netcdf4_file, inline_threshold=1000).translate()
+    return SingleHdf5ToZarr(
+        netcdf4_file,
+        inline_threshold=1000,
+        storage_options={"use_listings_cache": False},
+    ).translate()
 
 
 @pytest.fixture
diff --git a/virtualizarr/tests/test_readers/conftest.py b/virtualizarr/tests/test_readers/conftest.py
index 4884db4a..9f3e20af 100644
--- a/virtualizarr/tests/test_readers/conftest.py
+++ b/virtualizarr/tests/test_readers/conftest.py
@@ -372,18 +372,22 @@ def scalar_fill_value_hdf5_file(tmpdir):
 compound_fill = (-9999, -9999.0)
 
 fill_values = [
-    {"fill_value": -9999, "data": np.random.randint(0, 10, size=(5))},
-    {"fill_value": -9999.0, "data": np.random.random(5)},
-    {"fill_value": np.nan, "data": np.random.random(5)},
-    {"fill_value": False, "data": np.array([True, False, False, True, True])},
-    {"fill_value": "NaN", "data": np.array(["three"], dtype="S10")},
-    {"fill_value": compound_fill, "data": compound_data},
+    {"label": "int", "fill_value": -9999, "data": np.random.randint(0, 10, size=(5))},
+    {"label": "float", "fill_value": -9999.0, "data": np.random.random(5)},
+    {"label": "npNan", "fill_value": np.nan, "data": np.random.random(5)},
+    {
+        "label": "False",
+        "fill_value": False,
+        "data": np.array([True, False, False, True, True]),
+    },
+    {"label": "NaN", "fill_value": "NaN", "data": np.array(["three"], dtype="S10")},
+    {"label": "compound", "fill_value": compound_fill, "data": compound_data},
 ]
 
 
 @pytest.fixture(params=fill_values)
 def cf_fill_value_hdf5_file(tmpdir, request):
-    filepath = f"{tmpdir}/cf_fill_value.nc"
+    filepath = tmpdir / f"cf_fill_value_{request.param['label']}.nc"
     f = h5py.File(filepath, "w")
     dset = f.create_dataset(name="data", data=request.param["data"], chunks=True)
     dim_scale = f.create_dataset(