From aeaa8c3c39041fa088c4823348bd29a6f94c83ea Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 7 Feb 2024 13:12:43 -0800
Subject: [PATCH 1/2] Test against datasette 1.0a8 and higher, refs #99

---
 .github/workflows/test.yml | 17 +++++++----------
 1 file changed, 7 insertions(+), 10 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index c42f0d0..0a52f12 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -7,23 +7,20 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        datasette-version: ["<1.0", ">=1.0a8"]
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
+      uses: actions/setup-python@v5
       with:
         python-version: ${{ matrix.python-version }}
-    - uses: actions/cache@v2
-      name: Configure pip caching
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-
+        cache: pip
+        cache-dependency-path: setup.py
     - name: Install dependencies
       run: |
         pip install -e '.[test]'
+        pip install "datasette${{ matrix.datasette-version }}"
     - name: Run tests
       run: |
         pytest

From c32a5ad5e139c258b108ddaa8fbf82ec74da9a1f Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Wed, 7 Feb 2024 17:02:02 -0800
Subject: [PATCH 2/2] Most tests pass now on both Datasette versions

---
 datasette_graphql/utils.py | 36 ++++++++++++++++++++++++++++--------
 tests/test_graphql.py      |  4 +---
 2 files changed, 29 insertions(+), 11 deletions(-)

diff --git a/datasette_graphql/utils.py b/datasette_graphql/utils.py
index d38087a..4ffe871 100644
--- a/datasette_graphql/utils.py
+++ b/datasette_graphql/utils.py
@@ -93,9 +93,23 @@ async def schema_for_database(datasette, database=None):
     db = datasette.get_database(database)
     hidden_tables = await db.hidden_table_names()
 
+    # Load table_configs using appropriate method
+    table_configs = {}
+    if hasattr(datasette, "table_config"):
+        for table_name in await db.table_names():
+            table_configs[table_name] = await datasette.table_config(
+                database, table_name
+            )
+    else:
+        # Get it from metadata instead
+        for table_name in await db.table_names():
+            table_configs[table_name] = datasette.table_metadata(
+                table=table_name, database=database
+            )
+
     # Perform all introspection in a single call to the execute_fn thread
     table_metadata = await db.execute_fn(
-        lambda conn: introspect_tables(conn, datasette, db.name)
+        lambda conn: introspect_tables(conn, table_configs)
     )
 
     # Construct the tableFilter classes
@@ -270,7 +284,7 @@ class _TableCollection(graphene.ObjectType):
     edges = graphene.List(_Edge)
 
     def resolve_totalCount(parent, info):
-        return parent["filtered_table_rows_count"]
+        return parent.get("filtered_table_rows_count") or parent.get("count")
 
     def resolve_nodes(parent, info):
         return parent["rows"]
@@ -550,6 +564,7 @@ async def resolve_table(
     if after:
         qs["_next"] = after
     qs["_size"] = first
+    qs["_extra"] = "count"
 
     if search and meta.supports_fts:
         qs["_search"] = search
@@ -593,7 +608,14 @@ async def resolve_table(
     )
 
     data = (await datasette.client.get(path_with_query_string)).json()
-    data["rows"] = [dict(zip(data["columns"], row)) for row in data["rows"]]
+
+    if (
+        "columns" in data
+        and "rows" in data
+        and data["rows"]
+        and isinstance(data["rows"][0], list)
+    ):
+        data["rows"] = [dict(zip(data["columns"], row)) for row in data["rows"]]
     # If any cells are $base64, decode them into bytes objects
     for row in data["rows"]:
         for key, value in row.items():
@@ -659,7 +681,7 @@ def path_from_row_pks(row, pks, use_rowid, quote=True):
             return ",".join(bits)
 
 
-def introspect_tables(conn, datasette, db_name):
+def introspect_tables(conn, table_configs):
    db = sqlite_utils.Database(conn)
 
     table_names = db.table_names()
@@ -669,13 +691,11 @@ def introspect_tables(conn, datasette, db_name):
     table_namer = Namer("t")
 
     for table in table_names + view_names:
-        datasette_table_metadata = datasette.table_metadata(
-            table=table, database=db_name
-        )
+        datasette_table_config = table_configs.get(table) or {}
         columns = db[table].columns_dict
         foreign_keys = []
         pks = []
-        supports_fts = bool(datasette_table_metadata.get("fts_table"))
+        supports_fts = bool(datasette_table_config.get("fts_table"))
         fks_back = []
         if hasattr(db[table], "foreign_keys"):
             # Views don't have .foreign_keys
diff --git a/tests/test_graphql.py b/tests/test_graphql.py
index 5402ba8..96918e1 100644
--- a/tests/test_graphql.py
+++ b/tests/test_graphql.py
@@ -536,9 +536,7 @@ async def test_time_limit_ms(db_path):
     assert response_json["data"] == {"repos": None}
     assert len(response_json["errors"]) == 1
     assert response_json["errors"][0]["message"].startswith("Time limit exceeded: ")
-    assert response_json["errors"][0]["message"].endswith(
-        " > 0.1ms - /test/repos.json?_nofacet=1&_size=10&_search=dogspotter"
-    )
+    assert " > 0.1ms " in response_json["errors"][0]["message"]
 
 
 @pytest.mark.asyncio