Most tests pass now on both Datasette versions
simonw committed Feb 8, 2024 · 1 parent aeaa8c3 · commit c32a5ad
Showing 2 changed files with 29 additions and 11 deletions.
datasette_graphql/utils.py (36 changes: 28 additions & 8 deletions)
@@ -93,9 +93,23 @@ async def schema_for_database(datasette, database=None):
     db = datasette.get_database(database)
     hidden_tables = await db.hidden_table_names()
 
+    # Load table_configs using the method appropriate to this Datasette version
+    table_configs = {}
+    if hasattr(datasette, "table_config"):
+        for table_name in await db.table_names():
+            table_configs[table_name] = await datasette.table_config(
+                database, table_name
+            )
+    else:
+        # Older Datasette: get it from metadata instead
+        for table_name in await db.table_names():
+            table_configs[table_name] = datasette.table_metadata(
+                table=table_name, database=database
+            )
+
     # Perform all introspection in a single call to the execute_fn thread
     table_metadata = await db.execute_fn(
-        lambda conn: introspect_tables(conn, datasette, db.name)
+        lambda conn: introspect_tables(conn, table_configs)
     )
 
     # Construct the tableFilter classes
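
The added block feature-detects which configuration API is available: the Datasette 1.0 alphas expose an awaitable datasette.table_config() method, while earlier versions only offer the synchronous datasette.table_metadata(). A minimal sketch of the same pattern in isolation (the helper name is hypothetical; the two Datasette methods are the ones used in the diff):

    async def config_for_table(datasette, database, table_name):
        # Hypothetical helper mirroring the version check above
        if hasattr(datasette, "table_config"):
            # Datasette 1.0 alphas: awaitable per-table configuration
            return await datasette.table_config(database, table_name)
        # Pre-1.0 Datasette: synchronous lookup from metadata
        return datasette.table_metadata(table=table_name, database=database)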
@@ -270,7 +284,7 @@ class _TableCollection(graphene.ObjectType):
     edges = graphene.List(_Edge)
 
     def resolve_totalCount(parent, info):
-        return parent["filtered_table_rows_count"]
+        return parent.get("filtered_table_rows_count") or parent.get("count")
 
     def resolve_nodes(parent, info):
        return parent["rows"]
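
resolve_totalCount now accepts whichever count key is present: older Datasette table JSON uses filtered_table_rows_count, the 1.0 alphas use count. A self-contained illustration (sample values hypothetical):

    def total_count(parent):
        # Same expression as the resolver. Caveat: a value of 0 under the
        # first key is falsy, so the `or` falls through to the second lookup.
        return parent.get("filtered_table_rows_count") or parent.get("count")

    assert total_count({"filtered_table_rows_count": 42}) == 42  # pre-1.0 shape
    assert total_count({"count": 42}) == 42  # 1.0 alpha shape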
@@ -550,6 +564,7 @@ async def resolve_table(
     if after:
         qs["_next"] = after
     qs["_size"] = first
+    qs["_extra"] = "count"
 
     if search and meta.supports_fts:
         qs["_search"] = search
@@ -593,7 +608,14 @@ async def resolve_table(
     )
 
     data = (await datasette.client.get(path_with_query_string)).json()
-    data["rows"] = [dict(zip(data["columns"], row)) for row in data["rows"]]
+
+    if (
+        "columns" in data
+        and "rows" in data
+        and data["rows"]
+        and isinstance(data["rows"][0], list)
+    ):
+        data["rows"] = [dict(zip(data["columns"], row)) for row in data["rows"]]
     # If any cells are $base64, decode them into bytes objects
     for row in data["rows"]:
         for key, value in row.items():
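
Older Datasette returns rows as arrays alongside a separate columns list, while the 1.0 alphas return a list of dictionaries, so the zip re-keying now runs only when the first row is a plain list. A standalone sketch with hypothetical payloads for both shapes:

    def normalize_rows(data):
        # Re-key array-shaped rows; dict-shaped rows pass through untouched
        if (
            "columns" in data
            and "rows" in data
            and data["rows"]
            and isinstance(data["rows"][0], list)
        ):
            data["rows"] = [dict(zip(data["columns"], row)) for row in data["rows"]]
        return data

    pre_1_0 = {"columns": ["id", "name"], "rows": [[1, "datasette"]]}
    one_0 = {"rows": [{"id": 1, "name": "datasette"}]}
    assert normalize_rows(pre_1_0)["rows"] == normalize_rows(one_0)["rows"]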
@@ -659,7 +681,7 @@ def path_from_row_pks(row, pks, use_rowid, quote=True):
     return ",".join(bits)
 
 
-def introspect_tables(conn, datasette, db_name):
+def introspect_tables(conn, table_configs):
     db = sqlite_utils.Database(conn)
 
     table_names = db.table_names()
@@ -669,13 +691,11 @@ def introspect_tables(conn, datasette, db_name):
     table_namer = Namer("t")
 
     for table in table_names + view_names:
-        datasette_table_metadata = datasette.table_metadata(
-            table=table, database=db_name
-        )
+        datasette_table_config = table_configs.get(table) or {}
         columns = db[table].columns_dict
         foreign_keys = []
         pks = []
-        supports_fts = bool(datasette_table_metadata.get("fts_table"))
+        supports_fts = bool(datasette_table_config.get("fts_table"))
         fks_back = []
         if hasattr(db[table], "foreign_keys"):
             # Views don't have .foreign_keys
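
Note the new signature: introspect_tables() receives the pre-computed table_configs dict instead of the datasette object. The function runs inside db.execute_fn(), i.e. on a thread holding a raw SQLite connection, where the awaitable table_config() API cannot be called; gathering the configuration up front keeps the introspection synchronous and version-agnostic.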
tests/test_graphql.py (4 changes: 1 addition & 3 deletions)
@@ -536,9 +536,7 @@ async def test_time_limit_ms(db_path):
     assert response_json["data"] == {"repos": None}
     assert len(response_json["errors"]) == 1
     assert response_json["errors"][0]["message"].startswith("Time limit exceeded: ")
-    assert response_json["errors"][0]["message"].endswith(
-        " > 0.1ms - /test/repos.json?_nofacet=1&_size=10&_search=dogspotter"
-    )
+    assert " > 0.1ms " in response_json["errors"][0]["message"]
 
 
 @pytest.mark.asyncio
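
The loosened assertion is presumably for the same version split: the error message embeds the JSON URL that was fetched, and its query string now differs between Datasette versions (it gains _extra=count, for example), so the test only checks for the time-limit fragment.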
