Test against datasette>=1.0a8 #101

Draft: wants to merge 2 commits into main
17 changes: 7 additions & 10 deletions .github/workflows/test.yml
@@ -7,23 +7,20 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        datasette-version: ["<1.0", ">=1.0a8"]
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
+      uses: actions/setup-python@v5
       with:
         python-version: ${{ matrix.python-version }}
-    - uses: actions/cache@v2
-      name: Configure pip caching
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-
+        cache: pip
+        cache-dependency-path: setup.py
     - name: Install dependencies
       run: |
         pip install -e '.[test]'
+        pip install "datasette${{ matrix.datasette-version }}"
     - name: Run tests
       run: |
         pytest
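
The matrix above runs every Python version against both a pre-1.0 Datasette and the 1.0 alphas. Purely as an illustration (not part of this diff), one way to tell the two legs apart at runtime is to parse the installed version; the plugin code in this PR instead relies on hasattr() feature detection, which avoids version parsing altogether:

    # Hedged sketch, not part of the diff; assumes the "packaging" library is installed.
    from importlib.metadata import version
    from packaging.version import parse

    # True on the "<1.0" leg of the matrix, False on ">=1.0a8"
    DATASETTE_IS_PRE_1_0 = parse(version("datasette")) < parse("1.0a1")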
36 changes: 28 additions & 8 deletions datasette_graphql/utils.py
@@ -93,9 +93,23 @@ async def schema_for_database(datasette, database=None):
     db = datasette.get_database(database)
     hidden_tables = await db.hidden_table_names()
 
+    # Load table_configs using appropriate method
+    table_configs = {}
+    if hasattr(datasette, "table_config"):
+        for table_name in await db.table_names():
+            table_configs[table_name] = await datasette.table_config(
+                database, table_name
+            )
+    else:
+        # Get it from metadata instead
+        for table_name in await db.table_names():
+            table_configs[table_name] = datasette.table_metadata(
+                table=table_name, database=database
+            )
+
     # Perform all introspection in a single call to the execute_fn thread
     table_metadata = await db.execute_fn(
-        lambda conn: introspect_tables(conn, datasette, db.name)
+        lambda conn: introspect_tables(conn, table_configs)
     )
 
     # Construct the tableFilter classes
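
For context on what the new table_configs dict looks like by the time it reaches introspect_tables(): a minimal, hedged illustration assuming one table with full-text search configured. The "fts_table" key is the one the introspection code reads for supports_fts; the other values are invented for the example.

    # Illustrative shape only; real values come from datasette.table_config()
    # on Datasette 1.0a+ or datasette.table_metadata() on older versions.
    table_configs = {
        "repos": {"fts_table": "repos_fts", "fts_pk": "id"},
        "users": {},  # tables with no configuration resolve to an empty dict
    }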
@@ -270,7 +284,7 @@ class _TableCollection(graphene.ObjectType):
     edges = graphene.List(_Edge)
 
     def resolve_totalCount(parent, info):
-        return parent["filtered_table_rows_count"]
+        return parent.get("filtered_table_rows_count") or parent.get("count")
 
     def resolve_nodes(parent, info):
         return parent["rows"]
@@ -550,6 +564,7 @@ async def resolve_table(
     if after:
         qs["_next"] = after
     qs["_size"] = first
+    qs["_extra"] = "count"
 
     if search and meta.supports_fts:
         qs["_search"] = search
@@ -593,7 +608,14 @@ async def resolve_table(
     )
 
     data = (await datasette.client.get(path_with_query_string)).json()
-    data["rows"] = [dict(zip(data["columns"], row)) for row in data["rows"]]
+
+    if (
+        "columns" in data
+        and "rows" in data
+        and data["rows"]
+        and isinstance(data["rows"][0], list)
+    ):
+        data["rows"] = [dict(zip(data["columns"], row)) for row in data["rows"]]
     # If any cells are $base64, decode them into bytes objects
     for row in data["rows"]:
         for key, value in row.items():
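
The new guard only rewrites rows when they arrive as lists of values, the shape the old code assumed from pre-1.0 Datasette; the 1.0 alphas already return each row as a dict, so those pass through untouched. A standalone sketch of the normalisation, with invented values:

    columns = ["id", "name"]
    rows = [[1, "datasette"], [2, "sqlite-utils"]]  # pre-1.0 shape: list of lists
    rows = [dict(zip(columns, row)) for row in rows]
    assert rows == [{"id": 1, "name": "datasette"}, {"id": 2, "name": "sqlite-utils"}]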
@@ -659,7 +681,7 @@ def path_from_row_pks(row, pks, use_rowid, quote=True):
     return ",".join(bits)
 
 
-def introspect_tables(conn, datasette, db_name):
+def introspect_tables(conn, table_configs):
     db = sqlite_utils.Database(conn)
 
     table_names = db.table_names()
@@ -669,13 +691,11 @@
     table_namer = Namer("t")
 
     for table in table_names + view_names:
-        datasette_table_metadata = datasette.table_metadata(
-            table=table, database=db_name
-        )
+        datasette_table_config = table_configs.get(table) or {}
         columns = db[table].columns_dict
         foreign_keys = []
         pks = []
-        supports_fts = bool(datasette_table_metadata.get("fts_table"))
+        supports_fts = bool(datasette_table_config.get("fts_table"))
         fks_back = []
         if hasattr(db[table], "foreign_keys"):
             # Views don't have .foreign_keys
4 changes: 1 addition & 3 deletions tests/test_graphql.py
@@ -536,9 +536,7 @@ async def test_time_limit_ms(db_path):
     assert response_json["data"] == {"repos": None}
     assert len(response_json["errors"]) == 1
     assert response_json["errors"][0]["message"].startswith("Time limit exceeded: ")
-    assert response_json["errors"][0]["message"].endswith(
-        " > 0.1ms - /test/repos.json?_nofacet=1&_size=10&_search=dogspotter"
-    )
+    assert " > 0.1ms " in response_json["errors"][0]["message"]
 
 
 @pytest.mark.asyncio