
Commit 775164f

Merge branch 'main' into 3269-docs-rfc-update-the-readme-in-the-timescaledb-github-repo-to-match-the-pgai-docs
billy-the-fish authored Dec 5, 2024
2 parents 4cc15b3 + f1d201e commit 775164f
Showing 13 changed files with 1,651 additions and 1,298 deletions.
647 changes: 333 additions & 314 deletions test/expected/append-14.out

Large diffs are not rendered by default.

649 changes: 334 additions & 315 deletions test/expected/append-15.out

Large diffs are not rendered by default.

649 changes: 334 additions & 315 deletions test/expected/append-16.out

Large diffs are not rendered by default.

649 changes: 334 additions & 315 deletions test/expected/append-17.out

Large diffs are not rendered by default.

12 changes: 1 addition & 11 deletions test/runner.sh
@@ -148,14 +148,4 @@ ${PSQL} -U ${TEST_PGUSER} \
 -v TSL_MODULE_PATHNAME="'timescaledb-tsl-${EXT_VERSION}'" \
 -v TEST_SUPPORT_FILE=${TEST_SUPPORT_FILE} \
 -v TEST_SUPPORT_FILE_INIT=${TEST_SUPPORT_FILE_INIT} \
-"$@" -d ${TEST_DBNAME} 2>&1 | \
-sed -e '/<exclude_from_test>/,/<\/exclude_from_test>/d' \
-    -e 's! Memory: [0-9]\{1,\}kB!!' \
-    -e 's! Memory Usage: [0-9]\{1,\}kB!!' \
-    -e 's! Average Peak Memory: [0-9]\{1,\}kB!!' | \
-grep -v 'DEBUG: rehashing catalog cache id' | \
-grep -v 'DEBUG: compacted fsync request queue from' | \
-grep -v 'DEBUG: creating and filling new WAL file' | \
-grep -v 'DEBUG: done creating and filling new WAL file' | \
-grep -v 'DEBUG: flushed relation because a checkpoint occurred concurrently' | \
-grep -v 'NOTICE: cancelling the background worker for job'
+"$@" -d ${TEST_DBNAME} 2>&1 | ${CURRENT_DIR}/runner_cleanup_output.sh
30 changes: 30 additions & 0 deletions test/runner_cleanup_output.sh
@@ -0,0 +1,30 @@
#!/usr/bin/env bash

set -u
set -e

RUNNER=${1:-""}

sed -e '/<exclude_from_test>/,/<\/exclude_from_test>/d' \
-e 's! Memory: [0-9]\{1,\}kB!!' \
-e 's! Memory Usage: [0-9]\{1,\}kB!!' \
-e 's! Average Peak Memory: [0-9]\{1,\}kB!!' | \
grep -v 'DEBUG: rehashing catalog cache id' | \
grep -v 'DEBUG: compacted fsync request queue from' | \
grep -v 'DEBUG: creating and filling new WAL file' | \
grep -v 'DEBUG: done creating and filling new WAL file' | \
grep -v 'DEBUG: flushed relation because a checkpoint occurred concurrently' | \
grep -v 'NOTICE: cancelling the background worker for job' | \
if [ "${RUNNER}" = "shared" ]; then \
sed -e '/^-\{1,\}$/d' \
-e 's!_[0-9]\{1,\}_[0-9]\{1,\}_chunk!_X_X_chunk!g' \
-e 's!^ \{1,\}QUERY PLAN \{1,\}$!QUERY PLAN!'; \
else \
cat; \
fi | \
if [ "${RUNNER}" = "isolation" ]; then \
sed -e 's!_[0-9]\{1,\}_[0-9]\{1,\}_chunk!_X_X_chunk!g' \
-e 's!hypertable_[0-9]\{1,\}!hypertable_X!g'; \
else \
cat; \
fi
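
To see what the new script does end to end, here is a quick smoke test; it is an illustration only (the fabricated plan lines and the invocation below are not part of this commit):

# Illustration only: run two fabricated output lines through the
# cleanup script in "shared" mode.
printf '%s\n' \
    'DEBUG: rehashing catalog cache id 14' \
    'Index Scan on _hyper_1_3_chunk Memory: 25kB' \
    | ./runner_cleanup_output.sh "shared"
# The DEBUG line is dropped by grep -v; the remaining line prints as
# 'Index Scan on _hyper_X_X_chunk', with the memory figure stripped and
# the chunk ids anonymized.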
6 changes: 2 additions & 4 deletions test/runner_isolation.sh
@@ -7,6 +7,7 @@

 set -e
 set -u
+CURRENT_DIR=$(dirname $0)

 ISOLATIONTEST=$1
 shift
@@ -18,7 +19,4 @@ shift
 # the chunk numbers influence the names of indexes if they are long enough to be
 # truncated, so the only way to get a stable explain output is to run such a test
 # in a separate database.
-$ISOLATIONTEST "$@" | \
-    sed -e 's!_[0-9]\{1,\}_[0-9]\{1,\}_chunk!_X_X_chunk!g' \
-        -e 's!hypertable_[0-9]\{1,\}!hypertable_X!g'
-
+$ISOLATIONTEST "$@" | ${CURRENT_DIR}/runner_cleanup_output.sh "isolation"
16 changes: 1 addition & 15 deletions test/runner_shared.sh
@@ -79,18 +79,4 @@ ${PSQL} -U ${TEST_PGUSER} \
 -v ROLE_DEFAULT_PERM_USER_2=${TEST_ROLE_DEFAULT_PERM_USER_2} \
 -v MODULE_PATHNAME="'timescaledb-${EXT_VERSION}'" \
 -v TSL_MODULE_PATHNAME="'timescaledb-tsl-${EXT_VERSION}'" \
-"$@" -d ${TEST_DBNAME} 2>&1 | \
-sed -e '/<exclude_from_test>/,/<\/exclude_from_test>/d' \
-    -e 's!_[0-9]\{1,\}_[0-9]\{1,\}_chunk!_X_X_chunk!g' \
-    -e 's!^ \{1,\}QUERY PLAN \{1,\}$!QUERY PLAN!' \
-    -e 's!: actual rows!: actual rows!' \
-    -e '/^-\{1,\}$/d' \
-    -e 's! Memory: [0-9]\{1,\}kB!!' \
-    -e 's! Memory Usage: [0-9]\{1,\}kB!!' \
-    -e 's! Average Peak Memory: [0-9]\{1,\}kB!!' | \
-grep -v 'DEBUG: rehashing catalog cache id' | \
-grep -v 'DEBUG: compacted fsync request queue from' | \
-grep -v 'DEBUG: creating and filling new WAL file' | \
-grep -v 'DEBUG: done creating and filling new WAL file' | \
-grep -v 'DEBUG: flushed relation because a checkpoint occurred concurrently' | \
-grep -v 'NOTICE: cancelling the background worker for job'
+"$@" -d ${TEST_DBNAME} 2>&1 | ${CURRENT_DIR}/runner_cleanup_output.sh "shared"
14 changes: 8 additions & 6 deletions test/sql/include/append_load.sql
@@ -40,6 +40,7 @@ INSERT INTO append_test VALUES ('2017-03-22T09:18:22', 23.5, 1, '{"a": 1, "b": 2
 ('2017-05-22T09:18:22', 36.2, 2, '{"c": 3, "b": 2}'),
 ('2017-05-22T09:18:23', 15.2, 2, '{"c": 3}'),
 ('2017-08-22T09:18:22', 34.1, 3, '{"c": 4}');
+VACUUM (ANALYZE) append_test;

 -- Create another hypertable to join with
 CREATE TABLE join_test(time timestamptz, temp float, colorid integer);
@@ -48,25 +49,27 @@ SELECT create_hypertable('join_test', 'time', chunk_time_interval => 26280000000
 INSERT INTO join_test VALUES ('2017-01-22T09:18:22', 15.2, 1),
 ('2017-02-22T09:18:22', 24.5, 2),
 ('2017-08-22T09:18:22', 23.1, 3);
+VACUUM (ANALYZE) join_test;

 -- Create another table to join with which is not a hypertable.
 CREATE TABLE join_test_plain(time timestamptz, temp float, colorid integer, attr jsonb);

 INSERT INTO join_test_plain VALUES ('2017-01-22T09:18:22', 15.2, 1, '{"a": 1}'),
 ('2017-02-22T09:18:22', 24.5, 2, '{"b": 2}'),
 ('2017-08-22T09:18:22', 23.1, 3, '{"c": 3}');
+VACUUM (ANALYZE) join_test_plain;

 -- create hypertable with DATE time dimension
 CREATE TABLE metrics_date(time DATE NOT NULL);
 SELECT create_hypertable('metrics_date','time');
 INSERT INTO metrics_date SELECT generate_series('2000-01-01'::date, '2000-02-01'::date, '5m'::interval);
-ANALYZE metrics_date;
+VACUUM (ANALYZE) metrics_date;

 -- create hypertable with TIMESTAMP time dimension
 CREATE TABLE metrics_timestamp(time TIMESTAMP NOT NULL);
 SELECT create_hypertable('metrics_timestamp','time');
 INSERT INTO metrics_timestamp SELECT generate_series('2000-01-01'::date, '2000-02-01'::date, '5m'::interval);
-ANALYZE metrics_timestamp;
+VACUUM (ANALYZE) metrics_timestamp;

 -- create hypertable with TIMESTAMPTZ time dimension
 CREATE TABLE metrics_timestamptz(time TIMESTAMPTZ NOT NULL, device_id INT NOT NULL);
@@ -75,7 +78,7 @@ SELECT create_hypertable('metrics_timestamptz','time');
 INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000-02-01'::date, '5m'::interval), 1;
 INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000-02-01'::date, '5m'::interval), 2;
 INSERT INTO metrics_timestamptz SELECT generate_series('2000-01-01'::date, '2000-02-01'::date, '5m'::interval), 3;
-ANALYZE metrics_timestamptz;
+VACUUM (ANALYZE) metrics_timestamptz;

 -- create space partitioned hypertable
 CREATE TABLE metrics_space(time timestamptz NOT NULL, device_id int NOT NULL, v1 float, v2 float, v3 text);
@@ -87,7 +90,7 @@ FROM generate_series('2000-01-01'::timestamptz, '2000-01-14'::timestamptz, '5m':
 generate_series(1,10,1) g2(device_id)
 ORDER BY time, device_id;

-ANALYZE metrics_space;
+VACUUM (ANALYZE) metrics_space;

 -- test ChunkAppend projection #2661
 CREATE TABLE i2661 (
@@ -99,5 +102,4 @@ CREATE TABLE i2661 (
 SELECT create_hypertable('i2661', 'timestamp');

 INSERT INTO i2661 SELECT 1, 'speed', generate_series('2019-12-31 00:00:00', '2020-01-10 00:00:00', '2m'::interval), 0;
-ANALYZE i2661;
-
+VACUUM (ANALYZE) i2661;
3 changes: 2 additions & 1 deletion test/sql/include/append_query.sql
@@ -320,6 +320,7 @@ SELECT time, device_id
 FROM generate_series('2000-01-01'::timestamptz,'2000-01-21','30m') g1(time),
 generate_series(1,10,1) g2(device_id)
 ORDER BY time, device_id;
+VACUUM (ANALYZE) join_limit;

 -- get 2nd chunk oid
 SELECT tableoid AS "CHUNK_OID" FROM join_limit WHERE time > '2000-01-07' ORDER BY time LIMIT 1
@@ -353,7 +354,7 @@ CREATE TABLE i3030(time timestamptz NOT NULL, a int, b int);
 SELECT table_name FROM create_hypertable('i3030', 'time', create_default_indexes=>false);
 CREATE INDEX ON i3030(a,time);
 INSERT INTO i3030 (time,a) SELECT time, a FROM generate_series('2000-01-01'::timestamptz,'2000-01-01 3:00:00'::timestamptz,'1min'::interval) time, generate_series(1,30) a;
-ANALYZE i3030;
+VACUUM (ANALYZE) i3030;

 :PREFIX SELECT * FROM i3030 where time BETWEEN '2000-01-01'::text::timestamptz AND '2000-01-03'::text::timestamptz ORDER BY a,time LIMIT 1;
 DROP TABLE i3030;
189 changes: 189 additions & 0 deletions tsl/test/expected/hypercore_constraints.out
@@ -0,0 +1,189 @@
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
\c :TEST_DBNAME :ROLE_SUPERUSER;
\ir include/setup_hypercore.sql
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
\set hypertable readings
\ir hypercore_helpers.sql
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
-- Function to run an EXPLAIN ANALYZE and do replacements on the
-- emitted plan. This is intended to be used when the structure of the
-- plan is important, but not the specific chunks scanned nor the
-- number of heap fetches, rows, loops, etc.
create function explain_analyze_anonymize(text) returns setof text
language plpgsql as
$$
declare
ln text;
begin
for ln in
execute format('explain (analyze, costs off, summary off, timing off, decompress_cache_stats) %s', $1)
loop
if trim(both from ln) like 'Group Key:%' then
continue;
end if;
ln := regexp_replace(ln, 'Array Cache Hits: \d+', 'Array Cache Hits: N');
ln := regexp_replace(ln, 'Array Cache Misses: \d+', 'Array Cache Misses: N');
ln := regexp_replace(ln, 'Array Cache Evictions: \d+', 'Array Cache Evictions: N');
ln := regexp_replace(ln, 'Heap Fetches: \d+', 'Heap Fetches: N');
ln := regexp_replace(ln, 'Workers Launched: \d+', 'Workers Launched: N');
ln := regexp_replace(ln, 'actual rows=\d+ loops=\d+', 'actual rows=N loops=N');
ln := regexp_replace(ln, '_hyper_\d+_\d+_chunk', '_hyper_I_N_chunk', 1, 0);
return next ln;
end loop;
end;
$$;
create function explain_anonymize(text) returns setof text
language plpgsql as
$$
declare
ln text;
begin
for ln in
execute format('explain (costs off, summary off, timing off) %s', $1)
loop
ln := regexp_replace(ln, 'Array Cache Hits: \d+', 'Array Cache Hits: N');
ln := regexp_replace(ln, 'Array Cache Misses: \d+', 'Array Cache Misses: N');
ln := regexp_replace(ln, 'Array Cache Evictions: \d+', 'Array Cache Evictions: N');
ln := regexp_replace(ln, 'Heap Fetches: \d+', 'Heap Fetches: N');
ln := regexp_replace(ln, 'Workers Launched: \d+', 'Workers Launched: N');
ln := regexp_replace(ln, 'actual rows=\d+ loops=\d+', 'actual rows=N loops=N');
ln := regexp_replace(ln, '_hyper_\d+_\d+_chunk', '_hyper_I_N_chunk', 1, 0);
return next ln;
end loop;
end;
$$;
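-- For illustration only (not part of this test file): the helpers
-- above are called with a query string, for example
--   select explain_analyze_anonymize($$select * from readings$$);
-- and they return the EXPLAIN output with chunk names, row counts,
-- and array-cache statistics replaced by placeholders, so the plan
-- shape can be compared across runs.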
create table :hypertable(
metric_id serial,
created_at timestamptz not null unique,
location_id smallint, --segmentby attribute with index
owner_id bigint, --segmentby attribute without index
device_id bigint, --non-segmentby attribute
temp float8,
humidity float4
);
create index hypertable_location_id_idx on :hypertable (location_id);
create index hypertable_device_id_idx on :hypertable (device_id);
select create_hypertable(:'hypertable', by_range('created_at'));
create_hypertable
-------------------
(1,t)
(1 row)

-- Disable incremental sort to make tests stable
set enable_incremental_sort = false;
select setseed(1);
setseed
---------

(1 row)

-- Insert rows into the tables.
--
-- The original rows are inserted with timestamps every five minutes
-- (matching the generate_series interval below). Any other timestamps
-- are inserted as part of the test.
insert into :hypertable (created_at, location_id, device_id, owner_id, temp, humidity)
select t, ceil(random()*10), ceil(random()*30), ceil(random() * 5), random()*40, random()*100
from generate_series('2022-06-01'::timestamptz, '2022-07-01', '5m') t;
alter table :hypertable set (
timescaledb.compress,
timescaledb.compress_orderby = 'created_at',
timescaledb.compress_segmentby = 'location_id, owner_id'
);
-- Get some test chunks as global variables (first and last chunk here)
select format('%I.%I', chunk_schema, chunk_name)::regclass as chunk1
from timescaledb_information.chunks
where format('%I.%I', hypertable_schema, hypertable_name)::regclass = :'hypertable'::regclass
order by chunk1 asc
limit 1 \gset
select format('%I.%I', chunk_schema, chunk_name)::regclass as chunk2
from timescaledb_information.chunks
where format('%I.%I', hypertable_schema, hypertable_name)::regclass = :'hypertable'::regclass
order by chunk2 asc
limit 1 offset 1 \gset
-- Drop the unique constraint and replace it with an exclusion
-- constraint doing the same thing.
alter table :hypertable drop constraint readings_created_at_key;
alter table :hypertable add exclude (created_at with =);
create table sample (like :chunk1 including generated including defaults including constraints);
insert into sample(created_at, location_id, device_id, owner_id, temp, humidity)
values
('2022-06-01 00:01:23', 999, 666, 111, 3.14, 3.14),
('2022-06-01 00:02:23', 999, 666, 112, 3.14, 3.14),
('2022-06-01 00:03:23', 999, 666, 113, 3.14, 3.14),
('2022-06-01 00:04:23', 999, 666, 114, 3.14, 3.14);
insert into :chunk1(created_at, location_id, device_id, owner_id, temp, humidity)
select created_at, location_id, device_id, owner_id, temp, humidity from sample;
select compress_chunk(show_chunks(:'hypertable'), hypercore_use_access_method => true);
compress_chunk
----------------------------------------
_timescaledb_internal._hyper_1_1_chunk
_timescaledb_internal._hyper_1_2_chunk
_timescaledb_internal._hyper_1_3_chunk
_timescaledb_internal._hyper_1_4_chunk
_timescaledb_internal._hyper_1_5_chunk
_timescaledb_internal._hyper_1_6_chunk
(6 rows)

-- These should fail the exclusion constraint
\set ON_ERROR_STOP 0
insert into :hypertable(created_at, location_id, device_id, owner_id, temp, humidity)
select created_at, location_id, device_id, owner_id, temp, humidity from sample;
ERROR: conflicting key value violates exclusion constraint "1_7_readings_created_at_excl"
insert into :chunk1(created_at, location_id, device_id, owner_id, temp, humidity)
select created_at, location_id, device_id, owner_id, temp, humidity from sample;
ERROR: conflicting key value violates exclusion constraint "1_7_readings_created_at_excl"
\set ON_ERROR_STOP 1
create table test_exclude(
created_at timestamptz not null unique,
device_id bigint,
humidity numrange
);
select create_hypertable('test_exclude', by_range('created_at'));
create_hypertable
-------------------
(3,t)
(1 row)

create or replace function randrange() returns numrange as $$
declare
start numeric := 100.0 * random()::numeric;
begin
return numrange(start, start + random()::numeric);
end;
$$ language plpgsql;
-- Insert a bunch of rows with a random humidity range.
insert into test_exclude (created_at, device_id, humidity)
select ts, ceil(random()*30), randrange()
from generate_series('2022-06-01'::timestamptz, '2022-07-01', '5m') ts;
-- Pick a chunk to work with.
select exclude_chunk from show_chunks('test_exclude') tbl(exclude_chunk) limit 1 \gset
-- Find all rows that are duplicates of a previous row.
select * into dups from :exclude_chunk o where (
select count(*)
from :exclude_chunk i
where i.created_at < o.created_at and i.humidity && o.humidity
) > 0;
-- Make sure we have some duplicates. Otherwise, the test does not work.
select count(*) > 0 from dups;
?column?
----------
t
(1 row)

-- Delete the duplicates.
delete from :exclude_chunk where created_at in (select created_at from dups);
-- Add an exclusion constraint.
alter table :exclude_chunk add constraint humidity_overlap exclude using gist (humidity with &&);
-- Make sure that inserting duplicates fails on the exclusion constraint.
\set ON_ERROR_STOP 0
insert into :exclude_chunk select * from dups limit 10;
ERROR: conflicting key value violates exclusion constraint "humidity_overlap"
insert into test_exclude select * from dups limit 10;
ERROR: conflicting key value violates exclusion constraint "humidity_overlap"
\set ON_ERROR_STOP 1
5 changes: 3 additions & 2 deletions tsl/test/sql/CMakeLists.txt
@@ -144,6 +144,7 @@ if((${PG_VERSION_MAJOR} GREATER_EQUAL "15"))
     TEST_FILES
     cagg_refresh_using_merge.sql
     hypercore_columnar.sql
+    hypercore_constraints.sql
     hypercore_copy.sql
     hypercore_create.sql
     hypercore_cursor.sql
@@ -162,8 +163,8 @@ if((${PG_VERSION_MAJOR} GREATER_EQUAL "15"))
     hypercore_types.sql
     hypercore_update.sql
     hypercore_vacuum.sql
-    merge_compress.sql
-    hypercore_vacuum_full.sql)
+    hypercore_vacuum_full.sql
+    merge_compress.sql)
 endif()

 if((${PG_VERSION_MAJOR} GREATER_EQUAL "16"))
