
Commit 8b58a11

Merge branch 'main' into 3269-docs-rfc-update-the-readme-in-the-timescaledb-github-repo-to-match-the-pgai-docs
pallavisontakke authored Dec 3, 2024
2 parents 567e3c1 + 8fe7241 commit 8b58a11
Showing 5 changed files with 21 additions and 11 deletions.
7 changes: 5 additions & 2 deletions src/ts_catalog/continuous_aggs_watermark.c
@@ -17,6 +17,7 @@
 #include <utils/snapmgr.h>
 
 #include "debug_point.h"
+#include "guc.h"
 #include "hypertable.h"
 #include "ts_catalog/continuous_agg.h"
 #include "ts_catalog/continuous_aggs_watermark.h"
@@ -321,8 +322,10 @@ ts_cagg_watermark_update(Hypertable *mat_ht, int64 watermark, bool watermark_isn
 
 	/* If we have a real-time CAgg, it uses a watermark function. So, we have to invalidate the rel
 	 * cache to force a replanning of prepared statements. See cagg_watermark_update_internal for
-	 * more information. */
-	bool invalidate_rel_cache = !cagg->data.materialized_only;
+	 * more information. If the GUC enable_cagg_watermark_constify=false then it's not necessary
+	 * to invalidate relation cache. */
+	bool invalidate_rel_cache =
+		!cagg->data.materialized_only && ts_guc_enable_cagg_watermark_constify;
 
 	watermark = cagg_compute_watermark(cagg, watermark, watermark_isnull);
 	cagg_watermark_update_internal(mat_ht->fd.id,
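For context, the new ts_guc_enable_cagg_watermark_constify check means the relation cache is only invalidated when watermark constification can actually have baked a stale watermark value into cached plans. A minimal SQL sketch of the knob involved, assuming the C-level GUC is exposed as the session setting timescaledb.enable_cagg_watermark_constify:

-- Sketch only: with constification disabled, prepared statements never
-- embed a constified watermark value, so a watermark update does not
-- need to invalidate the relation cache of the materialization table.
SET timescaledb.enable_cagg_watermark_constify = off;
-- Restore the default behavior afterwards.
SET timescaledb.enable_cagg_watermark_constify = on;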
20 changes: 12 additions & 8 deletions tsl/test/expected/jit.out
@@ -17,6 +17,7 @@ SET jit_above_cost=0;
 SET jit_inline_above_cost=0;
 SET jit_optimize_above_cost=0;
 SET jit_tuple_deforming=on;
+SET enable_hashagg=off;
 \ir :TEST_LOAD_NAME
 -- This file and its contents are licensed under the Timescale License.
 -- Please see the included NOTICE for copyright information and
@@ -198,16 +199,19 @@ SELECT * FROM jit_device_summary WHERE metric_spread = 1800 ORDER BY bucket DESC
               Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread
               Index Cond: (_hyper_4_6_chunk.bucket < 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone)
               Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision)
-        ->  HashAggregate
+        ->  GroupAggregate
               Output: (time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time)), _hyper_3_5_chunk.device_id, avg(_hyper_3_5_chunk.metric), (max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric))
-              Group Key: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id
+              Group Key: (time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time)), _hyper_3_5_chunk.device_id
               Filter: ((max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) = '1800'::double precision)
-              ->  Result
-                    Output: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric
-                    ->  Index Scan using _hyper_3_5_chunk_jit_test_contagg_observation_time_idx on _timescaledb_internal._hyper_3_5_chunk
-                          Output: _hyper_3_5_chunk.observation_time, _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric
-                          Index Cond: (_hyper_3_5_chunk.observation_time >= 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone)
-(19 rows)
+              ->  Sort
+                    Output: (time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time)), _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric
+                    Sort Key: (time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time)), _hyper_3_5_chunk.device_id
+                    ->  Result
+                          Output: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric
+                          ->  Index Scan using _hyper_3_5_chunk_jit_test_contagg_observation_time_idx on _timescaledb_internal._hyper_3_5_chunk
+                                Output: _hyper_3_5_chunk.observation_time, _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric
+                                Index Cond: (_hyper_3_5_chunk.observation_time >= 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone)
+(22 rows)
 
 -- generate the results into two different files
 \set ECHO errors
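Pinning enable_hashagg to off forces the planner to fall back to a sort-based GroupAggregate instead of a HashAggregate, which keeps this EXPLAIN reference output stable across PostgreSQL versions that would otherwise flip between the two strategies. A hedged sketch of the effect (the metrics table here is hypothetical, not part of this test):

SET enable_hashagg = off;   -- planner can no longer choose HashAggregate
EXPLAIN (COSTS OFF)
SELECT device_id, avg(metric)
FROM metrics                -- hypothetical table, for illustration only
GROUP BY device_id;
-- Expected plan shape: GroupAggregate above a Sort (or an ordered index
-- scan), matching the GroupAggregate/Sort nodes in the diff above.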
3 changes: 2 additions & 1 deletion tsl/test/expected/reorder.out
@@ -1273,9 +1273,10 @@ CREATE INDEX ct2_time_idx ON ct2(time DESC);
 CLUSTER ct2 USING ct2_time_idx;
 -- deleted chunks are removed correctly
 DELETE FROM ct2 where time < 2 OR val < 2;
+VACUUM ct2;
 SELECT reorder_chunk('_timescaledb_internal._hyper_2_3_chunk', verbose => TRUE);
 INFO:  reordering "_timescaledb_internal._hyper_2_3_chunk" using sequential scan and sort
-INFO:  "_hyper_2_3_chunk": found 2 removable, 3 nonremovable row versions in 1 pages
+INFO:  "_hyper_2_3_chunk": found 0 removable, 3 nonremovable row versions in 1 pages
  reorder_chunk
 ---------------
 
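The added VACUUM makes the INFO line deterministic: it reclaims the row versions deleted just above before reorder_chunk runs, so the test reliably reports 0 removable tuples rather than a count that depends on whether anything was left for reorder_chunk itself to clean up. The pattern, using the same objects as the test:

DELETE FROM ct2 WHERE time < 2 OR val < 2;
VACUUM ct2;  -- reclaim the deleted row versions now, not lazily later
SELECT reorder_chunk('_timescaledb_internal._hyper_2_3_chunk', verbose => TRUE);
-- INFO now consistently reports "found 0 removable, 3 nonremovable ..."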
1 change: 1 addition & 0 deletions tsl/test/sql/jit.sql
@@ -20,6 +20,7 @@ SET jit_above_cost=0;
 SET jit_inline_above_cost=0;
 SET jit_optimize_above_cost=0;
 SET jit_tuple_deforming=on;
+SET enable_hashagg=off;
 
 \ir :TEST_LOAD_NAME
 \set PREFIX 'EXPLAIN (VERBOSE, TIMING OFF, COSTS OFF, SUMMARY OFF)'
1 change: 1 addition & 0 deletions tsl/test/sql/reorder.sql
@@ -200,6 +200,7 @@ CLUSTER ct2 USING ct2_time_idx;
 
 -- deleted chunks are removed correctly
 DELETE FROM ct2 where time < 2 OR val < 2;
+VACUUM ct2;
 
 SELECT reorder_chunk('_timescaledb_internal._hyper_2_3_chunk', verbose => TRUE);
 SELECT ctid, time, val FROM _timescaledb_internal._hyper_2_3_chunk ORDER BY time;
