diff --git a/src/guc.c b/src/guc.c index d312b60d25d..028ebf8bcd3 100644 --- a/src/guc.c +++ b/src/guc.c @@ -117,6 +117,17 @@ bool ts_shutdown_bgw = false; char *ts_current_timestamp_mock = NULL; #endif +#ifdef TS_DEBUG +static const struct config_enum_entry require_vector_qual_options[] = { + { "allow", RVQ_Allow, false }, + { "forbid", RVQ_Forbid, false }, + { "only", RVQ_Only, false }, + { NULL, 0, false } +}; +#endif + +DebugRequireVectorQual ts_guc_debug_require_vector_qual = RVQ_Allow; + static bool ts_guc_enable_hypertable_create = true; static bool ts_guc_enable_hypertable_compression = true; static bool ts_guc_enable_cagg_create = true; @@ -725,6 +736,24 @@ _guc_init(void) /* assign_hook= */ NULL, /* show_hook= */ NULL); + DefineCustomEnumVariable(/* name= */ "timescaledb.debug_require_vector_qual", + /* short_desc= */ + "ensure that non-vectorized or vectorized filters are used in " + "DecompressChunk node", + /* long_desc= */ + "this is for debugging purposes, to let us check if the vectorized " + "quals are used or not. EXPLAIN differs after PG15 for custom nodes, " + "and " + "using the test templates is a pain", + /* valueAddr= */ (int *) &ts_guc_debug_require_vector_qual, + /* bootValue= */ RVQ_Allow, + /* options = */ require_vector_qual_options, + /* context= */ PGC_USERSET, + /* flags= */ 0, + /* check_hook= */ NULL, + /* assign_hook= */ NULL, + /* show_hook= */ NULL); + DefineCustomBoolVariable(/* name= */ "timescaledb.debug_require_batch_sorted_merge", /* short_desc= */ "require batch sorted merge in DecompressChunk node", /* long_desc= */ "this is for debugging purposes", diff --git a/src/guc.h b/src/guc.h index b4e369ebc79..12a1836f5dd 100644 --- a/src/guc.h +++ b/src/guc.h @@ -104,6 +104,15 @@ extern char *ts_current_timestamp_mock; #define ts_shutdown_bgw false #endif +typedef enum DebugRequireVectorQual +{ + RVQ_Allow = 0, + RVQ_Forbid, + RVQ_Only +} DebugRequireVectorQual; + +extern TSDLLEXPORT DebugRequireVectorQual ts_guc_debug_require_vector_qual; + extern TSDLLEXPORT bool ts_guc_debug_require_batch_sorted_merge; void _guc_init(void); diff --git a/tsl/src/CMakeLists.txt b/tsl/src/CMakeLists.txt index 1e8ceadb4de..ee1a2de3e6b 100644 --- a/tsl/src/CMakeLists.txt +++ b/tsl/src/CMakeLists.txt @@ -61,5 +61,6 @@ add_subdirectory(bgw_policy) add_subdirectory(compression) add_subdirectory(continuous_aggs) add_subdirectory(fdw) +add_subdirectory(import) add_subdirectory(nodes) add_subdirectory(remote) diff --git a/tsl/src/import/CMakeLists.txt b/tsl/src/import/CMakeLists.txt new file mode 100644 index 00000000000..beccff95dd9 --- /dev/null +++ b/tsl/src/import/CMakeLists.txt @@ -0,0 +1,2 @@ +set(SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/ts_explain.c) +target_sources(${TSL_LIBRARY_NAME} PRIVATE ${SOURCES}) diff --git a/tsl/src/import/ts_explain.c b/tsl/src/import/ts_explain.c new file mode 100644 index 00000000000..35dbd07e909 --- /dev/null +++ b/tsl/src/import/ts_explain.c @@ -0,0 +1,107 @@ +/* + * This file and its contents are licensed under the Timescale License. + * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. + */ + +/* + * This file contains source code that was copied and/or modified from + * the PostgreSQL database, which is licensed under the open-source + * PostgreSQL License. Please see the NOTICE at the top level + * directory for a copy of the PostgreSQL License. 
+ */ + +#include "ts_explain.h" + +#include <commands/explain.h> +#include <nodes/makefuncs.h> +#include <utils/ruleutils.h> + +#include "compat/compat.h" + +/* + * Show a generic expression + */ +static void +ts_show_expression(Node *node, const char *qlabel, PlanState *planstate, List *ancestors, + bool useprefix, ExplainState *es) +{ + List *context; + char *exprstr; + + /* Set up deparsing context */ +#if PG13_LT + context = set_deparse_context_planstate(es->deparse_cxt, (Node *) planstate, ancestors); +#else + context = set_deparse_context_plan(es->deparse_cxt, planstate->plan, ancestors); +#endif + + /* Deparse the expression */ + exprstr = deparse_expression(node, context, useprefix, false); + + /* And add to es->str */ + ExplainPropertyText(qlabel, exprstr, es); +} + +/* + * Show a qualifier expression (which is a List with implicit AND semantics) + */ +static void +ts_show_qual(List *qual, const char *qlabel, PlanState *planstate, List *ancestors, bool useprefix, + ExplainState *es) +{ + Node *node; + + /* No work if empty qual */ + if (qual == NIL) + return; + + /* Convert AND list to explicit AND */ + node = (Node *) make_ands_explicit(qual); + + /* And show it */ + ts_show_expression(node, qlabel, planstate, ancestors, useprefix, es); +} + +/* + * Show a qualifier expression for a scan plan node + */ +void +ts_show_scan_qual(List *qual, const char *qlabel, PlanState *planstate, List *ancestors, + ExplainState *es) +{ + bool useprefix; + + useprefix = (IsA(planstate->plan, SubqueryScan) || es->verbose); + ts_show_qual(qual, qlabel, planstate, ancestors, useprefix, es); +} + +/* + * If it's EXPLAIN ANALYZE, show instrumentation information for a plan node + * + * "which" identifies which instrumentation counter to print + */ +void +ts_show_instrumentation_count(const char *qlabel, int which, PlanState *planstate, ExplainState *es) +{ + double nfiltered; + double nloops; + + if (!es->analyze || !planstate->instrument) + return; + + if (which == 2) + nfiltered = planstate->instrument->nfiltered2; + else + nfiltered = planstate->instrument->nfiltered1; + nloops = planstate->instrument->nloops; + + /* In text mode, suppress zero counts; they're not interesting enough */ + if (nfiltered > 0 || es->format != EXPLAIN_FORMAT_TEXT) + { + if (nloops > 0) + ExplainPropertyFloat(qlabel, NULL, nfiltered / nloops, 0, es); + else + ExplainPropertyFloat(qlabel, NULL, 0.0, 0, es); + } +} diff --git a/tsl/src/import/ts_explain.h b/tsl/src/import/ts_explain.h new file mode 100644 index 00000000000..cddb65a5c57 --- /dev/null +++ b/tsl/src/import/ts_explain.h @@ -0,0 +1,26 @@ +/* + * This file and its contents are licensed under the Timescale License. + * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. + */ + +/* + * This file contains source code that was copied and/or modified from + * the PostgreSQL database, which is licensed under the open-source + * PostgreSQL License. Please see the NOTICE at the top level + * directory for a copy of the PostgreSQL License.
+ */ + +#pragma once + +#include <postgres.h> + +#include <commands/explain.h> +#include <nodes/execnodes.h> +#include <nodes/pg_list.h> + +void ts_show_scan_qual(List *qual, const char *qlabel, PlanState *planstate, List *ancestors, + ExplainState *es); + +void ts_show_instrumentation_count(const char *qlabel, int which, PlanState *planstate, + ExplainState *es); diff --git a/tsl/src/nodes/decompress_chunk/CMakeLists.txt b/tsl/src/nodes/decompress_chunk/CMakeLists.txt index a37b7d6baed..0ef8c79a67a 100644 --- a/tsl/src/nodes/decompress_chunk/CMakeLists.txt +++ b/tsl/src/nodes/decompress_chunk/CMakeLists.txt @@ -6,5 +6,6 @@ set(SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/decompress_chunk.c ${CMAKE_CURRENT_SOURCE_DIR}/exec.c ${CMAKE_CURRENT_SOURCE_DIR}/planner.c - ${CMAKE_CURRENT_SOURCE_DIR}/qual_pushdown.c) + ${CMAKE_CURRENT_SOURCE_DIR}/qual_pushdown.c + ${CMAKE_CURRENT_SOURCE_DIR}/vector_predicates.c) target_sources(${TSL_LIBRARY_NAME} PRIVATE ${SOURCES}) diff --git a/tsl/src/nodes/decompress_chunk/batch_array.c b/tsl/src/nodes/decompress_chunk/batch_array.c index ee2cb41eaf1..0a4a709e431 100644 --- a/tsl/src/nodes/decompress_chunk/batch_array.c +++ b/tsl/src/nodes/decompress_chunk/batch_array.c @@ -93,6 +93,7 @@ batch_array_free_at(DecompressChunkState *chunk_state, int batch_index) /* Reset batch state */ batch_state->total_batch_rows = 0; batch_state->next_batch_row = 0; + batch_state->vector_qual_result = NULL; if (batch_state->per_batch_context != NULL) { diff --git a/tsl/src/nodes/decompress_chunk/compressed_batch.c b/tsl/src/nodes/decompress_chunk/compressed_batch.c index 64864941edb..97492449189 100644 --- a/tsl/src/nodes/decompress_chunk/compressed_batch.c +++ b/tsl/src/nodes/decompress_chunk/compressed_batch.c @@ -17,7 +17,198 @@ #include "guc.h" #include "nodes/decompress_chunk/compressed_batch.h" #include "nodes/decompress_chunk/exec.h" +#include "nodes/decompress_chunk/vector_predicates.h" +/* + * Create a single-value ArrowArray from a Postgres Datum. This is used to run + * the usual vectorized predicates on compressed columns with default values. + */ +static ArrowArray * +make_single_value_arrow(Oid pgtype, Datum datum, bool isnull) +{ + struct ArrowWithBuffers + { + ArrowArray arrow; + uint64 buffers[2]; + uint64 nulls_buffer; + uint64 values_buffer; + }; + + struct ArrowWithBuffers *with_buffers = palloc0(sizeof(struct ArrowWithBuffers)); + ArrowArray *arrow = &with_buffers->arrow; + arrow->length = 1; + arrow->null_count = -1; + arrow->n_buffers = 2; + arrow->buffers = (const void **) &with_buffers->buffers; + arrow->buffers[0] = &with_buffers->nulls_buffer; + arrow->buffers[1] = &with_buffers->values_buffer; + + if (isnull) + { + /* + * The validity bitmap was initialized to invalid on allocation, and + * the Datum might be invalid if the value is null (important on i386 + * where it might be pass-by-reference), so don't read it.
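+ * (The struct was allocated with palloc0, so the single-row validity bitmap + * already marks the row as null, and null_count of -1 tells readers to + * recompute the null count from the bitmap.)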
+ */ + return arrow; + } + +#define FOR_TYPE(PGTYPE, CTYPE, FROMDATUM) \ + case PGTYPE: \ + *((CTYPE *) &with_buffers->values_buffer) = FROMDATUM(datum); \ + break + + switch (pgtype) + { + FOR_TYPE(INT8OID, int64, DatumGetInt64); + FOR_TYPE(INT4OID, int32, DatumGetInt32); + FOR_TYPE(INT2OID, int16, DatumGetInt16); + FOR_TYPE(FLOAT8OID, float8, DatumGetFloat8); + FOR_TYPE(FLOAT4OID, float4, DatumGetFloat4); + FOR_TYPE(TIMESTAMPTZOID, TimestampTz, DatumGetTimestampTz); + FOR_TYPE(TIMESTAMPOID, Timestamp, DatumGetTimestamp); + FOR_TYPE(DATEOID, DateADT, DatumGetDateADT); + default: + elog(ERROR, "unexpected column type '%s'", format_type_be(pgtype)); + pg_unreachable(); + } + + arrow_set_row_validity(&with_buffers->nulls_buffer, 0, true); + + return arrow; +} + +static void +apply_vector_quals(DecompressChunkState *chunk_state, DecompressBatchState *batch_state) +{ + if (!chunk_state->vectorized_quals) + { + return; + } + + /* + * Allocate the bitmap that will hold the vectorized qual results. We will + * initialize it to all ones and AND the individual quals to it. + */ + const int bitmap_bytes = sizeof(uint64) * ((batch_state->total_batch_rows + 63) / 64); + batch_state->vector_qual_result = palloc(bitmap_bytes); + memset(batch_state->vector_qual_result, 0xFF, bitmap_bytes); + + /* + * Compute the quals. + */ + ListCell *lc; + foreach (lc, chunk_state->vectorized_quals) + { + /* For now we only support "Var ? Const" predicates. */ + OpExpr *oe = castNode(OpExpr, lfirst(lc)); + Var *var = castNode(Var, linitial(oe->args)); + Const *constnode = castNode(Const, lsecond(oe->args)); + + /* + * Find the compressed column referred to by the Var. + */ + DecompressChunkColumnDescription *column_description = NULL; + int column_index = 0; + for (; column_index < chunk_state->num_total_columns; column_index++) + { + column_description = &chunk_state->template_columns[column_index]; + if (column_description->output_attno == var->varattno) + { + break; + } + } + Ensure(column_index < chunk_state->num_total_columns, + "decompressed column %d not found in batch", + var->varattno); + Assert(column_description != NULL); + Assert(column_description->typid == var->vartype); + Ensure(column_description->type == COMPRESSED_COLUMN, + "only compressed columns are supported in vectorized quals"); + Assert(column_index < chunk_state->num_compressed_columns); + CompressedColumnValues *column_values = &batch_state->compressed_columns[column_index]; + Ensure(column_values->iterator == NULL, + "only arrow columns are supported in vectorized quals"); + + /* + * Prepare to compute the vector predicate. We have to handle the + * default values in a special way because they don't produce the usual + * decompressed ArrowArrays. + */ + uint64 default_value_predicate_result; + uint64 *predicate_result = batch_state->vector_qual_result; + const ArrowArray *vector = column_values->arrow; + if (column_values->arrow == NULL) + { + /* + * The compressed column had a default value. We can't fall back to + * the non-vectorized quals now, so build a single-value ArrowArray + * with this default value, check if it passes the predicate, and apply + * it to the entire batch. 
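+ * In effect, the predicate is evaluated once against the single default value, + * and if it does not pass, the code further below zeroes out the result bitmap + * for the whole batch.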
+ */ + AttrNumber attr = AttrNumberGetAttrOffset(column_description->output_attno); + + Ensure(column_values->iterator == NULL, + "ArrowArray expected for column %s", + NameStr( + TupleDescAttr(batch_state->decompressed_scan_slot->tts_tupleDescriptor, attr) + ->attname)); + + /* + * We saved the actual default value into the decompressed scan slot + * above, so pull it from there. + */ + vector = make_single_value_arrow(column_description->typid, + batch_state->decompressed_scan_slot->tts_values[attr], + batch_state->decompressed_scan_slot->tts_isnull[attr]); + + /* + * We start from an all-valid bitmap, because the predicate is + * AND-ed to it. + */ + default_value_predicate_result = 1; + predicate_result = &default_value_predicate_result; + } + + /* Find and compute the predicate. */ + void (*predicate)(const ArrowArray *, Datum, uint64 *restrict) = + get_vector_const_predicate(get_opcode(oe->opno)); + Ensure(predicate != NULL, + "vectorized predicate not found for postgres predicate %d", + get_opcode(oe->opno)); + + /* + * The vectorizable predicates should be STRICT, so we shouldn't see null + * constants here. + */ + Ensure(!constnode->constisnull, "vectorized predicate called for a null value"); + + predicate(vector, constnode->constvalue, predicate_result); + + /* Process the result. */ + if (column_values->arrow == NULL) + { + /* The column had a default value. */ + Assert(column_values->iterator == NULL); + + if (!(default_value_predicate_result & 1)) + { + /* + * We had a default value for the compressed column, and it + * didn't pass the predicate, so the entire batch didn't pass. + */ + for (int i = 0; i < bitmap_bytes / 8; i++) + { + batch_state->vector_qual_result[i] = 0; + } + } + } + } +} + +/* + * Initialize the batch decompression state with the new compressed tuple. + */ void compressed_batch_set_compressed_tuple(DecompressChunkState *chunk_state, DecompressBatchState *batch_state, TupleTableSlot *subslot) @@ -238,6 +429,8 @@ compressed_batch_set_compressed_tuple(DecompressChunkState *chunk_state, } } + apply_vector_quals(chunk_state, batch_state); + MemoryContextSwitchTo(old_context); } @@ -332,6 +525,24 @@ compressed_batch_make_next_tuple(DecompressChunkState *chunk_state, } } +static bool +compressed_batch_vector_qual(DecompressChunkState *chunk_state, DecompressBatchState *batch_state) +{ + Assert(batch_state->total_batch_rows > 0); + Assert(batch_state->next_batch_row < batch_state->total_batch_rows); + + const int output_row = batch_state->next_batch_row; + const size_t arrow_row = + chunk_state->reverse ? batch_state->total_batch_rows - 1 - output_row : output_row; + + if (!batch_state->vector_qual_result) + { + return true; + } + + return arrow_row_is_valid(batch_state->vector_qual_result, arrow_row); +} + static bool compressed_batch_postgres_qual(DecompressChunkState *chunk_state, DecompressBatchState *batch_state) { @@ -368,6 +579,24 @@ compressed_batch_advance(DecompressChunkState *chunk_state, DecompressBatchState for (; batch_state->next_batch_row < batch_state->total_batch_rows; batch_state->next_batch_row++) { + if (!compressed_batch_vector_qual(chunk_state, batch_state)) + { + /* + * This row doesn't pass the vectorized quals. Advance the iterated + * compressed columns if we have any. 
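+ * The bulk-decompressed (arrow) columns are addressed by row index, so only + * the row-by-row iterators have to be kept in sync when a row is skipped.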
+ */ + for (int i = 0; i < num_compressed_columns; i++) + { + CompressedColumnValues *column_values = &batch_state->compressed_columns[i]; + if (column_values->iterator) + { + column_values->iterator->try_next(column_values->iterator); + } + } + InstrCountFiltered1(&chunk_state->csstate, 1); + continue; + } + compressed_batch_make_next_tuple(chunk_state, batch_state); if (!compressed_batch_postgres_qual(chunk_state, batch_state)) @@ -410,7 +639,7 @@ compressed_batch_advance(DecompressChunkState *chunk_state, DecompressBatchState /* * Before loading the first matching tuple from the batch, also save the very * first one into the given slot, even if it doesn't pass the quals. This is - * needed for batch merge append. + * needed for batch sorted merge. */ void compressed_batch_save_first_tuple(DecompressChunkState *chunk_state, @@ -424,7 +653,8 @@ compressed_batch_save_first_tuple(DecompressChunkState *chunk_state, compressed_batch_make_next_tuple(chunk_state, batch_state); ExecCopySlot(first_tuple_slot, batch_state->decompressed_scan_slot); - const bool qual_passed = compressed_batch_postgres_qual(chunk_state, batch_state); + const bool qual_passed = compressed_batch_vector_qual(chunk_state, batch_state) && + compressed_batch_postgres_qual(chunk_state, batch_state); batch_state->next_batch_row++; if (!qual_passed) diff --git a/tsl/src/nodes/decompress_chunk/compressed_batch.h b/tsl/src/nodes/decompress_chunk/compressed_batch.h index e783ec3c663..e7ad42dea42 100644 --- a/tsl/src/nodes/decompress_chunk/compressed_batch.h +++ b/tsl/src/nodes/decompress_chunk/compressed_batch.h @@ -51,6 +51,12 @@ typedef struct DecompressBatchState int total_batch_rows; int next_batch_row; MemoryContext per_batch_context; + + /* + * Arrow-style bitmap that says whether the vector quals passed for a given + * row. Indexed same as arrow arrays, w/o accounting for the reverse scan + * direction. Initialized to all ones, i.e. all rows pass. + */ uint64 *vector_qual_result; CompressedColumnValues compressed_columns[FLEXIBLE_ARRAY_MEMBER]; diff --git a/tsl/src/nodes/decompress_chunk/decompress_chunk.h b/tsl/src/nodes/decompress_chunk/decompress_chunk.h index 86584b5be3b..c42332a277a 100644 --- a/tsl/src/nodes/decompress_chunk/decompress_chunk.h +++ b/tsl/src/nodes/decompress_chunk/decompress_chunk.h @@ -80,6 +80,13 @@ typedef struct DecompressChunkPath */ bool have_bulk_decompression_columns; + /* + * Maps the uncompressed chunk attno to the respective column compression + * info. This lives only during planning so that we can understand on which + * columns we can apply vectorized quals. 
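+ * The map is zero-initialized, so attnos without a compressed counterpart keep + * bulk_decompression_possible = false and are never considered for + * vectorization.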
+ */ + DecompressChunkColumnCompression *uncompressed_chunk_attno_to_compression_info; + List *compressed_pathkeys; bool needs_sequence_num; bool reverse; diff --git a/tsl/src/nodes/decompress_chunk/exec.c b/tsl/src/nodes/decompress_chunk/exec.c index 11d2d83cc2e..cd09df47a87 100644 --- a/tsl/src/nodes/decompress_chunk/exec.c +++ b/tsl/src/nodes/decompress_chunk/exec.c @@ -22,6 +22,7 @@ #include "compression/arrow_c_data_interface.h" #include "compression/compression.h" #include "guc.h" +#include "import/ts_explain.h" #include "nodes/decompress_chunk/batch_array.h" #include "nodes/decompress_chunk/batch_queue_fifo.h" #include "nodes/decompress_chunk/batch_queue_heap.h" @@ -155,6 +156,10 @@ decompress_chunk_state_create(CustomScan *cscan) chunk_state->batch_sorted_merge = lfourth_int(settings); chunk_state->enable_bulk_decompression = lfifth_int(settings); + Assert(IsA(cscan->custom_exprs, List)); + Assert(list_length(cscan->custom_exprs) == 1); + chunk_state->vectorized_quals = linitial(cscan->custom_exprs); + return (Node *) chunk_state; } @@ -545,6 +550,21 @@ decompress_chunk_explain(CustomScanState *node, List *ancestors, ExplainState *e { DecompressChunkState *chunk_state = (DecompressChunkState *) node; + ts_show_scan_qual(chunk_state->vectorized_quals, + "Vectorized Filter", + &node->ss.ps, + ancestors, + es); + + if (!node->ss.ps.plan->qual && chunk_state->vectorized_quals) + { + /* + * The normal explain won't show this if there are no normal quals but + * only the vectorized ones. + */ + ts_show_instrumentation_count("Rows Removed by Filter", 1, &node->ss.ps, es); + } + if (es->verbose || es->format != EXPLAIN_FORMAT_TEXT) { if (chunk_state->batch_sorted_merge) diff --git a/tsl/src/nodes/decompress_chunk/exec.h b/tsl/src/nodes/decompress_chunk/exec.h index cf3102834e5..f49395e8407 100644 --- a/tsl/src/nodes/decompress_chunk/exec.h +++ b/tsl/src/nodes/decompress_chunk/exec.h @@ -89,6 +89,13 @@ typedef struct DecompressChunkState */ MemoryContext bulk_decompression_context; + /* + * For some predicates, we have more efficient implementation that work on + * the entire compressed batch in one go. They go to this list, and the rest + * goes into the usual ss.ps.qual. + */ + List *vectorized_quals; + /* * Make non-refcounted copies of the tupdesc for reuse across all batch states * and avoid spending CPU in ResourceOwner when creating a big number of table diff --git a/tsl/src/nodes/decompress_chunk/planner.c b/tsl/src/nodes/decompress_chunk/planner.c index 7d15c84c906..27af0cc85e6 100644 --- a/tsl/src/nodes/decompress_chunk/planner.c +++ b/tsl/src/nodes/decompress_chunk/planner.c @@ -29,6 +29,7 @@ #include "nodes/decompress_chunk/decompress_chunk.h" #include "nodes/decompress_chunk/exec.h" #include "nodes/decompress_chunk/planner.h" +#include "vector_predicates.h" static CustomScanMethods decompress_chunk_plan_methods = { .CustomName = "DecompressChunk", @@ -119,6 +120,10 @@ build_decompression_map(DecompressChunkPath *path, List *scan_tlist, Bitmapset * compressed_attno_to_compression_info[compressed_attno] = fd; } + path->uncompressed_chunk_attno_to_compression_info = + palloc0(sizeof(*path->uncompressed_chunk_attno_to_compression_info) * + (path->info->chunk_rel->max_attr + 1)); + /* * Go over the scan targetlist and determine to which output column each * scan column goes, saving other additional info as we do that. 
@@ -243,6 +248,14 @@ build_decompression_map(DecompressChunkPath *path, List *scan_tlist, Bitmapset * path->have_bulk_decompression_columns |= bulk_decompression_possible; path->bulk_decompression_column = lappend_int(path->bulk_decompression_column, bulk_decompression_possible); + + if (destination_attno_in_uncompressed_chunk > 0) + { + path->uncompressed_chunk_attno_to_compression_info + [destination_attno_in_uncompressed_chunk] = (DecompressChunkColumnCompression){ + .fd = *compression_info, .bulk_decompression_possible = bulk_decompression_possible + }; + } } /* @@ -351,6 +364,83 @@ find_attr_pos_in_tlist(List *targetlist, AttrNumber pos) pg_unreachable(); } +static bool +qual_is_vectorizable(DecompressChunkPath *path, Node *qual) +{ + /* Only simple "Var op Const" binary predicates for now. */ + if (!IsA(qual, OpExpr)) + { + return false; + } + + OpExpr *o = castNode(OpExpr, qual); + + if (list_length(o->args) != 2) + { + return false; + } + + if (IsA(lsecond(o->args), Var) && IsA(linitial(o->args), Const)) + { + /* Try to commute the operator if the constant is on the right. */ + Oid commutator_opno = get_commutator(o->opno); + if (OidIsValid(commutator_opno)) + { + o->opno = commutator_opno; + /* + * opfuncid is a cache, we can set it to InvalidOid like the + * CommuteOpExpr() does. + */ + o->opfuncid = InvalidOid; + o->args = list_make2(lsecond(o->args), linitial(o->args)); + } + } + + if (!IsA(linitial(o->args), Var) || !IsA(lsecond(o->args), Const)) + { + return false; + } + + Var *var = castNode(Var, linitial(o->args)); + Assert((Index) var->varno == path->info->chunk_rel->relid); + + /* + * ExecQual is performed before ExecProject and operates on the decompressed + * scan slot, so the qual attnos are the uncompressed chunk attnos. + */ + if (!path->uncompressed_chunk_attno_to_compression_info[var->varattno] + .bulk_decompression_possible) + { + /* This column doesn't support bulk decompression. */ + return false; + } + + Oid opcode = get_opcode(o->opno); + if (get_vector_const_predicate(opcode)) + { + return true; + } + + return false; +} + +/* + * Find the scan qualifiers that can be vectorized and put them into a separate + * list. + */ +static void +find_vectorized_quals(DecompressChunkPath *path, List *qual, List **vectorized, + List **nonvectorized) +{ + ListCell *lc; + foreach (lc, qual) + { + Node *node = lfirst(lc); + List **dest = qual_is_vectorizable(path, node) ? vectorized : nonvectorized; + *dest = lappend(*dest, node); + } +} + Plan * decompress_chunk_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *path, List *decompressed_tlist, List *clauses, List *custom_plans) @@ -662,12 +752,48 @@ decompress_chunk_plan_create(PlannerInfo *root, RelOptInfo *rel, CustomPath *pat ts_guc_enable_bulk_decompression && dcpath->have_bulk_decompression_columns; + /* + * For some predicates, we have more efficient implementation that work on + * the entire compressed batch in one go. They go to this list, and the rest + * goes into the usual scan.plan.qual. 
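+ * The split is attempted only when bulk decompression is enabled, because the + * vectorized predicates run on the bulk-decompressed ArrowArrays.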
+ */ + List *vectorized_quals = NIL; + if (enable_bulk_decompression) + { + List *nonvectorized_quals = NIL; + find_vectorized_quals(dcpath, + decompress_plan->scan.plan.qual, + &vectorized_quals, + &nonvectorized_quals); + + decompress_plan->scan.plan.qual = nonvectorized_quals; + } + +#ifdef TS_DEBUG + if (ts_guc_debug_require_vector_qual == RVQ_Forbid && list_length(vectorized_quals) > 0) + { + elog(ERROR, "debug: encountered vector quals when they are disabled"); + } + else if (ts_guc_debug_require_vector_qual == RVQ_Only && + list_length(decompress_plan->scan.plan.qual) > 0) + { + elog(ERROR, "debug: encountered non-vector quals when they are disabled"); + } +#endif + settings = list_make5_int(dcpath->info->hypertable_id, dcpath->info->chunk_rte->relid, dcpath->reverse, dcpath->batch_sorted_merge, enable_bulk_decompression); + /* + * Vectorized quals must go into custom_exprs, because Postgres has to see + * them and perform the varno adjustments on them when flattening the + * subqueries. + */ + decompress_plan->custom_exprs = list_make1(vectorized_quals); + decompress_plan->custom_private = list_make5(settings, dcpath->decompression_map, dcpath->is_segmentby_column, diff --git a/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_all.c b/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_all.c new file mode 100644 index 00000000000..bf089f4bc79 --- /dev/null +++ b/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_all.c @@ -0,0 +1,86 @@ +/* + * This file and its contents are licensed under the Timescale License. + * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. + */ + +/* + * Define all supported "vector ? const" predicates for arithmetic types. + */ + +/* In PG <= 13, in fmgroids.h the defines for TIMESTAMP functions specify + * TIMESTAMPTZ oids, and the actual TIMESTAMP oids are nowhere to be found. Fix + * this by manually defining the TIMESTAMPTZ defines to specify TIMESTAMP oids. + * This means that the tz/non-tz versions are switched, but we don't care since + * the implementation is the same. + */ +#if PG14_LT +#undef F_TIMESTAMPTZ_EQ +#undef F_TIMESTAMPTZ_NE +#undef F_TIMESTAMPTZ_LT +#undef F_TIMESTAMPTZ_LE +#undef F_TIMESTAMPTZ_GE +#undef F_TIMESTAMPTZ_GT +#define F_TIMESTAMPTZ_EQ 2052 +#define F_TIMESTAMPTZ_NE 2053 +#define F_TIMESTAMPTZ_LT 2054 +#define F_TIMESTAMPTZ_LE 2055 +#define F_TIMESTAMPTZ_GE 2056 +#define F_TIMESTAMPTZ_GT 2057 +#endif + +/* int8 functions. */ +#define VECTOR_CTYPE int64 +#define CONST_CTYPE int64 +#define CONST_CONVERSION(X) DatumGetInt64(X) +#define PG_PREDICATE(X) F_INT8##X: case F_TIMESTAMPTZ_##X: case F_TIMESTAMP_##X + +#include "pred_vector_const_arithmetic_type_pair.c" + +/* int4 functions. */ +#define VECTOR_CTYPE int32 +#define CONST_CTYPE int32 +#define CONST_CONVERSION(X) DatumGetInt32(X) +#define PG_PREDICATE(X) F_INT4##X + +#include "pred_vector_const_arithmetic_type_pair.c" + +/* int24 functions. */ +#define VECTOR_CTYPE int16 +#define CONST_CTYPE int32 +#define CONST_CONVERSION(X) DatumGetInt32(X) +#define PG_PREDICATE(X) F_INT24##X + +#include "pred_vector_const_arithmetic_type_pair.c" + +/* int84 functions. */ +#define VECTOR_CTYPE int64 +#define CONST_CTYPE int32 +#define CONST_CONVERSION(X) DatumGetInt32(X) +#define PG_PREDICATE(X) F_INT84##X + +#include "pred_vector_const_arithmetic_type_pair.c" + +/* int48 functions. 
*/ +#define VECTOR_CTYPE int32 +#define CONST_CTYPE int64 +#define CONST_CONVERSION(X) DatumGetInt64(X) +#define PG_PREDICATE(X) F_INT48##X + +#include "pred_vector_const_arithmetic_type_pair.c" + +/* float8 functions. */ +#define VECTOR_CTYPE float8 +#define CONST_CTYPE float8 +#define CONST_CONVERSION(X) DatumGetFloat8(X) +#define PG_PREDICATE(X) F_FLOAT8##X + +#include "pred_vector_const_arithmetic_type_pair.c" + +/* float4 functions. */ +#define VECTOR_CTYPE float4 +#define CONST_CTYPE float4 +#define CONST_CONVERSION(X) DatumGetFloat4(X) +#define PG_PREDICATE(X) F_FLOAT4##X + +#include "pred_vector_const_arithmetic_type_pair.c" diff --git a/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_single.c b/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_single.c new file mode 100644 index 00000000000..d6c41c4bfc3 --- /dev/null +++ b/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_single.c @@ -0,0 +1,73 @@ +/* + * This file and its contents are licensed under the Timescale License. + * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. + */ + +/* + * Compute a vector-const predicate and AND it to the filter bitmap. + * Specialized for particular arithmetic data types and predicate. + * Marked as noinline for the ease of debugging. Inlining it shouldn't be + * beneficial because it's a big self-contained loop. + */ + +#define PG_PREDICATE_HELPER(X) PG_PREDICATE(X) + +#define FUNCTION_NAME_HELPER(X, Y, Z) predicate_##X##_##Y##_vector_##Z##_const +#define FUNCTION_NAME(X, Y, Z) FUNCTION_NAME_HELPER(X, Y, Z) + +#ifdef GENERATE_DISPATCH_TABLE +case PG_PREDICATE_HELPER(PREDICATE_NAME): + return FUNCTION_NAME(PREDICATE_NAME, VECTOR_CTYPE, CONST_CTYPE); +#else + +static pg_noinline void +FUNCTION_NAME(PREDICATE_NAME, VECTOR_CTYPE, + CONST_CTYPE)(const ArrowArray *arrow, const Datum constdatum, uint64 *restrict result) +{ + const size_t n = arrow->length; + + /* Account for nulls which shouldn't pass the predicate. */ + const size_t n_words = (n + 63) / 64; + const uint64 *restrict validity = (uint64 *restrict) arrow->buffers[0]; + for (size_t i = 0; i < n_words; i++) + { + result[i] &= validity[i]; + } + + /* Now run the predicate itself. */ + const CONST_CTYPE constvalue = CONST_CONVERSION(constdatum); + const VECTOR_CTYPE *restrict vector = (VECTOR_CTYPE *restrict) arrow->buffers[1]; + + for (size_t outer = 0; outer < n / 64; outer++) + { + uint64 word = 0; + for (size_t inner = 0; inner < 64; inner++) + { + const bool valid = PREDICATE_EXPRESSION(vector[outer * 64 + inner], constvalue); + word |= ((uint64) valid) << inner; + } + result[outer] &= word; + } + + if (n % 64) + { + uint64 tail_word = 0; + for (size_t i = (n / 64) * 64; i < n; i++) + { + const bool valid = PREDICATE_EXPRESSION(vector[i], constvalue); + tail_word |= ((uint64) valid) << (i % 64); + } + result[n / 64] &= tail_word; + } +} + +#endif + +#undef PG_PREDICATE_HELPER + +#undef FUNCTION_NAME +#undef FUNCTION_NAME_HELPER + +#undef PREDICATE_EXPRESSION +#undef PREDICATE_NAME diff --git a/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_type_pair.c b/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_type_pair.c new file mode 100644 index 00000000000..37cfd0ebe22 --- /dev/null +++ b/tsl/src/nodes/decompress_chunk/pred_vector_const_arithmetic_type_pair.c @@ -0,0 +1,38 @@ +/* + * This file and its contents are licensed under the Timescale License. 
+ * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. + */ + +/* + * Vector-const predicates for one pair of arithmetic types. + */ + +#define PREDICATE_NAME GE +#define PREDICATE_EXPRESSION(X, Y) ((X) >= (Y)) +#include "pred_vector_const_arithmetic_single.c" + +#define PREDICATE_NAME LE +#define PREDICATE_EXPRESSION(X, Y) ((X) <= (Y)) +#include "pred_vector_const_arithmetic_single.c" + +#define PREDICATE_NAME LT +#define PREDICATE_EXPRESSION(X, Y) ((X) < (Y)) +#include "pred_vector_const_arithmetic_single.c" + +#define PREDICATE_NAME GT +#define PREDICATE_EXPRESSION(X, Y) ((X) > (Y)) +#include "pred_vector_const_arithmetic_single.c" + +#define PREDICATE_NAME EQ +#define PREDICATE_EXPRESSION(X, Y) ((X) == (Y)) +#include "pred_vector_const_arithmetic_single.c" + +#define PREDICATE_NAME NE +#define PREDICATE_EXPRESSION(X, Y) ((X) != (Y)) +#include "pred_vector_const_arithmetic_single.c" + +#undef VECTOR_CTYPE +#undef CONST_CTYPE +#undef CONST_CONVERSION +#undef PG_PREDICATE diff --git a/tsl/src/nodes/decompress_chunk/vector_predicates.c b/tsl/src/nodes/decompress_chunk/vector_predicates.c new file mode 100644 index 00000000000..c878fde6de9 --- /dev/null +++ b/tsl/src/nodes/decompress_chunk/vector_predicates.c @@ -0,0 +1,36 @@ +/* + * This file and its contents are licensed under the Timescale License. + * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. + */ + +/* + * Functions for working with vectorized predicates. + */ + +#include <postgres.h> + +#include <utils/fmgroids.h> + +#include "compat/compat.h" +#include "compression/arrow_c_data_interface.h" + +#include "vector_predicates.h" + +#include "pred_vector_const_arithmetic_all.c" + +/* + * Look up the vectorized implementation for a Postgres predicate, specified by + * its Oid in pg_proc. Note that this is the Oid of the operator's underlying + * function (what get_opcode() returns), not the Oid of the operator itself. + */ +void (*get_vector_const_predicate(Oid pg_predicate))(const ArrowArray *, const Datum, + uint64 *restrict) +{ + switch (pg_predicate) + { +#define GENERATE_DISPATCH_TABLE +#include "pred_vector_const_arithmetic_all.c" +#undef GENERATE_DISPATCH_TABLE + } + return NULL; +} diff --git a/tsl/src/nodes/decompress_chunk/vector_predicates.h b/tsl/src/nodes/decompress_chunk/vector_predicates.h new file mode 100644 index 00000000000..f00d72dfe44 --- /dev/null +++ b/tsl/src/nodes/decompress_chunk/vector_predicates.h @@ -0,0 +1,14 @@ +/* + * This file and its contents are licensed under the Timescale License. + * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. + */ + +/* + * Functions for working with vectorized predicates.
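+ * + * A usage sketch, mirroring apply_vector_quals() in compressed_batch.c (the + * variable names here are illustrative): + * + * void (*predicate)(const ArrowArray *, Datum, uint64 *restrict) = + * get_vector_const_predicate(get_opcode(opexpr->opno)); + * if (predicate != NULL) + * predicate(vector, constvalue, result_bitmap); + * + * where result_bitmap starts as all ones and the predicate ANDs its result + * into it.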
+ */ + +#pragma once + +void (*get_vector_const_predicate(Oid pg_predicate))(const ArrowArray *, const Datum, + uint64 *restrict); diff --git a/tsl/test/expected/compression_qualpushdown.out b/tsl/test/expected/compression_qualpushdown.out index 137949f558a..7219e0acc1f 100644 --- a/tsl/test/expected/compression_qualpushdown.out +++ b/tsl/test/expected/compression_qualpushdown.out @@ -38,7 +38,7 @@ WHERE time > 2::bigint and time < 4; QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk - Filter: ((_hyper_1_1_chunk."time" > '2'::bigint) AND (_hyper_1_1_chunk."time" < 4)) + Vectorized Filter: ((_hyper_1_1_chunk."time" > '2'::bigint) AND (_hyper_1_1_chunk."time" < 4)) -> Seq Scan on _timescaledb_internal.compress_hyper_2_3_chunk Output: compress_hyper_2_3_chunk."time", compress_hyper_2_3_chunk.device_id, compress_hyper_2_3_chunk.val, compress_hyper_2_3_chunk._ts_meta_count, compress_hyper_2_3_chunk._ts_meta_sequence_num, compress_hyper_2_3_chunk._ts_meta_min_1, compress_hyper_2_3_chunk._ts_meta_max_1 Filter: ((compress_hyper_2_3_chunk._ts_meta_max_1 > '2'::bigint) AND (compress_hyper_2_3_chunk._ts_meta_min_1 < 4)) @@ -51,7 +51,7 @@ WHERE time = 3::bigint; QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk - Filter: (_hyper_1_1_chunk."time" = '3'::bigint) + Vectorized Filter: (_hyper_1_1_chunk."time" = '3'::bigint) -> Seq Scan on _timescaledb_internal.compress_hyper_2_3_chunk Output: compress_hyper_2_3_chunk."time", compress_hyper_2_3_chunk.device_id, compress_hyper_2_3_chunk.val, compress_hyper_2_3_chunk._ts_meta_count, compress_hyper_2_3_chunk._ts_meta_sequence_num, compress_hyper_2_3_chunk._ts_meta_min_1, compress_hyper_2_3_chunk._ts_meta_max_1 Filter: ((compress_hyper_2_3_chunk._ts_meta_min_1 <= '3'::bigint) AND (compress_hyper_2_3_chunk._ts_meta_max_1 >= '3'::bigint)) @@ -131,11 +131,12 @@ order by factorid, end_dt; Sort Key: _hyper_3_4_chunk.factorid, _hyper_3_4_chunk.end_dt -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_3_4_chunk Output: _hyper_3_4_chunk.factorid, _hyper_3_4_chunk.end_dt, _hyper_3_4_chunk.logret - Filter: ((_hyper_3_4_chunk.end_dt >= '12-10-2012'::date) AND (_hyper_3_4_chunk.end_dt <= '12-11-2012'::date) AND (_hyper_3_4_chunk.fmid = 56)) + Filter: ((_hyper_3_4_chunk.end_dt >= '12-10-2012'::date) AND (_hyper_3_4_chunk.end_dt <= '12-11-2012'::date)) + Vectorized Filter: (_hyper_3_4_chunk.fmid = 56) -> Seq Scan on _timescaledb_internal.compress_hyper_4_5_chunk Output: compress_hyper_4_5_chunk.fmid, compress_hyper_4_5_chunk.factorid, compress_hyper_4_5_chunk.start_dt, compress_hyper_4_5_chunk.end_dt, compress_hyper_4_5_chunk.interval_number, compress_hyper_4_5_chunk.logret, compress_hyper_4_5_chunk.knowledge_date, compress_hyper_4_5_chunk._ts_meta_count, compress_hyper_4_5_chunk._ts_meta_sequence_num, compress_hyper_4_5_chunk._ts_meta_min_1, compress_hyper_4_5_chunk._ts_meta_max_1 Filter: ((compress_hyper_4_5_chunk._ts_meta_max_1 
>= '12-10-2012'::date) AND (compress_hyper_4_5_chunk._ts_meta_min_1 <= '12-11-2012'::date)) -(9 rows) +(10 rows) --no pushdown here select factorid, end_dt, logret @@ -161,10 +162,11 @@ order by factorid, end_dt; Sort Key: _hyper_3_4_chunk.factorid, _hyper_3_4_chunk.end_dt -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_3_4_chunk Output: _hyper_3_4_chunk.factorid, _hyper_3_4_chunk.end_dt, _hyper_3_4_chunk.logret - Filter: ((_hyper_3_4_chunk.fmid = 56) AND ((_hyper_3_4_chunk.end_dt)::date >= 'Mon Dec 10 00:00:00 2012'::timestamp without time zone) AND ((_hyper_3_4_chunk.end_dt)::date <= '12-11-2012'::date)) + Filter: (((_hyper_3_4_chunk.end_dt)::date >= 'Mon Dec 10 00:00:00 2012'::timestamp without time zone) AND ((_hyper_3_4_chunk.end_dt)::date <= '12-11-2012'::date)) + Vectorized Filter: (_hyper_3_4_chunk.fmid = 56) -> Seq Scan on _timescaledb_internal.compress_hyper_4_5_chunk Output: compress_hyper_4_5_chunk.fmid, compress_hyper_4_5_chunk.factorid, compress_hyper_4_5_chunk.start_dt, compress_hyper_4_5_chunk.end_dt, compress_hyper_4_5_chunk.interval_number, compress_hyper_4_5_chunk.logret, compress_hyper_4_5_chunk.knowledge_date, compress_hyper_4_5_chunk._ts_meta_count, compress_hyper_4_5_chunk._ts_meta_sequence_num, compress_hyper_4_5_chunk._ts_meta_min_1, compress_hyper_4_5_chunk._ts_meta_max_1 -(8 rows) +(9 rows) --should fail \set ON_ERROR_STOP 0 diff --git a/tsl/test/expected/compression_sorted_merge-13.out b/tsl/test/expected/compression_sorted_merge-13.out index fc78c8278e1..8732b9e1775 100644 --- a/tsl/test/expected/compression_sorted_merge-13.out +++ b/tsl/test/expected/compression_sorted_merge-13.out @@ -637,7 +637,7 @@ SELECT * FROM test1 WHERE x4 > 100 ORDER BY time DESC, x4, x3; Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.x1, _hyper_1_1_chunk.x2, _hyper_1_1_chunk.x3, _hyper_1_1_chunk.x4, _hyper_1_1_chunk.x5 - Filter: (_hyper_1_1_chunk.x4 > 100) + Vectorized Filter: (_hyper_1_1_chunk.x4 > 100) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk (actual rows=0 loops=1) Output: compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk.x4, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk._ts_meta_sequence_num, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3 @@ -656,7 +656,7 @@ SELECT * FROM test1 WHERE x4 > 100 ORDER BY time ASC, x3, x4; Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.x1, _hyper_1_1_chunk.x2, _hyper_1_1_chunk.x3, _hyper_1_1_chunk.x4, _hyper_1_1_chunk.x5 - Filter: (_hyper_1_1_chunk.x4 > 100) + Vectorized Filter: (_hyper_1_1_chunk.x4 > 100) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk (actual rows=0 loops=1) Output: compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk.x4, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk._ts_meta_sequence_num, 
compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3 diff --git a/tsl/test/expected/compression_sorted_merge-14.out b/tsl/test/expected/compression_sorted_merge-14.out index fc78c8278e1..8732b9e1775 100644 --- a/tsl/test/expected/compression_sorted_merge-14.out +++ b/tsl/test/expected/compression_sorted_merge-14.out @@ -637,7 +637,7 @@ SELECT * FROM test1 WHERE x4 > 100 ORDER BY time DESC, x4, x3; Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.x1, _hyper_1_1_chunk.x2, _hyper_1_1_chunk.x3, _hyper_1_1_chunk.x4, _hyper_1_1_chunk.x5 - Filter: (_hyper_1_1_chunk.x4 > 100) + Vectorized Filter: (_hyper_1_1_chunk.x4 > 100) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk (actual rows=0 loops=1) Output: compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk.x4, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk._ts_meta_sequence_num, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3 @@ -656,7 +656,7 @@ SELECT * FROM test1 WHERE x4 > 100 ORDER BY time ASC, x3, x4; Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.x1, _hyper_1_1_chunk.x2, _hyper_1_1_chunk.x3, _hyper_1_1_chunk.x4, _hyper_1_1_chunk.x5 - Filter: (_hyper_1_1_chunk.x4 > 100) + Vectorized Filter: (_hyper_1_1_chunk.x4 > 100) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk (actual rows=0 loops=1) Output: compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk.x4, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk._ts_meta_sequence_num, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3 diff --git a/tsl/test/expected/compression_sorted_merge-15.out b/tsl/test/expected/compression_sorted_merge-15.out index fa5b4d6f28a..bebbacd21d0 100644 --- a/tsl/test/expected/compression_sorted_merge-15.out +++ b/tsl/test/expected/compression_sorted_merge-15.out @@ -637,7 +637,7 @@ SELECT * FROM test1 WHERE x4 > 100 ORDER BY time DESC, x4, x3; Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.x1, _hyper_1_1_chunk.x2, _hyper_1_1_chunk.x3, _hyper_1_1_chunk.x4, _hyper_1_1_chunk.x5 - Filter: (_hyper_1_1_chunk.x4 > 100) + Vectorized Filter: (_hyper_1_1_chunk.x4 > 100) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk (actual rows=0 loops=1) Output: compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, 
compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk.x4, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk._ts_meta_sequence_num, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3 @@ -656,7 +656,7 @@ SELECT * FROM test1 WHERE x4 > 100 ORDER BY time ASC, x3, x4; Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.x1, _hyper_1_1_chunk.x2, _hyper_1_1_chunk.x3, _hyper_1_1_chunk.x4, _hyper_1_1_chunk.x5 - Filter: (_hyper_1_1_chunk.x4 > 100) + Vectorized Filter: (_hyper_1_1_chunk.x4 > 100) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk (actual rows=0 loops=1) Output: compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk.x4, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk._ts_meta_sequence_num, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3 diff --git a/tsl/test/expected/decompress_vector_qual.out b/tsl/test/expected/decompress_vector_qual.out new file mode 100644 index 00000000000..331b189d839 --- /dev/null +++ b/tsl/test/expected/decompress_vector_qual.out @@ -0,0 +1,219 @@ +-- This file and its contents are licensed under the Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. 
+create table vectorqual(metric1 int8, ts timestamp, metric2 int8, device int8); +select create_hypertable('vectorqual', 'ts'); +WARNING: column type "timestamp without time zone" used for "ts" does not follow best practices +NOTICE: adding not-null constraint to column "ts" + create_hypertable +------------------------- + (1,public,vectorqual,t) +(1 row) + +alter table vectorqual set (timescaledb.compress, timescaledb.compress_segmentby = 'device'); +insert into vectorqual(ts, device, metric1, metric2) values ('2020-01-01 00:00:00', 1, 11, 12); +select count(compress_chunk(x, true)) from show_chunks('vectorqual') x; + count +------- + 1 +(1 row) + +alter table vectorqual drop column metric1; +insert into vectorqual(ts, device, metric2) values ('2021-01-01 00:00:00', 2, 22); +select count(compress_chunk(x, true)) from show_chunks('vectorqual') x; +NOTICE: chunk "_hyper_1_1_chunk" is already compressed + count +------- + 2 +(1 row) + +alter table vectorqual add column metric3 int4 default 777; +insert into vectorqual(ts, device, metric2, metric3) values ('2022-01-01 00:00:00', 3, 32, 33); +select count(compress_chunk(x, true)) from show_chunks('vectorqual') x; +NOTICE: chunk "_hyper_1_1_chunk" is already compressed +NOTICE: chunk "_hyper_1_3_chunk" is already compressed + count +------- + 3 +(1 row) + +alter table vectorqual add column metric4 int8; +insert into vectorqual(ts, device, metric2, metric3, metric4) values ('2023-01-01 00:00:00', 4, 42, 43, 44); +select count(compress_chunk(x, true)) from show_chunks('vectorqual') x; +NOTICE: chunk "_hyper_1_1_chunk" is already compressed +NOTICE: chunk "_hyper_1_3_chunk" is already compressed +NOTICE: chunk "_hyper_1_5_chunk" is already compressed + count +------- + 4 +(1 row) + +select * from vectorqual order by vectorqual; + ts | metric2 | device | metric3 | metric4 +--------------------------+---------+--------+---------+--------- + Wed Jan 01 00:00:00 2020 | 12 | 1 | 777 | + Fri Jan 01 00:00:00 2021 | 22 | 2 | 777 | + Sat Jan 01 00:00:00 2022 | 32 | 3 | 33 | + Sun Jan 01 00:00:00 2023 | 42 | 4 | 43 | 44 +(4 rows) + +set timescaledb.debug_require_vector_qual to 'only' /* all following quals must be vectorized */; +select count(*) from vectorqual where ts > '1999-01-01 00:00:00'; + count +------- + 4 +(1 row) + +select count(*) from vectorqual where metric2 = 22; + count +------- + 1 +(1 row) + +select count(*) from vectorqual where 22 = metric2 /* commutators */; + count +------- + 1 +(1 row) + +select count(*) from vectorqual where metric3 = 33; + count +------- + 1 +(1 row) + +select count(*) from vectorqual where metric3 = 777 /* default value */; + count +------- + 2 +(1 row) + +select count(*) from vectorqual where metric4 = 44 /* column with default null */; + count +------- + 1 +(1 row) + +select count(*) from vectorqual where metric4 >= 0 /* nulls shouldn't pass the qual */; + count +------- + 1 +(1 row) + +set timescaledb.debug_require_vector_qual to 'forbid'; +select count(*) from vectorqual where device = 1 /* can't apply vector ops to the segmentby column */; + count +------- + 1 +(1 row) + +-- Test columns that don't support bulk decompression. 
+alter table vectorqual add column tag text; +insert into vectorqual(ts, device, metric2, metric3, metric4, tag) values ('2025-01-01 00:00:00', 5, 52, 53, 54, 'tag5'); +select count(compress_chunk(x, true)) from show_chunks('vectorqual') x; +NOTICE: chunk "_hyper_1_1_chunk" is already compressed +NOTICE: chunk "_hyper_1_3_chunk" is already compressed +NOTICE: chunk "_hyper_1_5_chunk" is already compressed +NOTICE: chunk "_hyper_1_7_chunk" is already compressed + count +------- + 5 +(1 row) + +set timescaledb.debug_require_vector_qual to 'only'; +select tag from vectorqual where metric2 > 0; + tag +------ + + + + + tag5 +(5 rows) + +-- Queries without aggregation. +select * from vectorqual where ts > '2021-01-01 00:00:00' order by vectorqual; + ts | metric2 | device | metric3 | metric4 | tag +--------------------------+---------+--------+---------+---------+------ + Sat Jan 01 00:00:00 2022 | 32 | 3 | 33 | | + Sun Jan 01 00:00:00 2023 | 42 | 4 | 43 | 44 | + Wed Jan 01 00:00:00 2025 | 52 | 5 | 53 | 54 | tag5 +(3 rows) + +select * from vectorqual where metric4 >= 0 order by vectorqual; + ts | metric2 | device | metric3 | metric4 | tag +--------------------------+---------+--------+---------+---------+------ + Sun Jan 01 00:00:00 2023 | 42 | 4 | 43 | 44 | + Wed Jan 01 00:00:00 2025 | 52 | 5 | 53 | 54 | tag5 +(2 rows) + +-- Constraints on columns not selected. +select metric4 from vectorqual where ts > '2021-01-01 00:00:00' order by 1; + metric4 +--------- + 44 + 54 + +(3 rows) + +-- ANDed constraints on multiple columns. +select * from vectorqual where ts > '2021-01-01 00:00:00' and metric3 > 40 order by vectorqual; + ts | metric2 | device | metric3 | metric4 | tag +--------------------------+---------+--------+---------+---------+------ + Sun Jan 01 00:00:00 2023 | 42 | 4 | 43 | 44 | + Wed Jan 01 00:00:00 2025 | 52 | 5 | 53 | 54 | tag5 +(2 rows) + +-- ORed constrainst on multiple columns (not vectorized for now). +set timescaledb.debug_require_vector_qual to 'forbid'; +select * from vectorqual where ts > '2021-01-01 00:00:00' or metric3 > 40 order by vectorqual; + ts | metric2 | device | metric3 | metric4 | tag +--------------------------+---------+--------+---------+---------+------ + Wed Jan 01 00:00:00 2020 | 12 | 1 | 777 | | + Fri Jan 01 00:00:00 2021 | 22 | 2 | 777 | | + Sat Jan 01 00:00:00 2022 | 32 | 3 | 33 | | + Sun Jan 01 00:00:00 2023 | 42 | 4 | 43 | 44 | + Wed Jan 01 00:00:00 2025 | 52 | 5 | 53 | 54 | tag5 +(5 rows) + +-- Test with unary operator. +create operator !! (function = 'bool', rightarg = int4); +select count(*) from vectorqual where !!metric3; + count +------- + 5 +(1 row) + +-- NullTest is not vectorized. +set timescaledb.debug_require_vector_qual to 'forbid'; +select count(*) from vectorqual where metric4 is null; + count +------- + 3 +(1 row) + +select count(*) from vectorqual where metric4 is not null; + count +------- + 2 +(1 row) + +-- Test that the vectorized quals are disabled by disabling the bulk decompression. 
+set timescaledb.enable_bulk_decompression to off; +set timescaledb.debug_require_vector_qual to 'forbid'; +select count(*) from vectorqual where metric4 > null; + count +------- + 0 +(1 row) + +set timescaledb.enable_bulk_decompression to on; +-- Test that the debug GUC works +\set ON_ERROR_STOP 0 +set timescaledb.debug_require_vector_qual to 'forbid'; +select count(*) from vectorqual where metric4 > 4; +ERROR: debug: encountered vector quals when they are disabled +set timescaledb.debug_require_vector_qual to 'only'; +select count(*) from vectorqual where metric4 is null; +ERROR: debug: encountered non-vector quals when they are disabled +\set ON_ERROR_STOP 1 diff --git a/tsl/test/expected/transparent_decompression-13.out b/tsl/test/expected/transparent_decompression-13.out index e0c406a74ad..83c11d8be3a 100644 --- a/tsl/test/expected/transparent_decompression-13.out +++ b/tsl/test/expected/transparent_decompression-13.out @@ -475,7 +475,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id, _hyper_1_1_chunk.device_id_peer, _hyper_1_1_chunk.v0, _hyper_1_1_chunk.v1, _hyper_1_1_chunk.v2, _hyper_1_1_chunk.v3 - Filter: (_hyper_1_1_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_1_1_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1800 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_15_chunk (actual rows=5 loops=1) @@ -490,7 +490,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=0 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_1_3_chunk.v3 > '10'::double precision) Rows Removed by Filter: 2520 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) @@ -789,7 +789,7 @@ LIMIT 10; --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit (actual rows=5 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=5 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1795 -> Sort (actual rows=5 loops=1) Sort Key: compress_hyper_5_15_chunk.device_id @@ -812,7 +812,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=15 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1785 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -832,7 +832,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=20 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + 
Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1780 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -858,7 +858,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1785 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 15 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -867,7 +867,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -892,7 +892,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1780 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 20 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -901,7 +901,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -926,7 +926,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1780 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 20 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -935,7 +935,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -961,7 +961,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_5_15_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows 
Removed by Filter: 5 @@ -971,7 +971,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_min_1 < 1) (24 rows) @@ -1140,7 +1140,7 @@ LIMIT 10; Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=356 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id, _hyper_1_1_chunk.device_id_peer, _hyper_1_1_chunk.v0, _hyper_1_1_chunk.v1, _hyper_1_1_chunk.v2, _hyper_1_1_chunk.v3 - Filter: (_hyper_1_1_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_1_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_15_chunk (actual rows=1 loops=1) @@ -1156,7 +1156,7 @@ LIMIT 10; Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (never executed) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (never executed) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1334,7 +1334,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1365,7 +1365,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan 
(DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1400,7 +1400,7 @@ LIMIT 100; Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=1 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk."time" - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=1 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1433,7 +1433,7 @@ LIMIT 100; Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=1 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=1 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, 
compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1465,7 +1465,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1499,7 +1499,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1530,7 +1530,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, 
compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1562,7 +1562,7 @@ ORDER BY device_id DESC, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1596,7 +1596,7 @@ ORDER BY device_id DESC, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1628,7 +1628,7 @@ ORDER BY time, Sort Key: _hyper_1_3_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (actual rows=2520 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) (15 rows) @@ -3472,21 +3472,21 @@ ORDER BY time, -> Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_4_chunk (actual rows=0 loops=1) Output: _hyper_2_4_chunk."time", _hyper_2_4_chunk.device_id, _hyper_2_4_chunk.device_id_peer, 
_hyper_2_4_chunk.v0, _hyper_2_4_chunk.v1, _hyper_2_4_chunk.v2, _hyper_2_4_chunk.v3 - Filter: (_hyper_2_4_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_4_chunk.v3 > '10'::double precision) Rows Removed by Filter: 360 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_17_chunk (actual rows=1 loops=1) Output: compress_hyper_6_17_chunk."time", compress_hyper_6_17_chunk.device_id, compress_hyper_6_17_chunk.device_id_peer, compress_hyper_6_17_chunk.v0, compress_hyper_6_17_chunk.v1, compress_hyper_6_17_chunk.v2, compress_hyper_6_17_chunk.v3, compress_hyper_6_17_chunk._ts_meta_count, compress_hyper_6_17_chunk._ts_meta_sequence_num, compress_hyper_6_17_chunk._ts_meta_min_3, compress_hyper_6_17_chunk._ts_meta_max_3, compress_hyper_6_17_chunk._ts_meta_min_1, compress_hyper_6_17_chunk._ts_meta_max_1, compress_hyper_6_17_chunk._ts_meta_min_2, compress_hyper_6_17_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_5_chunk (actual rows=0 loops=1) Output: _hyper_2_5_chunk."time", _hyper_2_5_chunk.device_id, _hyper_2_5_chunk.device_id_peer, _hyper_2_5_chunk.v0, _hyper_2_5_chunk.v1, _hyper_2_5_chunk.v2, _hyper_2_5_chunk.v3 - Filter: (_hyper_2_5_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_5_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1080 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_18_chunk (actual rows=3 loops=1) Output: compress_hyper_6_18_chunk."time", compress_hyper_6_18_chunk.device_id, compress_hyper_6_18_chunk.device_id_peer, compress_hyper_6_18_chunk.v0, compress_hyper_6_18_chunk.v1, compress_hyper_6_18_chunk.v2, compress_hyper_6_18_chunk.v3, compress_hyper_6_18_chunk._ts_meta_count, compress_hyper_6_18_chunk._ts_meta_sequence_num, compress_hyper_6_18_chunk._ts_meta_min_3, compress_hyper_6_18_chunk._ts_meta_max_3, compress_hyper_6_18_chunk._ts_meta_min_1, compress_hyper_6_18_chunk._ts_meta_max_1, compress_hyper_6_18_chunk._ts_meta_min_2, compress_hyper_6_18_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_6_chunk (actual rows=0 loops=1) Output: _hyper_2_6_chunk."time", _hyper_2_6_chunk.device_id, _hyper_2_6_chunk.device_id_peer, _hyper_2_6_chunk.v0, _hyper_2_6_chunk.v1, _hyper_2_6_chunk.v2, _hyper_2_6_chunk.v3 - Filter: (_hyper_2_6_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_6_chunk.v3 > '10'::double precision) Rows Removed by Filter: 360 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_19_chunk (actual rows=1 loops=1) @@ -3505,14 +3505,14 @@ ORDER BY time, Rows Removed by Filter: 504 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=0 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_10_chunk.v3 > '10'::double precision) Rows Removed by Filter: 504 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, 
compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=0 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_11_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1512 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) @@ -4034,7 +4034,7 @@ LIMIT 10; -> Merge Append (actual rows=5 loops=1) Sort Key: _hyper_2_4_chunk.device_id -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=1 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 359 -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_6_17_chunk.device_id @@ -4042,7 +4042,7 @@ LIMIT 10; -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) AND (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone)) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=3 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1077 -> Sort (actual rows=3 loops=1) Sort Key: compress_hyper_6_18_chunk.device_id @@ -4050,7 +4050,7 @@ LIMIT 10; -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) AND (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone)) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=1 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 359 -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_6_19_chunk.device_id @@ -4074,17 +4074,17 @@ LIMIT 10; Sort Method: quicksort -> Append (actual rows=15 loops=1) -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=3 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 357 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=9 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1071 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=3 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 
PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 357 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4105,17 +4105,17 @@ LIMIT 10; Sort Method: quicksort -> Append (actual rows=20 loops=1) -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=4 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 356 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=12 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1068 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=4 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 356 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4146,7 +4146,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=357 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4157,7 +4157,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1071 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 9 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4168,7 +4168,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=357 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4187,7 +4187,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on 
compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4195,7 +4195,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4227,7 +4227,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=356 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4238,7 +4238,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1068 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 12 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4249,7 +4249,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=356 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4268,7 +4268,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4276,7 +4276,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4308,7 +4308,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=356 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 
PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4319,7 +4319,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1068 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 12 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4330,7 +4330,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=356 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4349,7 +4349,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4357,7 +4357,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4380,17 +4380,17 @@ LIMIT 10; Sort Method: quicksort -> Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_17_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_18_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_19_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 @@ -4401,12 +4401,12 @@ LIMIT 10; -> Index Scan using _hyper_2_9_chunk_metrics_space_device_id_device_id_peer_v0_v1_2 on _hyper_2_9_chunk (actual rows=0 loops=1) Index Cond: (v0 < 1) -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan 
on compress_hyper_6_20_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_21_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 3 @@ -4780,7 +4780,7 @@ LIMIT 10; Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_4_chunk (actual rows=356 loops=1) Output: _hyper_2_4_chunk."time", _hyper_2_4_chunk.device_id, _hyper_2_4_chunk.device_id_peer, _hyper_2_4_chunk.v0, _hyper_2_4_chunk.v1, _hyper_2_4_chunk.v2, _hyper_2_4_chunk.v3 - Filter: (_hyper_2_4_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_4_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_17_chunk (actual rows=1 loops=1) @@ -4795,7 +4795,7 @@ LIMIT 10; Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (never executed) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (never executed) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5154,14 +5154,14 @@ ORDER BY time, Rows Removed by Filter: 169 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: 
(compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5196,14 +5196,14 @@ ORDER BY device_id, Index Cond: (_hyper_2_9_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, 
compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5245,7 +5245,7 @@ LIMIT 100; Heap Fetches: 1 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=1 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk."time" - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5256,7 +5256,7 @@ LIMIT 100; Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk."time" - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5301,7 +5301,7 @@ LIMIT 100; Heap Fetches: 1 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=1 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, 
compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5312,7 +5312,7 @@ LIMIT 100; Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5356,14 +5356,14 @@ ORDER BY device_id, Heap Fetches: 335 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, 
compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5405,14 +5405,14 @@ ORDER BY device_id, Heap Fetches: 335 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5455,14 +5455,14 @@ ORDER BY device_id, Rows Removed by Filter: 169 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 
'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5498,7 +5498,7 @@ ORDER BY device_id DESC, Index Cond: (_hyper_2_9_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5509,7 +5509,7 @@ ORDER BY device_id DESC, Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk 
(actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5554,14 +5554,14 @@ ORDER BY device_id DESC, Rows Removed by Filter: 169 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, 
compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5598,11 +5598,11 @@ ORDER BY time, Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 169 -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (actual rows=504 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (actual rows=1512 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on _hyper_2_12_chunk (actual rows=504 loops=1) @@ -9661,7 +9661,7 @@ EXPLAIN (costs off) SELECT * FROM metrics WHERE time > '2000-01-08' ORDER BY dev -> Index Scan using _hyper_1_2_chunk_metrics_time_idx on _hyper_1_2_chunk Index Cond: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_5_16_chunk_c_index_2 on compress_hyper_5_16_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) (10 rows) @@ -9678,11 +9678,11 @@ EXPLAIN (costs off) SELECT * FROM metrics_space WHERE time > '2000-01-08' ORDER -> Index Scan Backward using _hyper_2_9_chunk_metrics_space_device_id_device_id_peer_v0_v1_2 on _hyper_2_9_chunk Index Cond: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_6_20_chunk_c_space_index_2 on compress_hyper_6_20_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_6_21_chunk_c_space_index_2 on compress_hyper_6_21_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_device_id_device_id_peer_v0_v_2 on _hyper_2_12_chunk diff --git a/tsl/test/expected/transparent_decompression-14.out b/tsl/test/expected/transparent_decompression-14.out index 1d3fa294120..ff969d000f4 100644 --- a/tsl/test/expected/transparent_decompression-14.out +++ b/tsl/test/expected/transparent_decompression-14.out @@ -475,7 +475,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id, _hyper_1_1_chunk.device_id_peer, _hyper_1_1_chunk.v0, _hyper_1_1_chunk.v1, _hyper_1_1_chunk.v2, _hyper_1_1_chunk.v3 - Filter: 
(_hyper_1_1_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_1_1_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1800 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_15_chunk (actual rows=5 loops=1) @@ -490,7 +490,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=0 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_1_3_chunk.v3 > '10'::double precision) Rows Removed by Filter: 2520 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) @@ -789,7 +789,7 @@ LIMIT 10; --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit (actual rows=5 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=5 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1795 -> Sort (actual rows=5 loops=1) Sort Key: compress_hyper_5_15_chunk.device_id @@ -812,7 +812,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=15 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1785 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -832,7 +832,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=20 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1780 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -858,7 +858,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1785 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 15 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -867,7 +867,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -892,7 +892,7 @@ 
LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1780 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 20 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -901,7 +901,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -926,7 +926,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1780 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 20 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -935,7 +935,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -961,7 +961,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_5_15_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 5 @@ -971,7 +971,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_min_1 < 1) (24 rows) @@ -1140,7 +1140,7 @@ LIMIT 10; Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=356 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id, _hyper_1_1_chunk.device_id_peer, _hyper_1_1_chunk.v0, _hyper_1_1_chunk.v1, _hyper_1_1_chunk.v2, _hyper_1_1_chunk.v3 - Filter: (_hyper_1_1_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_1_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_15_chunk (actual rows=1 loops=1) @@ -1156,7 +1156,7 @@ LIMIT 10; Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (never executed) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, 
_hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (never executed) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1334,7 +1334,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1365,7 +1365,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1400,7 +1400,7 @@ LIMIT 100; Rows Removed by 
Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=1 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk."time" - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=1 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1433,7 +1433,7 @@ LIMIT 100; Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=1 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=1 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1465,7 +1465,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, 
compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1499,7 +1499,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1530,7 +1530,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1562,7 +1562,7 @@ ORDER BY device_id DESC, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, 
compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1596,7 +1596,7 @@ ORDER BY device_id DESC, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1628,7 +1628,7 @@ ORDER BY time, Sort Key: _hyper_1_3_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (actual rows=2520 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) (15 rows) @@ -3504,21 +3504,21 @@ ORDER BY time, -> Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_4_chunk (actual rows=0 loops=1) Output: _hyper_2_4_chunk."time", _hyper_2_4_chunk.device_id, _hyper_2_4_chunk.device_id_peer, _hyper_2_4_chunk.v0, _hyper_2_4_chunk.v1, _hyper_2_4_chunk.v2, _hyper_2_4_chunk.v3 - Filter: (_hyper_2_4_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_4_chunk.v3 > '10'::double precision) Rows Removed by Filter: 360 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_17_chunk (actual rows=1 loops=1) Output: compress_hyper_6_17_chunk."time", compress_hyper_6_17_chunk.device_id, compress_hyper_6_17_chunk.device_id_peer, compress_hyper_6_17_chunk.v0, compress_hyper_6_17_chunk.v1, compress_hyper_6_17_chunk.v2, compress_hyper_6_17_chunk.v3, compress_hyper_6_17_chunk._ts_meta_count, compress_hyper_6_17_chunk._ts_meta_sequence_num, compress_hyper_6_17_chunk._ts_meta_min_3, compress_hyper_6_17_chunk._ts_meta_max_3, compress_hyper_6_17_chunk._ts_meta_min_1, compress_hyper_6_17_chunk._ts_meta_max_1, compress_hyper_6_17_chunk._ts_meta_min_2, compress_hyper_6_17_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_5_chunk (actual rows=0 loops=1) Output: _hyper_2_5_chunk."time", _hyper_2_5_chunk.device_id, _hyper_2_5_chunk.device_id_peer, _hyper_2_5_chunk.v0, _hyper_2_5_chunk.v1, _hyper_2_5_chunk.v2, _hyper_2_5_chunk.v3 - Filter: (_hyper_2_5_chunk.v3 
> '10'::double precision) + Vectorized Filter: (_hyper_2_5_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1080 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_18_chunk (actual rows=3 loops=1) Output: compress_hyper_6_18_chunk."time", compress_hyper_6_18_chunk.device_id, compress_hyper_6_18_chunk.device_id_peer, compress_hyper_6_18_chunk.v0, compress_hyper_6_18_chunk.v1, compress_hyper_6_18_chunk.v2, compress_hyper_6_18_chunk.v3, compress_hyper_6_18_chunk._ts_meta_count, compress_hyper_6_18_chunk._ts_meta_sequence_num, compress_hyper_6_18_chunk._ts_meta_min_3, compress_hyper_6_18_chunk._ts_meta_max_3, compress_hyper_6_18_chunk._ts_meta_min_1, compress_hyper_6_18_chunk._ts_meta_max_1, compress_hyper_6_18_chunk._ts_meta_min_2, compress_hyper_6_18_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_6_chunk (actual rows=0 loops=1) Output: _hyper_2_6_chunk."time", _hyper_2_6_chunk.device_id, _hyper_2_6_chunk.device_id_peer, _hyper_2_6_chunk.v0, _hyper_2_6_chunk.v1, _hyper_2_6_chunk.v2, _hyper_2_6_chunk.v3 - Filter: (_hyper_2_6_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_6_chunk.v3 > '10'::double precision) Rows Removed by Filter: 360 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_19_chunk (actual rows=1 loops=1) @@ -3537,14 +3537,14 @@ ORDER BY time, Rows Removed by Filter: 504 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=0 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_10_chunk.v3 > '10'::double precision) Rows Removed by Filter: 504 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=0 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_11_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1512 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) @@ -4108,7 +4108,7 @@ LIMIT 10; -> Merge Append (actual rows=5 loops=1) Sort Key: _hyper_2_4_chunk.device_id -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=1 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 359 -> Sort (actual rows=1 loops=1) Sort Key: 
compress_hyper_6_17_chunk.device_id @@ -4116,7 +4116,7 @@ LIMIT 10; -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) AND (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone)) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=3 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1077 -> Sort (actual rows=3 loops=1) Sort Key: compress_hyper_6_18_chunk.device_id @@ -4124,7 +4124,7 @@ LIMIT 10; -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) AND (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone)) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=1 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 359 -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_6_19_chunk.device_id @@ -4148,17 +4148,17 @@ LIMIT 10; Sort Method: quicksort -> Append (actual rows=15 loops=1) -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=3 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 357 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=9 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1071 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=3 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 357 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4179,17 +4179,17 @@ LIMIT 10; Sort Method: quicksort -> Append (actual rows=20 loops=1) -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=4 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 356 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=12 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1068 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with 
time zone) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=4 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 356 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4220,7 +4220,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=357 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4231,7 +4231,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1071 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 9 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4242,7 +4242,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=357 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4261,7 +4261,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4269,7 +4269,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4301,7 +4301,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=356 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp 
with time zone) @@ -4312,7 +4312,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1068 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 12 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4323,7 +4323,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=356 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4342,7 +4342,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4350,7 +4350,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4382,7 +4382,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=356 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4393,7 +4393,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1068 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 12 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4404,7 +4404,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=356 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 
loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4423,7 +4423,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4431,7 +4431,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4464,7 +4464,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_17_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 @@ -4475,7 +4475,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_18_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 3 @@ -4486,7 +4486,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_19_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 @@ -4510,7 +4510,7 @@ LIMIT 10; Sort Key: _hyper_2_10_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_20_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 @@ -4521,7 +4521,7 @@ LIMIT 10; Sort Key: _hyper_2_11_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_21_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 3 @@ -4896,7 +4896,7 @@ LIMIT 10; Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_4_chunk (actual rows=356 loops=1) Output: _hyper_2_4_chunk."time", _hyper_2_4_chunk.device_id, _hyper_2_4_chunk.device_id_peer, _hyper_2_4_chunk.v0, _hyper_2_4_chunk.v1, _hyper_2_4_chunk.v2, _hyper_2_4_chunk.v3 - Filter: (_hyper_2_4_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_4_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_17_chunk (actual rows=1 loops=1) @@ -4911,7 +4911,7 @@ LIMIT 
10; Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (never executed) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (never executed) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5284,7 +5284,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5299,7 +5299,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, 
compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5334,14 +5334,14 @@ ORDER BY device_id, Index Cond: (_hyper_2_9_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5383,7 +5383,7 @@ LIMIT 100; Heap Fetches: 1 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=1 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk."time" - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", 
compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5394,7 +5394,7 @@ LIMIT 100; Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk."time" - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5439,7 +5439,7 @@ LIMIT 100; Heap Fetches: 1 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=1 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5450,7 +5450,7 @@ LIMIT 100; Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, 
compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5494,14 +5494,14 @@ ORDER BY device_id, Heap Fetches: 335 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5543,14 +5543,14 @@ ORDER BY device_id, Heap Fetches: 335 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on 
_timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5596,7 +5596,7 @@ ORDER BY device_id, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5607,7 +5607,7 @@ ORDER BY device_id, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - 
Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5643,7 +5643,7 @@ ORDER BY device_id DESC, Index Cond: (_hyper_2_9_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5654,7 +5654,7 @@ ORDER BY device_id DESC, Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5699,14 +5699,14 @@ ORDER BY device_id DESC, Rows Removed by Filter: 169 -> Custom Scan 
(DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5752,7 +5752,7 @@ ORDER BY time, Sort Key: _hyper_2_10_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (actual rows=504 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Sort (actual rows=1512 loops=1) @@ -5762,7 +5762,7 @@ ORDER BY time, Sort Key: _hyper_2_11_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (actual rows=1512 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on 
_hyper_2_12_chunk (actual rows=504 loops=1) @@ -9875,7 +9875,7 @@ EXPLAIN (costs off) SELECT * FROM metrics WHERE time > '2000-01-08' ORDER BY dev -> Index Scan using _hyper_1_2_chunk_metrics_time_idx on _hyper_1_2_chunk Index Cond: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_5_16_chunk_c_index_2 on compress_hyper_5_16_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) (10 rows) @@ -9892,11 +9892,11 @@ EXPLAIN (costs off) SELECT * FROM metrics_space WHERE time > '2000-01-08' ORDER -> Index Scan Backward using _hyper_2_9_chunk_metrics_space_device_id_device_id_peer_v0_v1_2 on _hyper_2_9_chunk Index Cond: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_6_20_chunk_c_space_index_2 on compress_hyper_6_20_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_6_21_chunk_c_space_index_2 on compress_hyper_6_21_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_device_id_device_id_peer_v0_v_2 on _hyper_2_12_chunk diff --git a/tsl/test/expected/transparent_decompression-15.out b/tsl/test/expected/transparent_decompression-15.out index 83bc868b064..867133ec27d 100644 --- a/tsl/test/expected/transparent_decompression-15.out +++ b/tsl/test/expected/transparent_decompression-15.out @@ -476,7 +476,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=0 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id, _hyper_1_1_chunk.device_id_peer, _hyper_1_1_chunk.v0, _hyper_1_1_chunk.v1, _hyper_1_1_chunk.v2, _hyper_1_1_chunk.v3 - Filter: (_hyper_1_1_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_1_1_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1800 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_15_chunk (actual rows=5 loops=1) @@ -491,7 +491,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=0 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_1_3_chunk.v3 > '10'::double precision) Rows Removed by Filter: 2520 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) @@ -790,7 +790,7 @@ LIMIT 10; 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit (actual rows=5 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=5 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1795 -> Sort (actual rows=5 loops=1) Sort Key: compress_hyper_5_15_chunk.device_id @@ -813,7 +813,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=15 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1785 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -833,7 +833,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=20 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1780 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -859,7 +859,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1785 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 15 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -868,7 +868,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -893,7 +893,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1780 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 20 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -902,7 +902,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: 
(_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -927,7 +927,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=1780 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 20 -> Seq Scan on compress_hyper_5_15_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -936,7 +936,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) (23 rows) @@ -962,7 +962,7 @@ LIMIT 10; Sort Key: _hyper_1_1_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_5_15_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 5 @@ -972,7 +972,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (never executed) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_5_16_chunk (never executed) Filter: (_ts_meta_min_1 < 1) (24 rows) @@ -1141,7 +1141,7 @@ LIMIT 10; Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (actual rows=356 loops=1) Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.device_id, _hyper_1_1_chunk.device_id_peer, _hyper_1_1_chunk.v0, _hyper_1_1_chunk.v1, _hyper_1_1_chunk.v2, _hyper_1_1_chunk.v3 - Filter: (_hyper_1_1_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_1_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_15_chunk (actual rows=1 loops=1) @@ -1157,7 +1157,7 @@ LIMIT 10; Sort Key: _hyper_1_3_chunk."time" -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (never executed) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (never executed) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, 
compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1335,7 +1335,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1366,7 +1366,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1401,7 +1401,7 @@ LIMIT 100; Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=1 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk."time" - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=1 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, 
compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1434,7 +1434,7 @@ LIMIT 100; Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=1 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=1 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1466,7 +1466,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1500,7 +1500,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_5_16_chunk__compressed_hypertable_5_device_id_de on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, 
compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1531,7 +1531,7 @@ ORDER BY device_id, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1563,7 +1563,7 @@ ORDER BY device_id DESC, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1597,7 +1597,7 @@ ORDER BY device_id DESC, Rows Removed by Filter: 845 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_3_chunk (actual rows=2520 loops=1) Output: _hyper_1_3_chunk."time", _hyper_1_3_chunk.device_id, _hyper_1_3_chunk.device_id_peer, _hyper_1_3_chunk.v0, _hyper_1_3_chunk.v1, _hyper_1_3_chunk.v2, _hyper_1_3_chunk.v3 - Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_1_3_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on 
_timescaledb_internal.compress_hyper_5_16_chunk (actual rows=5 loops=1) Output: compress_hyper_5_16_chunk."time", compress_hyper_5_16_chunk.device_id, compress_hyper_5_16_chunk.device_id_peer, compress_hyper_5_16_chunk.v0, compress_hyper_5_16_chunk.v1, compress_hyper_5_16_chunk.v2, compress_hyper_5_16_chunk.v3, compress_hyper_5_16_chunk._ts_meta_count, compress_hyper_5_16_chunk._ts_meta_sequence_num, compress_hyper_5_16_chunk._ts_meta_min_3, compress_hyper_5_16_chunk._ts_meta_max_3, compress_hyper_5_16_chunk._ts_meta_min_1, compress_hyper_5_16_chunk._ts_meta_max_1, compress_hyper_5_16_chunk._ts_meta_min_2, compress_hyper_5_16_chunk._ts_meta_max_2 @@ -1629,7 +1629,7 @@ ORDER BY time, Sort Key: _hyper_1_3_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk (actual rows=2520 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_5_16_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) (15 rows) @@ -3506,21 +3506,21 @@ ORDER BY time, -> Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_4_chunk (actual rows=0 loops=1) Output: _hyper_2_4_chunk."time", _hyper_2_4_chunk.device_id, _hyper_2_4_chunk.device_id_peer, _hyper_2_4_chunk.v0, _hyper_2_4_chunk.v1, _hyper_2_4_chunk.v2, _hyper_2_4_chunk.v3 - Filter: (_hyper_2_4_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_4_chunk.v3 > '10'::double precision) Rows Removed by Filter: 360 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_17_chunk (actual rows=1 loops=1) Output: compress_hyper_6_17_chunk."time", compress_hyper_6_17_chunk.device_id, compress_hyper_6_17_chunk.device_id_peer, compress_hyper_6_17_chunk.v0, compress_hyper_6_17_chunk.v1, compress_hyper_6_17_chunk.v2, compress_hyper_6_17_chunk.v3, compress_hyper_6_17_chunk._ts_meta_count, compress_hyper_6_17_chunk._ts_meta_sequence_num, compress_hyper_6_17_chunk._ts_meta_min_3, compress_hyper_6_17_chunk._ts_meta_max_3, compress_hyper_6_17_chunk._ts_meta_min_1, compress_hyper_6_17_chunk._ts_meta_max_1, compress_hyper_6_17_chunk._ts_meta_min_2, compress_hyper_6_17_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_5_chunk (actual rows=0 loops=1) Output: _hyper_2_5_chunk."time", _hyper_2_5_chunk.device_id, _hyper_2_5_chunk.device_id_peer, _hyper_2_5_chunk.v0, _hyper_2_5_chunk.v1, _hyper_2_5_chunk.v2, _hyper_2_5_chunk.v3 - Filter: (_hyper_2_5_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_5_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1080 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_18_chunk (actual rows=3 loops=1) Output: compress_hyper_6_18_chunk."time", compress_hyper_6_18_chunk.device_id, compress_hyper_6_18_chunk.device_id_peer, compress_hyper_6_18_chunk.v0, compress_hyper_6_18_chunk.v1, compress_hyper_6_18_chunk.v2, compress_hyper_6_18_chunk.v3, compress_hyper_6_18_chunk._ts_meta_count, compress_hyper_6_18_chunk._ts_meta_sequence_num, compress_hyper_6_18_chunk._ts_meta_min_3, compress_hyper_6_18_chunk._ts_meta_max_3, compress_hyper_6_18_chunk._ts_meta_min_1, compress_hyper_6_18_chunk._ts_meta_max_1, compress_hyper_6_18_chunk._ts_meta_min_2, compress_hyper_6_18_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_6_chunk 
(actual rows=0 loops=1) Output: _hyper_2_6_chunk."time", _hyper_2_6_chunk.device_id, _hyper_2_6_chunk.device_id_peer, _hyper_2_6_chunk.v0, _hyper_2_6_chunk.v1, _hyper_2_6_chunk.v2, _hyper_2_6_chunk.v3 - Filter: (_hyper_2_6_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_6_chunk.v3 > '10'::double precision) Rows Removed by Filter: 360 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_19_chunk (actual rows=1 loops=1) @@ -3539,14 +3539,14 @@ ORDER BY time, Rows Removed by Filter: 504 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=0 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_10_chunk.v3 > '10'::double precision) Rows Removed by Filter: 504 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=0 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_2_11_chunk.v3 > '10'::double precision) Rows Removed by Filter: 1512 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) @@ -4110,7 +4110,7 @@ LIMIT 10; -> Merge Append (actual rows=5 loops=1) Sort Key: _hyper_2_4_chunk.device_id -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=1 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 359 -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_6_17_chunk.device_id @@ -4118,7 +4118,7 @@ LIMIT 10; -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) AND (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone)) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=3 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1077 -> Sort (actual rows=3 loops=1) Sort Key: compress_hyper_6_18_chunk.device_id @@ -4126,7 +4126,7 @@ LIMIT 10; -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) AND (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time 
zone)) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=1 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 359 -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_6_19_chunk.device_id @@ -4150,17 +4150,17 @@ LIMIT 10; Sort Method: quicksort -> Append (actual rows=15 loops=1) -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=3 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 357 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=9 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1071 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=3 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 357 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4181,17 +4181,17 @@ LIMIT 10; Sort Method: quicksort -> Append (actual rows=20 loops=1) -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=4 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 356 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=12 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 1068 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=4 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 356 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_min_3 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4222,7 +4222,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=357 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 
1999 PST'::timestamp with time zone) @@ -4233,7 +4233,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1071 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 9 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4244,7 +4244,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=357 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4263,7 +4263,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4271,7 +4271,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4303,7 +4303,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=356 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4314,7 +4314,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1068 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 12 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4325,7 +4325,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=356 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on 
compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4344,7 +4344,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4352,7 +4352,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4384,7 +4384,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=356 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_17_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4395,7 +4395,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=1068 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 12 -> Seq Scan on compress_hyper_6_18_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4406,7 +4406,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=356 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 -> Seq Scan on compress_hyper_6_19_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -4425,7 +4425,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Sort (never executed) @@ -4433,7 +4433,7 @@ LIMIT 10; -> Sort (never executed) Sort Key: _hyper_2_11_chunk."time" -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (never executed) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp 
with time zone) -> Seq Scan on compress_hyper_6_21_chunk (never executed) Filter: (_ts_meta_max_3 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (never executed) @@ -4466,7 +4466,7 @@ LIMIT 10; Sort Key: _hyper_2_4_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_4_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_17_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 @@ -4477,7 +4477,7 @@ LIMIT 10; Sort Key: _hyper_2_5_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_5_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_18_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 3 @@ -4488,7 +4488,7 @@ LIMIT 10; Sort Key: _hyper_2_6_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_6_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_19_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 @@ -4512,7 +4512,7 @@ LIMIT 10; Sort Key: _hyper_2_10_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_20_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 1 @@ -4523,7 +4523,7 @@ LIMIT 10; Sort Key: _hyper_2_11_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) -> Seq Scan on compress_hyper_6_21_chunk (actual rows=0 loops=1) Filter: (_ts_meta_min_1 < 1) Rows Removed by Filter: 3 @@ -4898,7 +4898,7 @@ LIMIT 10; Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_4_chunk (actual rows=356 loops=1) Output: _hyper_2_4_chunk."time", _hyper_2_4_chunk.device_id, _hyper_2_4_chunk.device_id_peer, _hyper_2_4_chunk.v0, _hyper_2_4_chunk.v1, _hyper_2_4_chunk.v2, _hyper_2_4_chunk.v3 - Filter: (_hyper_2_4_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_4_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 4 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_17_chunk (actual rows=1 loops=1) @@ -4913,7 +4913,7 @@ LIMIT 10; Sort Key: _hyper_2_10_chunk."time" -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (never executed) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (never executed) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, 
compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5286,7 +5286,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5301,7 +5301,7 @@ ORDER BY time, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5336,14 +5336,14 @@ ORDER BY device_id, Index Cond: (_hyper_2_9_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using 
compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5385,7 +5385,7 @@ LIMIT 100; Heap Fetches: 1 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=1 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk."time" - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5396,7 +5396,7 @@ LIMIT 100; Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, 
_hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk."time" - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5441,7 +5441,7 @@ LIMIT 100; Heap Fetches: 1 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=1 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5452,7 +5452,7 @@ LIMIT 100; Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5496,14 +5496,14 @@ ORDER BY device_id, Heap Fetches: 335 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 
2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5545,14 +5545,14 @@ ORDER BY device_id, Heap Fetches: 335 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_20_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: 
_hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Index Scan using compress_hyper_6_21_chunk__compressed_hypertable_6_device_id_de on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5598,7 +5598,7 @@ ORDER BY device_id, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5609,7 +5609,7 @@ ORDER BY device_id, Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, 
compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5645,7 +5645,7 @@ ORDER BY device_id DESC, Index Cond: (_hyper_2_9_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 @@ -5656,7 +5656,7 @@ ORDER BY device_id DESC, Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Sort (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5701,14 +5701,14 @@ ORDER BY device_id DESC, Rows Removed by Filter: 169 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_10_chunk (actual rows=504 loops=1) Output: _hyper_2_10_chunk."time", _hyper_2_10_chunk.device_id, _hyper_2_10_chunk.device_id_peer, _hyper_2_10_chunk.v0, _hyper_2_10_chunk.v1, _hyper_2_10_chunk.v2, _hyper_2_10_chunk.v3 - Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_10_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_20_chunk (actual rows=1 loops=1) Output: compress_hyper_6_20_chunk."time", compress_hyper_6_20_chunk.device_id, compress_hyper_6_20_chunk.device_id_peer, compress_hyper_6_20_chunk.v0, compress_hyper_6_20_chunk.v1, compress_hyper_6_20_chunk.v2, compress_hyper_6_20_chunk.v3, 
compress_hyper_6_20_chunk._ts_meta_count, compress_hyper_6_20_chunk._ts_meta_sequence_num, compress_hyper_6_20_chunk._ts_meta_min_3, compress_hyper_6_20_chunk._ts_meta_max_3, compress_hyper_6_20_chunk._ts_meta_min_1, compress_hyper_6_20_chunk._ts_meta_max_1, compress_hyper_6_20_chunk._ts_meta_min_2, compress_hyper_6_20_chunk._ts_meta_max_2 Filter: (compress_hyper_6_20_chunk._ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_2_11_chunk (actual rows=1512 loops=1) Output: _hyper_2_11_chunk."time", _hyper_2_11_chunk.device_id, _hyper_2_11_chunk.device_id_peer, _hyper_2_11_chunk.v0, _hyper_2_11_chunk.v1, _hyper_2_11_chunk.v2, _hyper_2_11_chunk.v3 - Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_2_11_chunk."time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_6_21_chunk (actual rows=3 loops=1) Output: compress_hyper_6_21_chunk."time", compress_hyper_6_21_chunk.device_id, compress_hyper_6_21_chunk.device_id_peer, compress_hyper_6_21_chunk.v0, compress_hyper_6_21_chunk.v1, compress_hyper_6_21_chunk.v2, compress_hyper_6_21_chunk.v3, compress_hyper_6_21_chunk._ts_meta_count, compress_hyper_6_21_chunk._ts_meta_sequence_num, compress_hyper_6_21_chunk._ts_meta_min_3, compress_hyper_6_21_chunk._ts_meta_max_3, compress_hyper_6_21_chunk._ts_meta_min_1, compress_hyper_6_21_chunk._ts_meta_max_1, compress_hyper_6_21_chunk._ts_meta_min_2, compress_hyper_6_21_chunk._ts_meta_max_2 @@ -5754,7 +5754,7 @@ ORDER BY time, Sort Key: _hyper_2_10_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk (actual rows=504 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_20_chunk (actual rows=1 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Sort (actual rows=1512 loops=1) @@ -5764,7 +5764,7 @@ ORDER BY time, Sort Key: _hyper_2_11_chunk."time" Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk (actual rows=1512 loops=1) - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_6_21_chunk (actual rows=3 loops=1) Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_time_idx on _hyper_2_12_chunk (actual rows=504 loops=1) @@ -9878,7 +9878,7 @@ EXPLAIN (costs off) SELECT * FROM metrics WHERE time > '2000-01-08' ORDER BY dev -> Index Scan using _hyper_1_2_chunk_metrics_time_idx on _hyper_1_2_chunk Index Cond: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_5_16_chunk_c_index_2 on compress_hyper_5_16_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) (10 rows) @@ -9895,11 +9895,11 @@ EXPLAIN (costs off) SELECT * FROM metrics_space WHERE time > '2000-01-08' ORDER -> Index Scan Backward 
using _hyper_2_9_chunk_metrics_space_device_id_device_id_peer_v0_v1_2 on _hyper_2_9_chunk Index Cond: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_10_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_6_20_chunk_c_space_index_2 on compress_hyper_6_20_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_2_11_chunk - Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan using compress_hyper_6_21_chunk_c_space_index_2 on compress_hyper_6_21_chunk Filter: (_ts_meta_max_3 > 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_2_12_chunk_metrics_space_device_id_device_id_peer_v0_v_2 on _hyper_2_12_chunk diff --git a/tsl/test/expected/transparent_decompression_join_index.out b/tsl/test/expected/transparent_decompression_join_index.out index 09149d22b3d..355a0998ec2 100644 --- a/tsl/test/expected/transparent_decompression_join_index.out +++ b/tsl/test/expected/transparent_decompression_join_index.out @@ -64,7 +64,7 @@ order by test.time; -> Seq Scan on test_copy Filter: (((a)::text = ANY ('{lat,lon}'::text[])) AND (b = 1)) -> Custom Scan (DecompressChunk) on _hyper_1_1_chunk - Filter: (("time" >= 'Wed Jan 01 00:00:00 2020 PST'::timestamp with time zone) AND ("time" <= 'Wed Jan 01 00:02:00 2020 PST'::timestamp with time zone)) + Vectorized Filter: (("time" >= 'Wed Jan 01 00:00:00 2020 PST'::timestamp with time zone) AND ("time" <= 'Wed Jan 01 00:02:00 2020 PST'::timestamp with time zone)) -> Index Scan using compress_hyper_2_2_chunk__compressed_hypertable_2_a_b__ts_meta_ on compress_hyper_2_2_chunk Index Cond: ((a = (test_copy.a)::text) AND (b = test_copy.b)) Filter: ((_ts_meta_max_1 >= 'Wed Jan 01 00:00:00 2020 PST'::timestamp with time zone) AND (_ts_meta_min_1 <= 'Wed Jan 01 00:02:00 2020 PST'::timestamp with time zone)) diff --git a/tsl/test/expected/transparent_decompression_ordered_index-13.out b/tsl/test/expected/transparent_decompression_ordered_index-13.out index 7badbcb2ee1..e18d7d65d62 100644 --- a/tsl/test/expected/transparent_decompression_ordered_index-13.out +++ b/tsl/test/expected/transparent_decompression_ordered_index-13.out @@ -637,17 +637,18 @@ FROM metrics_ordered_idx met join lookup ON met.device_id = lookup.did and met.v0 = lookup.version WHERE met.time > '2000-01-19 19:00:00-05' and met.time < '2000-01-20 20:00:00-05'; - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Nested Loop (actual rows=2 loops=1) -> Values Scan on "*VALUES*" (actual rows=2 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=2) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND ("*VALUES*".column2 = v0)) + Filter: ("*VALUES*".column2 = v0) Rows Removed by 
Filter: 47 + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone)) -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=2) Index Cond: (device_id = "*VALUES*".column1) Filter: ((_ts_meta_max_1 > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone)) -(8 rows) +(9 rows) --add filter to segment by (device_id) and compressed attr column (v0) :PREFIX @@ -658,14 +659,14 @@ ON met.device_id = lookup.did and met.v0 = lookup.version WHERE met.time > '2000-01-19 19:00:00-05' and met.time < '2000-01-20 20:00:00-05' and met.device_id = 3 and met.v0 = 5; - QUERY PLAN -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Nested Loop (actual rows=1 loops=1) -> Values Scan on "*VALUES*" (actual rows=1 loops=1) Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=1) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) Rows Removed by Filter: 47 -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=1) Index Cond: (device_id = 3) @@ -687,7 +688,7 @@ WHERE met.time = '2000-01-19 19:00:00-05' Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=0 loops=1) - Filter: ((v0 = 5) AND ("time" = 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone)) + Vectorized Filter: ((v0 = 5) AND ("time" = 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone)) Rows Removed by Filter: 48 -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=1) Index Cond: ((device_id = 3) AND (device_id_peer = 3)) @@ -713,12 +714,13 @@ ON met.device_id = q.node and met.device_id_peer = q.device_id_peer -> Seq Scan on nodetime (actual rows=1 loops=1) -> Values Scan on "*VALUES*" (actual rows=2 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_4_chunk met (actual rows=1 loops=1) - Filter: ((v0 > 2) AND ("time" = 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND ("*VALUES*".column3 = v0)) + Filter: ("*VALUES*".column3 = v0) Rows Removed by Filter: 47 + Vectorized Filter: ((v0 > 2) AND ("time" = 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone)) -> Index Scan using compress_hyper_2_9_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_9_chunk (actual rows=1 loops=1) Index Cond: ((device_id = "*VALUES*".column1) AND (device_id_peer = "*VALUES*".column2)) Filter: ((_ts_meta_min_1 <= 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND (_ts_meta_max_1 >= 'Fri Jan 19 17:00:00 2018 PST'::timestamp with 
time zone)) -(13 rows) +(14 rows) -- filter on compressed attr (v0) with seqscan enabled and indexscan -- disabled. filters on compressed attr should be above the seq scan. @@ -741,7 +743,7 @@ WHERE met.time > '2000-01-19 19:00:00-05' Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=1) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) Rows Removed by Filter: 47 -> Seq Scan on compress_hyper_2_8_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (device_id = 3) AND (device_id_peer = 3)) @@ -828,21 +830,23 @@ ORDER BY 1, -> Merge Append (actual rows=10 loops=1) Sort Key: _hyper_1_4_chunk."time" DESC -> Custom Scan (DecompressChunk) on _hyper_1_4_chunk (actual rows=9 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND ("time" < now())) + Filter: ("time" < now()) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_2_9_chunk._ts_meta_sequence_num DESC Sort Method: quicksort -> Seq Scan on compress_hyper_2_9_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND (device_id = 4) AND (device_id_peer = 5)) -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk (actual rows=1 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND ("time" < now())) + Filter: ("time" < now()) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_2_10_chunk._ts_meta_sequence_num DESC Sort Method: quicksort -> Seq Scan on compress_hyper_2_10_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND (device_id = 4) AND (device_id_peer = 5)) Rows Removed by Filter: 4 -(24 rows) +(26 rows) :PREFIX SELECT m.device_id, @@ -978,14 +982,15 @@ ORDER BY m.v0; -> Hash Join (actual rows=0 loops=1) Hash Cond: (m.device_id = d.device_id) -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk m (actual rows=0 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_2_10_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) Rows Removed by Filter: 5 -> Hash (actual rows=7 loops=1) Buckets: 1024 Batches: 1 -> Seq Scan on device_tbl d (actual rows=7 loops=1) -(13 rows) +(14 rows) -- no matches in metrics_ordered_idx but one row in device_tbl :PREFIX @@ -1008,11 +1013,12 @@ ORDER BY m.v0; Filter: (device_id = 8) Rows Removed by Filter: 6 -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk m 
(actual rows=0 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_2_10_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND (device_id = 8) AND (_ts_meta_min_1 < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) Rows Removed by Filter: 5 -(13 rows) +(14 rows) -- no matches in device_tbl but 1 row in metrics_ordered_idx :PREFIX diff --git a/tsl/test/expected/transparent_decompression_ordered_index-14.out b/tsl/test/expected/transparent_decompression_ordered_index-14.out index 5fbf190f367..fcedf0391e3 100644 --- a/tsl/test/expected/transparent_decompression_ordered_index-14.out +++ b/tsl/test/expected/transparent_decompression_ordered_index-14.out @@ -637,17 +637,18 @@ FROM metrics_ordered_idx met join lookup ON met.device_id = lookup.did and met.v0 = lookup.version WHERE met.time > '2000-01-19 19:00:00-05' and met.time < '2000-01-20 20:00:00-05'; - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Nested Loop (actual rows=2 loops=1) -> Values Scan on "*VALUES*" (actual rows=2 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=2) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND ("*VALUES*".column2 = v0)) + Filter: ("*VALUES*".column2 = v0) Rows Removed by Filter: 47 + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone)) -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=2) Index Cond: (device_id = "*VALUES*".column1) Filter: ((_ts_meta_max_1 > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone)) -(8 rows) +(9 rows) --add filter to segment by (device_id) and compressed attr column (v0) :PREFIX @@ -658,14 +659,14 @@ ON met.device_id = lookup.did and met.v0 = lookup.version WHERE met.time > '2000-01-19 19:00:00-05' and met.time < '2000-01-20 20:00:00-05' and met.device_id = 3 and met.v0 = 5; - QUERY PLAN -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Nested Loop (actual rows=1 loops=1) -> Values Scan on "*VALUES*" (actual rows=1 loops=1) Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=1) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 
PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) Rows Removed by Filter: 47 -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=1) Index Cond: (device_id = 3) @@ -687,7 +688,7 @@ WHERE met.time = '2000-01-19 19:00:00-05' Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=0 loops=1) - Filter: ((v0 = 5) AND ("time" = 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone)) + Vectorized Filter: ((v0 = 5) AND ("time" = 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone)) Rows Removed by Filter: 48 -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=1) Index Cond: ((device_id = 3) AND (device_id_peer = 3)) @@ -713,12 +714,13 @@ ON met.device_id = q.node and met.device_id_peer = q.device_id_peer -> Seq Scan on nodetime (actual rows=1 loops=1) -> Values Scan on "*VALUES*" (actual rows=2 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_4_chunk met (actual rows=1 loops=1) - Filter: ((v0 > 2) AND ("time" = 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND ("*VALUES*".column3 = v0)) + Filter: ("*VALUES*".column3 = v0) Rows Removed by Filter: 47 + Vectorized Filter: ((v0 > 2) AND ("time" = 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone)) -> Index Scan using compress_hyper_2_9_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_9_chunk (actual rows=1 loops=1) Index Cond: ((device_id = "*VALUES*".column1) AND (device_id_peer = "*VALUES*".column2)) Filter: ((_ts_meta_min_1 <= 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND (_ts_meta_max_1 >= 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone)) -(13 rows) +(14 rows) -- filter on compressed attr (v0) with seqscan enabled and indexscan -- disabled. filters on compressed attr should be above the seq scan. 
@@ -741,7 +743,7 @@ WHERE met.time > '2000-01-19 19:00:00-05' Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=1) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) Rows Removed by Filter: 47 -> Seq Scan on compress_hyper_2_8_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (device_id = 3) AND (device_id_peer = 3)) @@ -828,21 +830,23 @@ ORDER BY 1, -> Merge Append (actual rows=10 loops=1) Sort Key: _hyper_1_4_chunk."time" DESC -> Custom Scan (DecompressChunk) on _hyper_1_4_chunk (actual rows=9 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND ("time" < now())) + Filter: ("time" < now()) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_2_9_chunk._ts_meta_sequence_num DESC Sort Method: quicksort -> Seq Scan on compress_hyper_2_9_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND (device_id = 4) AND (device_id_peer = 5)) -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk (actual rows=1 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND ("time" < now())) + Filter: ("time" < now()) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_2_10_chunk._ts_meta_sequence_num DESC Sort Method: quicksort -> Seq Scan on compress_hyper_2_10_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND (device_id = 4) AND (device_id_peer = 5)) Rows Removed by Filter: 4 -(24 rows) +(26 rows) :PREFIX SELECT m.device_id, @@ -978,14 +982,15 @@ ORDER BY m.v0; -> Hash Join (actual rows=0 loops=1) Hash Cond: (m.device_id = d.device_id) -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk m (actual rows=0 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_2_10_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) Rows Removed by Filter: 5 -> Hash (actual rows=7 loops=1) Buckets: 1024 Batches: 1 -> Seq Scan on device_tbl d (actual rows=7 loops=1) -(13 rows) +(14 rows) -- no matches in metrics_ordered_idx but one row in device_tbl :PREFIX @@ -1008,11 +1013,12 @@ ORDER BY m.v0; Filter: (device_id = 8) Rows Removed by Filter: 6 -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk m (actual rows=0 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time 
zone)) + Filter: ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_2_10_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND (device_id = 8) AND (_ts_meta_min_1 < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) Rows Removed by Filter: 5 -(13 rows) +(14 rows) -- no matches in device_tbl but 1 row in metrics_ordered_idx :PREFIX diff --git a/tsl/test/expected/transparent_decompression_ordered_index-15.out b/tsl/test/expected/transparent_decompression_ordered_index-15.out index c8b29b8db5e..4ce20a80bd0 100644 --- a/tsl/test/expected/transparent_decompression_ordered_index-15.out +++ b/tsl/test/expected/transparent_decompression_ordered_index-15.out @@ -639,17 +639,18 @@ FROM metrics_ordered_idx met join lookup ON met.device_id = lookup.did and met.v0 = lookup.version WHERE met.time > '2000-01-19 19:00:00-05' and met.time < '2000-01-20 20:00:00-05'; - QUERY PLAN ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Nested Loop (actual rows=2 loops=1) -> Values Scan on "*VALUES*" (actual rows=2 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=2) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND ("*VALUES*".column2 = v0)) + Filter: ("*VALUES*".column2 = v0) Rows Removed by Filter: 47 + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone)) -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=2) Index Cond: (device_id = "*VALUES*".column1) Filter: ((_ts_meta_max_1 > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone)) -(8 rows) +(9 rows) --add filter to segment by (device_id) and compressed attr column (v0) :PREFIX @@ -660,14 +661,14 @@ ON met.device_id = lookup.did and met.v0 = lookup.version WHERE met.time > '2000-01-19 19:00:00-05' and met.time < '2000-01-20 20:00:00-05' and met.device_id = 3 and met.v0 = 5; - QUERY PLAN -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Nested Loop (actual rows=1 loops=1) -> Values Scan on "*VALUES*" (actual rows=1 loops=1) Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=1) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 
2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) Rows Removed by Filter: 47 -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=1) Index Cond: (device_id = 3) @@ -689,7 +690,7 @@ WHERE met.time = '2000-01-19 19:00:00-05' Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=0 loops=1) - Filter: ((v0 = 5) AND ("time" = 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone)) + Vectorized Filter: ((v0 = 5) AND ("time" = 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone)) Rows Removed by Filter: 48 -> Index Scan using compress_hyper_2_8_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_8_chunk (actual rows=1 loops=1) Index Cond: ((device_id = 3) AND (device_id_peer = 3)) @@ -715,12 +716,13 @@ ON met.device_id = q.node and met.device_id_peer = q.device_id_peer -> Seq Scan on nodetime (actual rows=1 loops=1) -> Values Scan on "*VALUES*" (actual rows=2 loops=1) -> Custom Scan (DecompressChunk) on _hyper_1_4_chunk met (actual rows=1 loops=1) - Filter: ((v0 > 2) AND ("time" = 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND ("*VALUES*".column3 = v0)) + Filter: ("*VALUES*".column3 = v0) Rows Removed by Filter: 47 + Vectorized Filter: ((v0 > 2) AND ("time" = 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone)) -> Index Scan using compress_hyper_2_9_chunk__compressed_hypertable_2_device_id_dev on compress_hyper_2_9_chunk (actual rows=1 loops=1) Index Cond: ((device_id = "*VALUES*".column1) AND (device_id_peer = "*VALUES*".column2)) Filter: ((_ts_meta_min_1 <= 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone) AND (_ts_meta_max_1 >= 'Fri Jan 19 17:00:00 2018 PST'::timestamp with time zone)) -(13 rows) +(14 rows) -- filter on compressed attr (v0) with seqscan enabled and indexscan -- disabled. filters on compressed attr should be above the seq scan. 
@@ -743,7 +745,7 @@ WHERE met.time > '2000-01-19 19:00:00-05' Filter: ((column1 = 3) AND (column2 = 5)) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_1_3_chunk met (actual rows=1 loops=1) - Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) + Vectorized Filter: (("time" > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND ("time" < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (v0 = 5)) Rows Removed by Filter: 47 -> Seq Scan on compress_hyper_2_8_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Wed Jan 19 16:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < 'Thu Jan 20 17:00:00 2000 PST'::timestamp with time zone) AND (device_id = 3) AND (device_id_peer = 3)) @@ -830,21 +832,23 @@ ORDER BY 1, -> Merge Append (actual rows=10 loops=1) Sort Key: _hyper_1_4_chunk."time" DESC -> Custom Scan (DecompressChunk) on _hyper_1_4_chunk (actual rows=9 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND ("time" < now())) + Filter: ("time" < now()) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_2_9_chunk._ts_meta_sequence_num DESC Sort Method: quicksort -> Seq Scan on compress_hyper_2_9_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND (device_id = 4) AND (device_id_peer = 5)) -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk (actual rows=1 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND ("time" < now())) + Filter: ("time" < now()) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) -> Sort (actual rows=1 loops=1) Sort Key: compress_hyper_2_10_chunk._ts_meta_sequence_num DESC Sort Method: quicksort -> Seq Scan on compress_hyper_2_10_chunk (actual rows=1 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2002 PST'::timestamp with time zone) AND (device_id = 4) AND (device_id_peer = 5)) Rows Removed by Filter: 4 -(24 rows) +(26 rows) :PREFIX SELECT m.device_id, @@ -980,14 +984,15 @@ ORDER BY m.v0; -> Hash Join (actual rows=0 loops=1) Hash Cond: (m.device_id = d.device_id) -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk m (actual rows=0 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_2_10_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) Rows Removed by Filter: 5 -> Hash (actual rows=7 loops=1) Buckets: 1024 Batches: 1 -> Seq Scan on device_tbl d (actual rows=7 loops=1) -(13 rows) +(14 rows) -- no matches in metrics_ordered_idx but one row in device_tbl :PREFIX @@ -1010,11 +1015,12 @@ ORDER BY m.v0; Filter: (device_id = 8) Rows Removed by Filter: 6 -> Custom Scan (DecompressChunk) on _hyper_1_5_chunk m (actual rows=0 loops=1) - Filter: (("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time 
zone)) + Filter: ("time" < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_2_10_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Tue Jan 01 00:00:00 2019 PST'::timestamp with time zone) AND (device_id = 8) AND (_ts_meta_min_1 < ('2000-01-01 0:00:00+0'::cstring)::timestamp with time zone)) Rows Removed by Filter: 5 -(13 rows) +(14 rows) -- no matches in device_tbl but 1 row in metrics_ordered_idx :PREFIX diff --git a/tsl/test/shared/expected/ordered_append-13.out b/tsl/test/shared/expected/ordered_append-13.out index 94b8d1b4e86..c5aa866ae13 100644 --- a/tsl/test/shared/expected/ordered_append-13.out +++ b/tsl/test/shared/expected/ordered_append-13.out @@ -2385,13 +2385,13 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=20385 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 4615 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=25 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 5 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=25190 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) (15 rows) @@ -2409,11 +2409,11 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=25190 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=20385 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 4615 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=25 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) @@ -2487,12 +2487,13 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed (actual rows=7195 loops=1) Chunks excluded during startup: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=7195 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 7805 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 15 -(12 rows) +(13 rows) :PREFIX SELECT time @@ -2509,12 
+2510,13 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed (actual rows=3595 loops=1) Chunks excluded during startup: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3595 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 6405 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=10 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 20 -(12 rows) +(13 rows) -- min/max queries :PREFIX @@ -2790,17 +2792,17 @@ QUERY PLAN Custom Scan (ChunkAppend) on metrics_compressed (actual rows=13674 loops=1) Order: metrics_compressed."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3370,33 +3372,33 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=12231 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 2769 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on 
_hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=15114 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=18 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) (35 rows) @@ -3414,31 +3416,31 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=15114 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=18 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=12231 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 
00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 2769 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) @@ -3586,40 +3588,46 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=7195 loops=1) -> Merge Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 12 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Merge Append (actual rows=7195 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=1439 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1561 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4317 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 
2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 4683 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=9 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 9 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=1439 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1561 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 3 -(40 rows) +(46 rows) :PREFIX SELECT time @@ -3636,40 +3644,46 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=3595 loops=1) -> Merge Append (actual rows=3595 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=719 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1281 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=2 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=2157 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 3843 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 12 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=719 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1281 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=2 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Merge Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 
2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 6 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 18 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 6 -(40 rows) +(46 rows) -- min/max queries :PREFIX @@ -4099,17 +4113,17 @@ QUERY PLAN Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=13674 loops=1) Order: metrics_space_compressed."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) diff --git a/tsl/test/shared/expected/ordered_append-14.out 
b/tsl/test/shared/expected/ordered_append-14.out index 94b8d1b4e86..c5aa866ae13 100644 --- a/tsl/test/shared/expected/ordered_append-14.out +++ b/tsl/test/shared/expected/ordered_append-14.out @@ -2385,13 +2385,13 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=20385 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 4615 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=25 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 5 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=25190 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) (15 rows) @@ -2409,11 +2409,11 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=25190 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=20385 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 4615 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=25 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) @@ -2487,12 +2487,13 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed (actual rows=7195 loops=1) Chunks excluded during startup: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=7195 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 7805 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 15 -(12 rows) +(13 rows) :PREFIX SELECT time @@ -2509,12 +2510,13 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed (actual rows=3595 loops=1) Chunks excluded during startup: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3595 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 6405 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=10 
loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 20 -(12 rows) +(13 rows) -- min/max queries :PREFIX @@ -2790,17 +2792,17 @@ QUERY PLAN Custom Scan (ChunkAppend) on metrics_compressed (actual rows=13674 loops=1) Order: metrics_compressed."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3370,33 +3372,33 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=12231 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 2769 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 
00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=15114 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=18 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) (35 rows) @@ -3414,31 +3416,31 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=15114 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=18 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=12231 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 2769 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri 
Jan 07 00:00:00 2000 PST'::timestamp with time zone) @@ -3586,40 +3588,46 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=7195 loops=1) -> Merge Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 12 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Merge Append (actual rows=7195 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=1439 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1561 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4317 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 4683 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=9 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 9 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=1439 loops=1) 
- Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1561 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 3 -(40 rows) +(46 rows) :PREFIX SELECT time @@ -3636,40 +3644,46 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=3595 loops=1) -> Merge Append (actual rows=3595 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=719 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1281 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=2 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=2157 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 3843 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 12 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=719 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1281 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=2 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Merge Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 6 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" > 'Fri Jan 
07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 18 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 6 -(40 rows) +(46 rows) -- min/max queries :PREFIX @@ -4099,17 +4113,17 @@ QUERY PLAN Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=13674 loops=1) Order: metrics_space_compressed."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) diff --git a/tsl/test/shared/expected/ordered_append-15.out b/tsl/test/shared/expected/ordered_append-15.out index 78de3f9f723..c90315dc2f5 100644 --- a/tsl/test/shared/expected/ordered_append-15.out +++ b/tsl/test/shared/expected/ordered_append-15.out @@ -2404,13 +2404,13 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=20385 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 4615 -> Seq Scan on compress_hyper_X_X_chunk (actual 
rows=25 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 5 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=25190 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) (15 rows) @@ -2428,11 +2428,11 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=25190 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=30 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=20385 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 4615 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=25 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) @@ -2506,12 +2506,13 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed (actual rows=7195 loops=1) Chunks excluded during startup: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=7195 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 7805 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 15 -(12 rows) +(13 rows) :PREFIX SELECT time @@ -2528,12 +2529,13 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed (actual rows=3595 loops=1) Chunks excluded during startup: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3595 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 6405 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=10 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 20 -(12 rows) +(13 rows) -- min/max queries :PREFIX @@ -2809,17 +2811,17 @@ QUERY PLAN Custom Scan (ChunkAppend) on metrics_compressed (actual rows=13674 loops=1) Order: metrics_compressed."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with 
time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3390,33 +3392,33 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=12231 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 2769 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=15114 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=18 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on 
_hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) (35 rows) @@ -3434,31 +3436,31 @@ QUERY PLAN Sort Method: top-N heapsort -> Append (actual rows=45575 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=15114 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=18 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=12231 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 2769 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4077 loops=1) - Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 923 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) @@ -3606,40 +3608,46 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=7195 loops=1) -> Merge Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on 
compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 12 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Merge Append (actual rows=7195 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=1439 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1561 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=4317 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 4683 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=9 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 9 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=1439 loops=1) - Filter: (("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND ("time" > ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" > ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1561 + Vectorized Filter: ("time" < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: ((_ts_meta_min_1 < 'Mon Jan 10 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_max_1 > ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 3 -(40 rows) +(46 rows) 
:PREFIX SELECT time @@ -3656,40 +3664,46 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=3595 loops=1) -> Merge Append (actual rows=3595 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=719 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1281 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=2 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=2157 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 3843 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=6 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 12 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=719 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) Rows Removed by Filter: 1281 + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=2 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 4 -> Merge Append (actual rows=0 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 6 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 18 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - 
Filter: (("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND ("time" < ('2000-01-08'::cstring)::timestamp with time zone)) + Filter: ("time" < ('2000-01-08'::cstring)::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=0 loops=1) Filter: ((_ts_meta_max_1 > 'Fri Jan 07 00:00:00 2000 PST'::timestamp with time zone) AND (_ts_meta_min_1 < ('2000-01-08'::cstring)::timestamp with time zone)) Rows Removed by Filter: 6 -(40 rows) +(46 rows) -- min/max queries :PREFIX @@ -4119,17 +4133,17 @@ QUERY PLAN Custom Scan (ChunkAppend) on metrics_space_compressed (actual rows=13674 loops=1) Order: metrics_space_compressed."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) diff --git a/tsl/test/shared/expected/ordered_append_join-13.out b/tsl/test/shared/expected/ordered_append_join-13.out index 741e37871c4..5ecd1344355 100644 --- a/tsl/test/shared/expected/ordered_append_join-13.out +++ b/tsl/test/shared/expected/ordered_append_join-13.out @@ -2092,17 +2092,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed o1 (actual rows=13674 loops=1) Order: o1."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk 
(actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -2110,17 +2110,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed o2 (actual rows=13674 loops=1) Order: o2."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=4 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -2148,11 +2148,11 @@ QUERY PLAN Sort Method: quicksort -> Append (actual rows=26390 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=17990 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=8400 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1790 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) @@ -3146,17 +3146,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed o1 (actual rows=13674 loops=1) Order: o1."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" 
< 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3164,17 +3164,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed o2 (actual rows=13674 loops=1) Order: o2."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3202,31 +3202,31 @@ QUERY PLAN Sort Method: quicksort -> Append (actual rows=26390 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=4 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 
PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=10794 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=12 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_3 (actual rows=3598 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=4 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_4 (actual rows=1680 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 358 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_5 (actual rows=5040 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1074 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=9 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 9 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_6 (actual rows=1680 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 358 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) diff --git a/tsl/test/shared/expected/ordered_append_join-14.out b/tsl/test/shared/expected/ordered_append_join-14.out index 741e37871c4..5ecd1344355 100644 --- a/tsl/test/shared/expected/ordered_append_join-14.out +++ b/tsl/test/shared/expected/ordered_append_join-14.out @@ -2092,17 +2092,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed o1 (actual rows=13674 loops=1) Order: o1."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 
00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -2110,17 +2110,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed o2 (actual rows=13674 loops=1) Order: o2."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=4 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -2148,11 +2148,11 @@ QUERY PLAN Sort Method: quicksort -> Append (actual rows=26390 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=17990 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=8400 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1790 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) @@ -3146,17 +3146,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed o1 (actual rows=13674 loops=1) Order: o1."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 
'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3164,17 +3164,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed o2 (actual rows=13674 loops=1) Order: o2."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3202,31 +3202,31 @@ QUERY PLAN Sort Method: quicksort -> Append (actual rows=26390 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=4 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 
(actual rows=10794 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=12 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_3 (actual rows=3598 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=4 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_4 (actual rows=1680 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 358 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_5 (actual rows=5040 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1074 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=9 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 9 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_6 (actual rows=1680 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 358 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) diff --git a/tsl/test/shared/expected/ordered_append_join-15.out b/tsl/test/shared/expected/ordered_append_join-15.out index 8cb1611f4e2..2e84799fd92 100644 --- a/tsl/test/shared/expected/ordered_append_join-15.out +++ b/tsl/test/shared/expected/ordered_append_join-15.out @@ -2108,17 +2108,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed o1 (actual rows=13674 loops=1) Order: o1."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on 
_hyper_X_X_chunk o1_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -2126,17 +2126,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_compressed o2 (actual rows=13674 loops=1) Order: o2."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=4 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on compress_hyper_X_X_chunk compress_hyper_X_X_chunk_1 (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -2164,11 +2164,11 @@ QUERY PLAN Sort Method: quicksort -> Append (actual rows=26390 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=17990 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=8400 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1790 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=15 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) @@ -3166,17 +3166,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed o1 (actual rows=13674 loops=1) Order: o1."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward 
using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 1) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3184,17 +3184,17 @@ QUERY PLAN -> Custom Scan (ChunkAppend) on metrics_space_compressed o2 (actual rows=13674 loops=1) Order: o2."time" -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=4 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_2 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o2_3 (actual rows=5038 loops=1) - Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_6_device_id__t on compress_hyper_X_X_chunk (actual rows=6 loops=1) Index Cond: (device_id = 2) Filter: (_ts_meta_min_1 < 'Tue Feb 01 00:00:00 2000 PST'::timestamp with time zone) @@ -3222,31 +3222,31 @@ QUERY PLAN Sort Method: quicksort -> Append (actual rows=26390 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_1 (actual rows=3598 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=4 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_2 (actual rows=10794 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 
PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=12 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_3 (actual rows=3598 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Seq Scan on compress_hyper_X_X_chunk (actual rows=4 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_4 (actual rows=1680 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 358 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 3 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_5 (actual rows=5040 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 1074 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=9 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 9 -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk o1_6 (actual rows=1680 loops=1) - Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) Rows Removed by Filter: 358 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=3 loops=1) Filter: (_ts_meta_min_1 < 'Sat Jan 08 00:00:00 2000 PST'::timestamp with time zone) diff --git a/tsl/test/shared/expected/transparent_decompress_chunk-13.out b/tsl/test/shared/expected/transparent_decompress_chunk-13.out index f0ec79afac8..09591acb5e5 100644 --- a/tsl/test/shared/expected/transparent_decompress_chunk-13.out +++ b/tsl/test/shared/expected/transparent_decompress_chunk-13.out @@ -119,7 +119,7 @@ QUERY PLAN Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_X_X_chunk (actual rows=0 loops=1) Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 - Filter: (_hyper_X_X_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_X_X_chunk.v3 > '10'::double precision) Rows Removed by Filter: 17990 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_X_X_chunk (actual rows=20 loops=1) @@ -233,7 +233,7 @@ QUERY PLAN QUERY PLAN Limit (actual rows=5 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2985 -> Sort (actual rows=5 loops=1) Sort Key: compress_hyper_X_X_chunk.device_id @@ -250,7 +250,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=150 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 
PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2840 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_1 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -264,7 +264,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=155 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2835 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_1 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -278,7 +278,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17840 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 150 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_max_1 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -291,7 +291,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17835 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 155 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_max_1 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -304,7 +304,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17835 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 155 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_max_1 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -318,7 +318,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) Rows Removed by Filter: 17990 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) (8 rows) @@ -378,7 +378,7 @@ QUERY PLAN Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_X_X_chunk (actual rows=10 loops=1) Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 - Filter: (_hyper_X_X_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_X_X_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 31 Bulk Decompression: true -> Index Scan Backward using 
compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on _timescaledb_internal.compress_hyper_X_X_chunk (actual rows=1 loops=1) diff --git a/tsl/test/shared/expected/transparent_decompress_chunk-14.out b/tsl/test/shared/expected/transparent_decompress_chunk-14.out index f0ec79afac8..09591acb5e5 100644 --- a/tsl/test/shared/expected/transparent_decompress_chunk-14.out +++ b/tsl/test/shared/expected/transparent_decompress_chunk-14.out @@ -119,7 +119,7 @@ QUERY PLAN Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_X_X_chunk (actual rows=0 loops=1) Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 - Filter: (_hyper_X_X_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_X_X_chunk.v3 > '10'::double precision) Rows Removed by Filter: 17990 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_X_X_chunk (actual rows=20 loops=1) @@ -233,7 +233,7 @@ QUERY PLAN QUERY PLAN Limit (actual rows=5 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2985 -> Sort (actual rows=5 loops=1) Sort Key: compress_hyper_X_X_chunk.device_id @@ -250,7 +250,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=150 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2840 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_1 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -264,7 +264,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=155 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2835 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_1 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -278,7 +278,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17840 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 150 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_max_1 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -291,7 +291,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17835 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 155 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: 
(_ts_meta_max_1 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -304,7 +304,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17835 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 155 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_max_1 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -318,7 +318,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) Rows Removed by Filter: 17990 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) (8 rows) @@ -378,7 +378,7 @@ QUERY PLAN Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_X_X_chunk (actual rows=10 loops=1) Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 - Filter: (_hyper_X_X_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_X_X_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 31 Bulk Decompression: true -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on _timescaledb_internal.compress_hyper_X_X_chunk (actual rows=1 loops=1) diff --git a/tsl/test/shared/expected/transparent_decompress_chunk-15.out b/tsl/test/shared/expected/transparent_decompress_chunk-15.out index 68e629c716d..4831c52fc9a 100644 --- a/tsl/test/shared/expected/transparent_decompress_chunk-15.out +++ b/tsl/test/shared/expected/transparent_decompress_chunk-15.out @@ -121,7 +121,7 @@ QUERY PLAN Sort Method: quicksort -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_X_X_chunk (actual rows=0 loops=1) Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 - Filter: (_hyper_X_X_chunk.v3 > '10'::double precision) + Vectorized Filter: (_hyper_X_X_chunk.v3 > '10'::double precision) Rows Removed by Filter: 17990 Bulk Decompression: true -> Seq Scan on _timescaledb_internal.compress_hyper_X_X_chunk (actual rows=20 loops=1) @@ -235,7 +235,7 @@ QUERY PLAN QUERY PLAN Limit (actual rows=5 loops=1) -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=5 loops=1) - Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" = 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2985 -> Sort (actual rows=5 loops=1) Sort Key: compress_hyper_X_X_chunk.device_id @@ -252,7 +252,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=150 loops=1) - Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2840 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 
loops=1) Filter: (_ts_meta_min_1 < 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -266,7 +266,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=155 loops=1) - Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 2835 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=5 loops=1) Filter: (_ts_meta_min_1 <= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -280,7 +280,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17840 loops=1) - Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 150 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_max_1 >= 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -293,7 +293,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17835 loops=1) - Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 155 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_max_1 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -306,7 +306,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: top-N heapsort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17835 loops=1) - Filter: ('Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone < "time") + Vectorized Filter: ("time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 155 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) Filter: (_ts_meta_max_1 > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) @@ -320,7 +320,7 @@ QUERY PLAN Sort Key: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id Sort Method: quicksort -> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=0 loops=1) - Filter: (v0 < 1) + Vectorized Filter: (v0 < 1) Rows Removed by Filter: 17990 -> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1) (8 rows) @@ -380,7 +380,7 @@ QUERY PLAN Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 -> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_X_X_chunk (actual rows=10 loops=1) Output: _hyper_X_X_chunk."time", _hyper_X_X_chunk.device_id, _hyper_X_X_chunk.v0, _hyper_X_X_chunk.v1, _hyper_X_X_chunk.v2, _hyper_X_X_chunk.v3 - Filter: (_hyper_X_X_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) + Vectorized Filter: (_hyper_X_X_chunk."time" > 'Fri Dec 31 17:00:00 1999 PST'::timestamp with time zone) Rows Removed by Filter: 31 Bulk Decompression: true -> Index Scan Backward using compress_hyper_X_X_chunk__compressed_hypertable_4_device_id__t on _timescaledb_internal.compress_hyper_X_X_chunk (actual rows=1 loops=1) diff --git a/tsl/test/sql/CMakeLists.txt b/tsl/test/sql/CMakeLists.txt index 
aae0ccaa603..9a793a3b29e 100644
--- a/tsl/test/sql/CMakeLists.txt
+++ b/tsl/test/sql/CMakeLists.txt
@@ -79,6 +79,7 @@ if(CMAKE_BUILD_TYPE MATCHES Debug)
    data_node.sql
    ddl_hook.sql
    debug_notice.sql
+   decompress_vector_qual.sql
    deparse.sql
    insert_memory_usage.sql
    information_view_chunk_count.sql
diff --git a/tsl/test/sql/decompress_vector_qual.sql b/tsl/test/sql/decompress_vector_qual.sql
new file mode 100644
index 00000000000..405ef27d7dd
--- /dev/null
+++ b/tsl/test/sql/decompress_vector_qual.sql
@@ -0,0 +1,89 @@
+-- This file and its contents are licensed under the Timescale License.
+-- Please see the included NOTICE for copyright information and
+-- LICENSE-TIMESCALE for a copy of the license.
+
+create table vectorqual(metric1 int8, ts timestamp, metric2 int8, device int8);
+select create_hypertable('vectorqual', 'ts');
+alter table vectorqual set (timescaledb.compress, timescaledb.compress_segmentby = 'device');
+
+insert into vectorqual(ts, device, metric1, metric2) values ('2020-01-01 00:00:00', 1, 11, 12);
+select count(compress_chunk(x, true)) from show_chunks('vectorqual') x;
+
+alter table vectorqual drop column metric1;
+insert into vectorqual(ts, device, metric2) values ('2021-01-01 00:00:00', 2, 22);
+select count(compress_chunk(x, true)) from show_chunks('vectorqual') x;
+
+alter table vectorqual add column metric3 int4 default 777;
+insert into vectorqual(ts, device, metric2, metric3) values ('2022-01-01 00:00:00', 3, 32, 33);
+select count(compress_chunk(x, true)) from show_chunks('vectorqual') x;
+
+alter table vectorqual add column metric4 int8;
+insert into vectorqual(ts, device, metric2, metric3, metric4) values ('2023-01-01 00:00:00', 4, 42, 43, 44);
+select count(compress_chunk(x, true)) from show_chunks('vectorqual') x;
+
+select * from vectorqual order by vectorqual;
+
+set timescaledb.debug_require_vector_qual to 'only' /* all following quals must be vectorized */;
+select count(*) from vectorqual where ts > '1999-01-01 00:00:00';
+select count(*) from vectorqual where metric2 = 22;
+select count(*) from vectorqual where 22 = metric2 /* commutators */;
+select count(*) from vectorqual where metric3 = 33;
+select count(*) from vectorqual where metric3 = 777 /* default value */;
+select count(*) from vectorqual where metric4 = 44 /* column with default null */;
+select count(*) from vectorqual where metric4 >= 0 /* nulls shouldn't pass the qual */;
+
+set timescaledb.debug_require_vector_qual to 'forbid';
+select count(*) from vectorqual where device = 1 /* can't apply vector ops to the segmentby column */;
+
+-- Test columns that don't support bulk decompression.
+alter table vectorqual add column tag text;
+insert into vectorqual(ts, device, metric2, metric3, metric4, tag) values ('2025-01-01 00:00:00', 5, 52, 53, 54, 'tag5');
+select count(compress_chunk(x, true)) from show_chunks('vectorqual') x;
+
+set timescaledb.debug_require_vector_qual to 'only';
+select tag from vectorqual where metric2 > 0;
+
+
+-- Queries without aggregation.
+select * from vectorqual where ts > '2021-01-01 00:00:00' order by vectorqual;
+select * from vectorqual where metric4 >= 0 order by vectorqual;
+
+
+-- Constraints on columns not selected.
+select metric4 from vectorqual where ts > '2021-01-01 00:00:00' order by 1;
+
+
+-- ANDed constraints on multiple columns.
+select * from vectorqual where ts > '2021-01-01 00:00:00' and metric3 > 40 order by vectorqual;
+
+
+-- ORed constraints on multiple columns (not vectorized for now).
+set timescaledb.debug_require_vector_qual to 'forbid';
+select * from vectorqual where ts > '2021-01-01 00:00:00' or metric3 > 40 order by vectorqual;
+
+
+-- Test with unary operator.
+create operator !! (function = 'bool', rightarg = int4);
+select count(*) from vectorqual where !!metric3;
+
+
+-- NullTest is not vectorized.
+set timescaledb.debug_require_vector_qual to 'forbid';
+select count(*) from vectorqual where metric4 is null;
+select count(*) from vectorqual where metric4 is not null;
+
+
+-- Test that the vectorized quals are disabled by disabling the bulk decompression.
+set timescaledb.enable_bulk_decompression to off;
+set timescaledb.debug_require_vector_qual to 'forbid';
+select count(*) from vectorqual where metric4 > null;
+set timescaledb.enable_bulk_decompression to on;
+
+
+-- Test that the debug GUC works.
+\set ON_ERROR_STOP 0
+set timescaledb.debug_require_vector_qual to 'forbid';
+select count(*) from vectorqual where metric4 > 4;
+set timescaledb.debug_require_vector_qual to 'only';
+select count(*) from vectorqual where metric4 is null;
+\set ON_ERROR_STOP 1
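
For reference, a minimal usage sketch of the new debug GUC outside the regression test (not part of this change set; the hypertable `metrics` and its `ts` column are hypothetical, and the GUC is only available in debug builds, matching how the test above is gated on CMAKE_BUILD_TYPE MATCHES Debug):

-- Error out if a DecompressChunk node has to apply a non-vectorized filter.
set timescaledb.debug_require_vector_qual to 'only';
select count(*) from metrics where ts > '2022-01-01';

-- EXPLAIN should report the pushed-down condition as "Vectorized Filter: ..."
-- under the DecompressChunk custom scan node.
explain (analyze, costs off, timing off, summary off)
select count(*) from metrics where ts > '2022-01-01';

-- Conversely, error out if any filter on a DecompressChunk node is vectorized.
set timescaledb.debug_require_vector_qual to 'forbid';

-- Restore the default ('allow'), which performs no checking either way.
reset timescaledb.debug_require_vector_qual;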