Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support Postgres 17 #19568

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def get_initial_config(self):
'black==24.10.0',
'ruff==0.8.0',
# Keep in sync with: /datadog_checks_base/pyproject.toml
'pydantic==2.7.3',
'pydantic==2.10.5',
],
}
config = {'lint': lint_env}
Expand Down
18 changes: 11 additions & 7 deletions postgres/datadog_checks/postgres/metrics_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,20 +15,21 @@
ACTIVITY_QUERY_LT_10,
CHECKSUM_METRICS,
COMMON_ARCHIVER_METRICS,
COMMON_BGW_METRICS,
COMMON_BGW_METRICS_LT_17,
COMMON_METRICS,
DATABASE_SIZE_METRICS,
DBM_MIGRATED_METRICS,
NEWER_14_METRICS,
NEWER_91_BGW_METRICS,
NEWER_92_BGW_METRICS,
NEWER_91_BGW_METRICS_LT_17,
NEWER_92_BGW_METRICS_LT_17,
NEWER_92_METRICS,
QUERY_PG_BGWRITER_CHECKPOINTER,
REPLICATION_METRICS_9_1,
REPLICATION_METRICS_9_2,
REPLICATION_METRICS_10,
REPLICATION_STATS_METRICS,
)
from .version_utils import V8_3, V9, V9_1, V9_2, V9_4, V9_6, V10, V12, V14
from .version_utils import V8_3, V9, V9_1, V9_2, V9_4, V9_6, V10, V12, V14, V17

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -117,14 +118,17 @@ def get_bgw_metrics(self, version):
depending on the postgres version.
Uses a dictionary to save the result for each instance
"""
if version >= V17:
return QUERY_PG_BGWRITER_CHECKPOINTER

# Extended 9.2+ metrics if needed
if self.bgw_metrics is None:
self.bgw_metrics = dict(COMMON_BGW_METRICS)
self.bgw_metrics = dict(COMMON_BGW_METRICS_LT_17)

if version >= V9_1:
self.bgw_metrics.update(NEWER_91_BGW_METRICS)
self.bgw_metrics.update(NEWER_91_BGW_METRICS_LT_17)
if version >= V9_2:
self.bgw_metrics.update(NEWER_92_BGW_METRICS)
self.bgw_metrics.update(NEWER_92_BGW_METRICS_LT_17)

if not self.bgw_metrics:
return None
Expand Down
5 changes: 3 additions & 2 deletions postgres/datadog_checks/postgres/postgres.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@
STAT_WAL_METRICS,
SUBSCRIPTION_STATE_METRICS,
VACUUM_PROGRESS_METRICS,
VACUUM_PROGRESS_METRICS_LT_17,
WAL_FILE_METRICS,
DatabaseConfigurationError,
DatabaseHealthCheckError, # noqa: F401
Expand All @@ -72,7 +73,7 @@
payload_pg_version,
warning_with_tags,
)
from .version_utils import V9, V9_2, V10, V12, V13, V14, V15, V16, VersionUtils
from .version_utils import V9, V9_2, V10, V12, V13, V14, V15, V16, V17, VersionUtils

try:
import datadog_agent
Expand Down Expand Up @@ -314,7 +315,7 @@ def dynamic_queries(self):
if self._config.collect_buffercache_metrics:
queries.append(BUFFERCACHE_METRICS)
queries.append(QUERY_PG_REPLICATION_SLOTS)
queries.append(VACUUM_PROGRESS_METRICS)
queries.append(VACUUM_PROGRESS_METRICS if self.version >= V17 else VACUUM_PROGRESS_METRICS_LT_17)
queries.append(STAT_SUBSCRIPTION_METRICS)

if self.version >= V12:
Expand Down
8 changes: 8 additions & 0 deletions postgres/datadog_checks/postgres/statements.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,12 @@ def statements_query(**kwargs):
# Columns that must exist in pg_stat_statements for statement metrics
# collection to be possible at all.
PG_STAT_STATEMENTS_REQUIRED_COLUMNS = frozenset({'calls', 'query', 'rows'})

# I/O timing columns as exposed on PostgreSQL 17+ (pg_stat_statements renamed
# blk_read_time/blk_write_time to the shared_blk_* form — TODO confirm exact
# extension version of the rename). Only populated when track_io_timing is on.
PG_STAT_STATEMENTS_TIMING_COLUMNS = frozenset(('shared_blk_read_time', 'shared_blk_write_time'))
PG_STAT_STATEMENTS_TIMING_COLUMNS_LT_17 = frozenset(
{
'blk_read_time',
'blk_write_time',
Expand Down Expand Up @@ -114,6 +120,7 @@ def statements_query(**kwargs):
}
)
| PG_STAT_STATEMENTS_TIMING_COLUMNS
| PG_STAT_STATEMENTS_TIMING_COLUMNS_LT_17
)

PG_STAT_STATEMENTS_TAG_COLUMNS = frozenset(
Expand Down Expand Up @@ -310,6 +317,7 @@ def _load_pg_stat_statements(self):

if self._check.pg_settings.get("track_io_timing") != "on":
desired_columns -= PG_STAT_STATEMENTS_TIMING_COLUMNS
desired_columns -= PG_STAT_STATEMENTS_TIMING_COLUMNS_LT_17

pg_stat_statements_max_setting = self._check.pg_settings.get("pg_stat_statements.max")
pg_stat_statements_max = int(
Expand Down
58 changes: 54 additions & 4 deletions postgres/datadog_checks/postgres/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,35 @@ def get_list_chunks(lst, n):
],
}

COMMON_BGW_METRICS = {
# PG 17+ only: checkpoint counters moved out of pg_stat_bgwriter into the new
# pg_stat_checkpointer view, so both views are cross-joined (each holds exactly
# one row). Every selected column is aliased to its key in 'metrics' below so
# the row-to-metric mapping is explicit by name instead of silently relying on
# positional ordering — in the original query the result column names
# (num_timed, buffers_written, ...) did not match the metrics keys.
QUERY_PG_BGWRITER_CHECKPOINTER = {
    'name': 'bgw_metrics',
    'query': """
SELECT
    cp.num_timed AS checkpoints_timed,
    cp.num_requested AS checkpoints_req,
    cp.buffers_written AS buffers_checkpoint,
    bg.buffers_clean,
    bg.maxwritten_clean,
    bg.buffers_alloc,
    cp.write_time AS checkpoint_write_time,
    cp.sync_time AS checkpoint_sync_time
FROM pg_stat_bgwriter bg, pg_stat_checkpointer cp
""".strip(),
    # Metric names are kept under the legacy 'bgwriter.' namespace for
    # dashboard continuity even though the data now comes from
    # pg_stat_checkpointer on PG 17+.
    'metrics': {
        'checkpoints_timed': ('bgwriter.checkpoints_timed', AgentCheck.monotonic_count),
        'checkpoints_req': ('bgwriter.checkpoints_requested', AgentCheck.monotonic_count),
        'buffers_checkpoint': ('bgwriter.buffers_checkpoint', AgentCheck.monotonic_count),
        'buffers_clean': ('bgwriter.buffers_clean', AgentCheck.monotonic_count),
        'maxwritten_clean': ('bgwriter.maxwritten_clean', AgentCheck.monotonic_count),
        'buffers_alloc': ('bgwriter.buffers_alloc', AgentCheck.monotonic_count),
        'checkpoint_write_time': ('bgwriter.write_time', AgentCheck.monotonic_count),
        'checkpoint_sync_time': ('bgwriter.sync_time', AgentCheck.monotonic_count),
    },
    'descriptors': [],
    'relation': False,
}

COMMON_BGW_METRICS_LT_17 = {
'checkpoints_timed': ('bgwriter.checkpoints_timed', AgentCheck.monotonic_count),
'checkpoints_req': ('bgwriter.checkpoints_requested', AgentCheck.monotonic_count),
'buffers_checkpoint': ('bgwriter.buffers_checkpoint', AgentCheck.monotonic_count),
Expand All @@ -238,9 +266,9 @@ def get_list_chunks(lst, n):
'buffers_alloc': ('bgwriter.buffers_alloc', AgentCheck.monotonic_count),
}

NEWER_91_BGW_METRICS = {'buffers_backend_fsync': ('bgwriter.buffers_backend_fsync', AgentCheck.monotonic_count)}
# Available on 9.1+ but only below PG 17 — the _LT_17 suffix and the V17 gate in
# metrics_cache indicate pg_stat_bgwriter.buffers_backend_fsync is gone in 17+.
NEWER_91_BGW_METRICS_LT_17 = {'buffers_backend_fsync': ('bgwriter.buffers_backend_fsync', AgentCheck.monotonic_count)}

NEWER_92_BGW_METRICS = {
# Checkpoint timing columns on pg_stat_bgwriter, used only below PG 17 (on 17+
# the same data is read from pg_stat_checkpointer instead).
NEWER_92_BGW_METRICS_LT_17 = dict(
    checkpoint_write_time=('bgwriter.write_time', AgentCheck.monotonic_count),
    checkpoint_sync_time=('bgwriter.sync_time', AgentCheck.monotonic_count),
)
Expand Down Expand Up @@ -502,10 +530,32 @@ def get_list_chunks(lst, n):
],
}

# Requires PG10+
VACUUM_PROGRESS_METRICS = {
'name': 'vacuum_progress_metrics',
'query': """
SELECT v.datname, c.relname, v.phase,
v.heap_blks_total, v.heap_blks_scanned, v.heap_blks_vacuumed,
v.index_vacuum_count, v.max_dead_tuple_bytes, v.num_dead_item_ids
FROM pg_stat_progress_vacuum as v
JOIN pg_class c on c.oid = v.relid
""",
'columns': [
{'name': 'db', 'type': 'tag'},
{'name': 'table', 'type': 'tag'},
{'name': 'phase', 'type': 'tag'},
{'name': 'vacuum.heap_blks_total', 'type': 'gauge'},
{'name': 'vacuum.heap_blks_scanned', 'type': 'gauge'},
{'name': 'vacuum.heap_blks_vacuumed', 'type': 'gauge'},
{'name': 'vacuum.index_vacuum_count', 'type': 'gauge'},
{'name': 'vacuum.max_dead_tuples', 'type': 'gauge'},
{'name': 'vacuum.num_dead_tuples', 'type': 'gauge'},
],
}

# Requires PG10+
VACUUM_PROGRESS_METRICS_LT_17 = {
'name': 'vacuum_progress_metrics',
'query': """
SELECT v.datname, c.relname, v.phase,
v.heap_blks_total, v.heap_blks_scanned, v.heap_blks_vacuumed,
v.index_vacuum_count, v.max_dead_tuples, v.num_dead_tuples
Expand Down
1 change: 1 addition & 0 deletions postgres/datadog_checks/postgres/version_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
V14 = VersionInfo.parse("14.0.0")
V15 = VersionInfo.parse("15.0.0")
V16 = VersionInfo.parse("16.0.0")
# PG 17 sentinel: gates the pg_stat_checkpointer query and the renamed
# pg_stat_progress_vacuum columns elsewhere in the check.
V17 = VersionInfo.parse("17.0.0")


class VersionUtils(object):
Expand Down
3 changes: 2 additions & 1 deletion postgres/hatch.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ mypy-deps = [

[[envs.default.matrix]]
python = ["3.12"]
version = ["9.6", "10.0", "11.0", "12.17", "13.0", "14.0", "15.0", "16.0"]
version = ["9.6", "10.0", "11.0", "12.17", "13.0", "14.0", "15.0", "16.0", "17.0"]

[envs.default.overrides]
matrix.version.env-vars = [
Expand All @@ -23,6 +23,7 @@ matrix.version.env-vars = [
{ key = "POSTGRES_VERSION", value = "14", if = ["14.0"] },
{ key = "POSTGRES_VERSION", value = "15", if = ["15.0"] },
{ key = "POSTGRES_VERSION", value = "16", if = ["16.0"] },
{ key = "POSTGRES_VERSION", value = "17", if = ["17.0"] },
]

[envs.latest.env-vars]
Expand Down
29 changes: 26 additions & 3 deletions postgres/tests/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,14 +93,13 @@
'postgresql.bgwriter.buffers_checkpoint',
'postgresql.bgwriter.buffers_clean',
'postgresql.bgwriter.maxwritten_clean',
'postgresql.bgwriter.buffers_backend',
'postgresql.bgwriter.buffers_alloc',
'postgresql.bgwriter.buffers_backend_fsync',
'postgresql.bgwriter.write_time',
'postgresql.bgwriter.sync_time',
]

COMMON_BGW_METRICS_PG_ABOVE_94 = ['postgresql.archiver.archived_count', 'postgresql.archiver.failed_count']
COMMON_BGW_METRICS_PG_BELOW_17 = ['postgresql.bgwriter.buffers_backend', 'postgresql.bgwriter.buffers_backend_fsync']
CONNECTION_METRICS = ['postgresql.max_connections', 'postgresql.percent_usage_connections']
CONNECTION_METRICS_DB = ['postgresql.connections']
COMMON_DBS = ['dogs', 'postgres', 'dogs_nofunc', 'dogs_noschema', DB_NAME]
Expand Down Expand Up @@ -361,6 +360,10 @@ def check_bgw_metrics(aggregator, expected_tags, count=1):
for name in COMMON_BGW_METRICS:
aggregator.assert_metric(name, count=count, tags=expected_tags)

if float(POSTGRES_VERSION) < 17:
for name in COMMON_BGW_METRICS_PG_BELOW_17:
aggregator.assert_metric(name, count=count, tags=expected_tags)

if float(POSTGRES_VERSION) >= 9.4:
for name in COMMON_BGW_METRICS_PG_ABOVE_94:
aggregator.assert_metric(name, count=count, tags=expected_tags)
Expand All @@ -370,7 +373,27 @@ def check_slru_metrics(aggregator, expected_tags, count=1):
if float(POSTGRES_VERSION) < 13.0:
return

slru_caches = ['Subtrans', 'Serial', 'MultiXactMember', 'Xact', 'other', 'Notify', 'CommitTs', 'MultiXactOffset']
slru_caches = [
'subtransaction',
'serializable',
'multixact_member',
'transaction',
'other',
'notify',
'commit_timestamp',
'multixact_offset',
]
if float(POSTGRES_VERSION) < 17.0:
slru_caches = [
'Subtrans',
'Serial',
'MultiXactMember',
'Xact',
'other',
'Notify',
'CommitTs',
'MultiXactOffset',
]
for metric_name in _iterate_metric_name(SLRU_METRICS):
for slru_cache in slru_caches:
aggregator.assert_metric(
Expand Down
3 changes: 2 additions & 1 deletion postgres/tests/test_pg_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -1080,7 +1080,8 @@ def assert_state_clean(check):

def assert_state_set(check):
assert check.metrics_cache.instance_metrics
assert check.metrics_cache.bgw_metrics
if float(POSTGRES_VERSION) < 17.0:
assert check.metrics_cache.bgw_metrics
if POSTGRES_VERSION != '9.3':
assert check.metrics_cache.archiver_metrics
assert check.metrics_cache.replication_metrics
Expand Down
36 changes: 35 additions & 1 deletion postgres/tests/test_progress_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
CLUSTER_VACUUM_PROGRESS_METRICS,
INDEX_PROGRESS_METRICS,
VACUUM_PROGRESS_METRICS,
VACUUM_PROGRESS_METRICS_LT_17,
)

from .common import DB_NAME, _get_expected_tags, _iterate_metric_name
Expand All @@ -20,6 +21,8 @@
lock_table,
requires_over_12,
requires_over_13,
requires_over_17,
requires_under_17,
run_query_thread,
run_vacuum_thread,
)
Expand Down Expand Up @@ -72,7 +75,7 @@ def test_analyze_progress(aggregator, integration_check, pg_instance):
aggregator.assert_metric(metric_name, count=1, tags=expected_tags)


@requires_over_12
@requires_over_17
def test_vacuum_progress(aggregator, integration_check, pg_instance):
check = integration_check(pg_instance)

Expand Down Expand Up @@ -102,6 +105,37 @@ def test_vacuum_progress(aggregator, integration_check, pg_instance):
aggregator.assert_metric(metric_name, count=1, tags=expected_tags)


@requires_over_12
@requires_under_17
def test_vacuum_progress_lt_17(aggregator, integration_check, pg_instance):
    """Pre-PG17 vacuum progress metrics (max_dead_tuples/num_dead_tuples shape)."""
    check = integration_check(pg_instance)

    # Kick off a vacuum in a background thread; DISABLE_PAGE_SKIPPING keeps it
    # running long enough to be observed in pg_stat_progress_vacuum.
    vacuum_thread = run_vacuum_thread(pg_instance, 'VACUUM (DISABLE_PAGE_SKIPPING) test_part1')

    # Block until the vacuum is visible in the progress view.
    _wait_for_value(
        pg_instance,
        lower_threshold=0,
        query="SELECT count(*) from pg_stat_progress_vacuum",
    )

    # Run the check while the vacuum is still in flight.
    check.check(pg_instance)

    # Stop the vacuum and reap the worker thread before asserting.
    kill_vacuum(pg_instance)
    vacuum_thread.join()

    expected_tags = _get_expected_tags(check, pg_instance) + [
        'phase:scanning heap',
        'table:test_part1',
        f'db:{DB_NAME}',
    ]
    for metric_name in _iterate_metric_name(VACUUM_PROGRESS_METRICS_LT_17):
        aggregator.assert_metric(metric_name, count=1, tags=expected_tags)


@requires_over_12
def test_index_progress(aggregator, integration_check, pg_instance):
check = integration_check(pg_instance)
Expand Down
13 changes: 11 additions & 2 deletions postgres/tests/test_statements.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,11 @@
DBExplainError,
StatementTruncationState,
)
from datadog_checks.postgres.statements import PG_STAT_STATEMENTS_METRICS_COLUMNS, PG_STAT_STATEMENTS_TIMING_COLUMNS
from datadog_checks.postgres.statements import (
PG_STAT_STATEMENTS_METRICS_COLUMNS,
PG_STAT_STATEMENTS_TIMING_COLUMNS,
PG_STAT_STATEMENTS_TIMING_COLUMNS_LT_17,
)
from datadog_checks.postgres.util import payload_pg_version
from datadog_checks.postgres.version_utils import V12

Expand Down Expand Up @@ -292,7 +296,12 @@ def _should_catch_query(dbname):
available_columns = set(row.keys())
metric_columns = available_columns & PG_STAT_STATEMENTS_METRICS_COLUMNS
if track_io_timing_enabled:
assert (available_columns & PG_STAT_STATEMENTS_TIMING_COLUMNS) == PG_STAT_STATEMENTS_TIMING_COLUMNS
if float(POSTGRES_VERSION) >= 17.0:
assert (available_columns & PG_STAT_STATEMENTS_TIMING_COLUMNS) == PG_STAT_STATEMENTS_TIMING_COLUMNS
else:
assert (
available_columns & PG_STAT_STATEMENTS_TIMING_COLUMNS_LT_17
) == PG_STAT_STATEMENTS_TIMING_COLUMNS_LT_17
else:
assert (available_columns & PG_STAT_STATEMENTS_TIMING_COLUMNS) == set()
for col in metric_columns:
Expand Down
8 changes: 8 additions & 0 deletions postgres/tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,14 @@
POSTGRES_VERSION is None or float(POSTGRES_VERSION) < 16,
reason='This test is for over 16 only (make sure POSTGRES_VERSION is set)',
)
# Skip marker: run only against PostgreSQL < 17 (POSTGRES_VERSION env var must be set).
requires_under_17 = pytest.mark.skipif(
    POSTGRES_VERSION is None or float(POSTGRES_VERSION) >= 17,
    reason='This test is for under 17 only (make sure POSTGRES_VERSION is set)',
)
# Skip marker: run only against PostgreSQL >= 17.
requires_over_17 = pytest.mark.skipif(
    POSTGRES_VERSION is None or float(POSTGRES_VERSION) < 17,
    reason='This test is for over 17 only (make sure POSTGRES_VERSION is set)',
)


def _get_conn(db_instance, dbname=None, user=None, password=None, application_name='test'):
Expand Down
Loading