Skip to content

Commit

Permalink
Merge branch '4.8.1' into feat/6571-indexer-stats-plot
Browse files Browse the repository at this point in the history
  • Loading branch information
Rebits committed Jun 21, 2024
2 parents 46816f5 + 0e07651 commit 649237b
Show file tree
Hide file tree
Showing 5 changed files with 49 additions and 2 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,14 @@ All notable changes to this project will be documented in this file.

## [4.8.1] - TBD

### Added

- Add functionality to unify data of the binary processes with their subprocesses to plot ([#5500](https://github.com/wazuh/wazuh-qa/pull/5500)) \- (Framework)

### Fixed

- Fix test_consistency_initial_scans by adding a 30-minute wait before collecting vulnerabilities. ([#5507](https://github.com/wazuh/wazuh-qa/pull/5507)) \- (Tests)

## [4.8.0] - 12/06/2024

### Added
Expand Down
11 changes: 11 additions & 0 deletions deps/wazuh_testing/wazuh_testing/scripts/data_visualizations.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@ def create_destination_directory(destination_directory):
if not exists(destination_directory):
makedirs(destination_directory)

def validate_arguments(options):
    """Validate the coherence of the parsed command-line options.

    Args:
        options (argparse.Namespace): parsed script arguments.

    Raises:
        ValueError: when the unify flag is requested for a visualization
            target other than 'binary'.
    """
    target_is_binary = options.visualization_target == 'binary'
    if not target_is_binary and options.unify:
        raise ValueError("Unify option is not allowed for non binary data plotting")

def get_script_arguments():
parser = argparse.ArgumentParser(usage="%(prog)s [options]", description="Script to generate data visualizations",
Expand All @@ -43,6 +46,8 @@ def get_script_arguments():
help=f'Base name for the images. Default {None}.')
parser.add_argument('-c', '--columns', dest='columns', default=None,
help=f'Path to Json with Columns to Plot. Default {None}.')
parser.add_argument('-u', '--unify', dest='unify', action='store_true',
help=f'Unify data of the binary processes with their subprocesses to plot.')

return parser.parse_args()

Expand All @@ -52,11 +57,17 @@ def main():
create_destination_directory(options.destination)

target = options.visualization_target
validate_arguments(options)

if target in ['analysis', 'remote', 'wazuhdb']:
dv = DaemonStatisticsVisualizer(options.csv_list, daemon=target,
store_path=options.destination,
base_name=options.name)
elif target == 'binary':
dv = BinaryDatavisualizer(options.csv_list,
store_path=options.destination,
base_name=options.name,
unify_child_daemon_metrics=options.unify)
else:
dv = strategy_plot_by_target[target](options.csv_list,
store_path=options.destination,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -163,9 +163,12 @@ class BinaryDatavisualizer(DataVisualizer):
binary_metrics_extra_fields = ["Daemon", "Version", "PID"]
binary_metrics_fields = binary_metrics_fields_to_plot + binary_metrics_extra_fields

def __init__(self, dataframes, store_path=gettempdir(), base_name=None):
def __init__(self, dataframes, store_path=gettempdir(), base_name=None, unify_child_daemon_metrics=False):
    """Initialize the binary-metrics visualizer.

    Args:
        dataframes: data sources forwarded unchanged to DataVisualizer.
        store_path: directory where generated plots are stored (defaults to the system temp dir).
        base_name: optional base name for the generated image files.
        unify_child_daemon_metrics (bool): when True, merge the metrics of child
            sub-processes into their parent daemon before plotting.
    """
    super().__init__(dataframes, store_path, base_name)
    self._validate_dataframe()
    if unify_child_daemon_metrics:
        # Expose the index as a regular column so _unify_dataframes can group by
        # 'Timestamp' — NOTE(review): assumes the index holds the timestamps; confirm.
        self.dataframe = self.dataframe.reset_index(drop=False)
        self._unify_dataframes()

def _get_expected_fields(self) -> list:
    """Return the expected binary-metrics column names (plot fields plus extra fields)."""
    expected_fields = self.binary_metrics_fields
    return expected_fields
Expand Down Expand Up @@ -195,6 +198,25 @@ def _get_fields_to_plot(self):

return fields_to_plot

def _unify_dataframes(self):
    """Unify the data of each process with their respective sub-processes.

    Rows whose 'Daemon' value contains a parent daemon's name are relabeled
    with that parent name, all metric columns are summed per
    (Timestamp, Daemon), and the PID/Version columns are re-attached via
    left merges.

    NOTE(review): assumes 'Timestamp' is a regular column at this point
    (see the reset_index() call done before invoking this method) — confirm.
    """
    # Snapshot PID/Version per daemon name BEFORE the rename below, so they
    # can be merged back once the metric columns have been aggregated.
    pids = self.dataframe[['Daemon', 'PID']].drop_duplicates()
    versions = self.dataframe[['Daemon', 'Version']].drop_duplicates()

    # Keep only parent daemons; names containing "child" are sub-processes.
    daemons_list = [daemon_name for daemon_name in self._get_daemons() if "child" not in daemon_name]

    # Fold every row whose daemon name contains a parent name into that
    # parent (this is what merges the child rows with the parent's).
    for daemon_name in daemons_list:
        self.dataframe.loc[self.dataframe['Daemon'].str.contains(daemon_name, na=False), 'Daemon'] = daemon_name

    # Sum every remaining (metric) column per timestamp and unified daemon.
    columns_to_drop = ['Timestamp', 'Daemon', 'Version', 'PID']
    columns_to_sum = self.dataframe.columns.drop(columns_to_drop)

    self.dataframe = self.dataframe.groupby(['Timestamp', 'Daemon'])[columns_to_sum].sum().reset_index(drop=False)

    # Re-attach PID/Version per daemon. NOTE(review): if a daemon name maps
    # to more than one PID in the snapshot (e.g. parent plus children, or a
    # restart), these left merges duplicate rows — confirm upstream data.
    self.dataframe = self.dataframe.merge(pids[['Daemon', 'PID']], on='Daemon', how='left')
    self.dataframe = self.dataframe.merge(versions[['Daemon', 'Version']], on='Daemon', how='left')

def plot(self):
columns_to_plot = self._get_fields_to_plot()
for element in columns_to_plot:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
---
filebeat_version: 7.10.2

wazuh_template_branch: 4.8.0
wazuh_template_branch: 4.8.1

filebeat_node_name: node-1

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,8 +71,11 @@
from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated
from wazuh_testing.tools.system import HostManager


pytestmark = [pytest.mark.e2e, pytest.mark.vulnerability_detector, pytest.mark.tier0]

# Wazuh Indexer abuseControl timeout set to 30 minutes (1800 seconds)
MINIMUM_TIMEOUT_RESCAN = 1800

AGENTS_SCANNED_FIRST_SCAN = []
FIRST_SCAN_TIME = None
Expand Down Expand Up @@ -317,6 +320,9 @@ def test_first_syscollector_scan(
"Syscollector scan not started in any agent. Check agent logs for more information"
)

logging.critical("Waiting 30 minutes to avoid Indexer abuseControl.")
time.sleep(MINIMUM_TIMEOUT_RESCAN)

logging.critical("Waiting until agent all agents have been scanned.")
time.sleep(TIMEOUT_PER_AGENT_VULNERABILITY_FIRST_SCAN * len(AGENTS_SCANNED_FIRST_SCAN))

Expand Down

0 comments on commit 649237b

Please sign in to comment.