#!/usr/bin/env python3
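"""Quartz probe: ping a set of RIPE Atlas anchors and upload the results.

The measurements (min/max RTT, jitter and packet loss) are POSTed as JSON,
together with the ISP name and API key, to the URL configured in
~/.config/quartz.conf. Example invocations (see --help for all flags):

    ./quartz.py --parallel
    ./quartz.py --read-only --log-level DEBUG
"""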
from icmplib import ping
import json
import logging
import requests
from requests.structures import CaseInsensitiveDict
from pathlib import Path
import configparser
import argparse
import sys
import subprocess
import shlex
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
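
# User configuration and the placeholder defaults written on first run.
# Expected quartz.conf layout (the values below are only illustrative):
#   [quartz]
#   api_key = <your API key>
#   url = <upload endpoint URL>
#   isp = <ISP name>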
CONFIG_DIR = Path.home()/'.config'
CONFIG_FILE = CONFIG_DIR/'quartz.conf'
DEFAULT_API_KEY = 'XXXXXX'
DEFAULT_URL = 'https://xxxxxxxx'
DEFAULT_ISP = 'ISP name (Fibertel, Telecentro, etc.)'
LOGGER = logging.getLogger("quartz")


def loglevel_validator(v):
    """Validate selected log level. Return v.upper() or raise an error."""
    if v.upper() not in ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']:
        raise argparse.ArgumentTypeError(
            'Log level must be a valid Python log level. See -h for details.'
        )
    return v.upper()


def setup_logging(log_level):
    logformat = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    logging.basicConfig(level=log_level, format=logformat)


def generate_config():
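    """Create the config dir and a quartz.conf with placeholder values if missing.

    Return True when a new config file was written, False otherwise.
    """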
    if not CONFIG_DIR.exists():
        LOGGER.info(f'Creating {CONFIG_DIR} since it does not exist.')
        CONFIG_DIR.mkdir()
    if not CONFIG_FILE.exists():
        config = configparser.ConfigParser()
        config['quartz'] = {}
        config['quartz']['api_key'] = DEFAULT_API_KEY
        config['quartz']['url'] = DEFAULT_URL
        config['quartz']['isp'] = DEFAULT_ISP
        with open(CONFIG_FILE, 'w') as configfile:
            config.write(configfile)
        LOGGER.info(f'Creating {CONFIG_FILE} since it does not exist.')
        LOGGER.warning(
            f'{CONFIG_FILE} was created with dummy values. '
            'Replace them with the correct isp, url and api_key.'
        )
        return True
    return False


def read_config():
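    """Read quartz.conf, aborting if it is missing, empty or still has dummy values."""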
    config = configparser.ConfigParser()
    config.read(CONFIG_FILE)
    try:
        if (
            config['quartz']['api_key'] == DEFAULT_API_KEY or
            config['quartz']['url'] == DEFAULT_URL or
            config['quartz']['isp'] == DEFAULT_ISP
        ):
            LOGGER.error(f'The file {CONFIG_FILE} still contains dummy values.')
            if os.environ.get('EXECUTION_ENV') == "DOCKER":
                LOGGER.error('Check that the correct environment variables were passed to docker run.')
            sys.exit(1)
    except KeyError:
        LOGGER.error(f'The file {CONFIG_FILE} is invalid (is it empty?).')
        sys.exit(2)
    return config


def ping_host(hostname):
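    """Ping *hostname* three times and return its RTT stats and packet loss.

    Uses icmplib when running as root; otherwise falls back to the system
    ping binary and parses its output.
    """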
    if os.geteuid() == 0:
        host = ping(hostname, count=3, privileged=True)
        return {
            'anchor': hostname.split('.')[0],
            'min_rtt': host.min_rtt,
            'max_rtt': host.max_rtt,
            'jitter': host.jitter,
            'packet_loss': host.packet_loss
        }
    else:
        p = subprocess.Popen(
            shlex.split(f'ping -c3 {hostname}'),
            stdout=subprocess.PIPE
        )
        output = p.stdout.read()
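        # Parse the summary printed by the system ping. This assumes the Linux
        # iputils output format ('N packets transmitted, ...' and
        # 'rtt min/avg/max/mdev = ...'); other ping implementations may differ.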
        for line in output.decode('utf-8').split('\n'):
            if 'packets transmitted' in line:
                transmitted = line
            if line.startswith('rtt min'):
                rtt = line
        packet_loss = float(transmitted.split(',')[2].split('%')[0].strip())
        min_rtt, _, max_rtt, jitter = map(
            float, rtt.split('=')[1].strip().split(' ')[0].split('/')
        )
        return {
            'anchor': hostname.split('.')[0],
            'min_rtt': min_rtt,
            'max_rtt': max_rtt,
            'jitter': jitter,
            'packet_loss': packet_loss
        }


def run(config, args):
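    """Measure every anchor and upload the results to the configured URL."""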
    response = {
        "id": config['quartz']['api_key'].split('-')[0],
        "isp": config['quartz']['isp'],
        'measurements': []
    }
    hosts = [
        'fr-sxb-as8839.anchors.atlas.ripe.net',
        'us-mia-as15133.anchors.atlas.ripe.net',
        'us-qas-as15169.anchors.atlas.ripe.net',
        'br-sao-as16509.anchors.atlas.ripe.net',
        'au-syd-as16509.anchors.atlas.ripe.net',
        'fr-lio-as41405.anchors.atlas.ripe.net',
        'uy-mvd-as28000.anchors.atlas.ripe.net'
    ]
    # Ping each anchor (US, EU, South America, APAC) and build the JSON payload.
    if args.parallel:
        max_workers = len(hosts)
    else:
        max_workers = 1
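    # One worker per host when --parallel is given; otherwise ping sequentially.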
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_to_ping = {
            executor.submit(ping_host, hostname): hostname for hostname in hosts
        }
        for future in as_completed(future_to_ping):
            try:
                response['measurements'].append(future.result())
            except Exception as exc:
                LOGGER.error(f'Measurement for {future_to_ping[future]} failed: {exc}')
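    # Build the upload request; the API key from the config file is sent as the
    # api_key header.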
    url = config['quartz']['url']
    headers = CaseInsensitiveDict()
    headers["Accept"] = "application/json"
    headers["Content-Type"] = "application/json"
    headers["api_key"] = config['quartz']['api_key']
    LOGGER.info(f'Uploading data to {url}.')
    LOGGER.debug(f'Data to upload: {response}')
    if not args.read_only:
        resp = requests.post(url, headers=headers, data=json.dumps(response))
        LOGGER.info(f'Data uploaded. Status code: {resp.status_code}')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Quartz')
    parser.add_argument(
        '-l', '--log-level',
        metavar='level',
        type=loglevel_validator,
        default='INFO',
        help='The log level. Any of the standard Python levels: '
             'CRITICAL, ERROR, WARNING, INFO, DEBUG.'
    )
    parser.add_argument(
        '-s', '--silent',
        action='store_true',
        help='Log errors only. Useful when running from cron.'
    )
    parser.add_argument(
        '-p', '--parallel',
        action='store_true',
        help='Run the measurements in parallel. Time is money!'
    )
    parser.add_argument(
        '-r', '--read-only',
        action='store_true',
        help='Do not upload data. Useful to debug what would be uploaded.'
    )
    args = parser.parse_args()
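    # --silent only raises the threshold to ERROR, so failures still get logged.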
    if args.silent:
        args.log_level = 'ERROR'
    setup_logging(args.log_level)
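    # A freshly generated config only has dummy values, so stop here and let the
    # user fill in the real ones before the first run.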
    if generate_config():
        sys.exit(0)
    config = read_config()
    run(config, args)