Skip to content

Commit 8092574

Browse files
Refactor Prometheus config update function and improve authentication security
1 parent ad2e53a commit 8092574

3 files changed

Lines changed: 123 additions & 58 deletions

File tree

Lines changed: 68 additions & 46 deletions
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,7 @@
11
import os
22
import yaml
33
import subprocess
4+
from collections import OrderedDict
45
from src.utils import ROOT_DIR
56

67
prometheus_yml_path = os.path.join(ROOT_DIR, 'prometheus_config/prometheus.yml')
@@ -16,82 +17,103 @@ def is_valid_file(file_path: str) -> bool:
1617
return False
1718
return True
1819

20+
21+
class OrderedDumper(yaml.SafeDumper):
    """SafeDumper subclass whose registered representers keep mapping keys in insertion order."""


def dict_representer(dumper, data):
    """Represent an OrderedDict as a plain YAML mapping, preserving key order."""
    return dumper.represent_dict(data.items())


# Route OrderedDict instances through the order-preserving representer.
OrderedDumper.add_representer(OrderedDict, dict_representer)
29+
1930
def load_yaml(file_path):
    """Parse and return the YAML document at *file_path* using the safe loader."""
    with open(file_path, 'r') as fh:
        # safe_load is the documented shorthand for load(..., Loader=SafeLoader).
        return yaml.safe_load(fh)
2334

2435
def save_yaml(data, file_path):
    """Serialize *data* to *file_path* as block-style YAML, preserving key order.

    Uses OrderedDumper so OrderedDict values keep their insertion order.
    """
    with open(file_path, 'w') as fh:
        yaml.dump(data, fh, Dumper=OrderedDumper, default_flow_style=False)
4339

4440
def update_prometheus_config():
    """Point the 'localhost' scrape job at this machine's current IPv4 address.

    Reads the Prometheus YAML config, rewrites the first target of the
    'localhost' job to '<ip>:5050', rebuilds that job as an OrderedDict so
    keys serialize in a stable order (job_name, scrape_interval,
    static_configs, then basic_auth), and writes the file back.

    Returns:
        bool: True when the config was updated and saved, False otherwise.
    """
    print("Updating Prometheus config...")

    # `hostname -I` prints the machine's addresses; the first one is used.
    try:
        addresses = subprocess.run(
            ['hostname', '-I'], capture_output=True, text=True, check=True
        ).stdout.split()
        ipv4_address = addresses[0]
    except (subprocess.CalledProcessError, FileNotFoundError, IndexError) as e:
        # IndexError: no address reported; FileNotFoundError: command missing.
        print(f"Error getting IP address: {e}")
        return False

    # Load the existing config.
    try:
        config = load_yaml(prometheus_yml_path)
    except Exception as e:
        print(f"Error loading YAML config: {e}")
        return False

    # Fetch the 'localhost' job.
    localhost_job = next(
        (job for job in (config or {}).get('scrape_configs', [])
         if job.get('job_name') == 'localhost'),
        None,
    )

    if localhost_job:
        # Update the IP address for the 'localhost' job's first target;
        # guard against a structurally malformed job entry.
        try:
            localhost_job['static_configs'][0]['targets'][0] = f'{ipv4_address}:5050'
        except (KeyError, IndexError, TypeError) as e:
            print(f"Malformed 'localhost' job in Prometheus config: {e}")
            return False

        # Rebuild the job as an OrderedDict to control serialized key order.
        updated_job = OrderedDict()
        updated_job['job_name'] = localhost_job['job_name']
        updated_job['scrape_interval'] = localhost_job.get('scrape_interval', '10s')
        updated_job['static_configs'] = localhost_job['static_configs']

        # basic_auth (if present) is carried over and serialized last.
        if 'basic_auth' in localhost_job:
            updated_job['basic_auth'] = localhost_job['basic_auth']

        # Replace the old job entry with the rebuilt one.
        for index, job in enumerate(config['scrape_configs']):
            if job.get('job_name') == 'localhost':
                config['scrape_configs'][index] = updated_job
                break

        # Save the updated config.
        try:
            save_yaml(config, prometheus_yml_path)
        except Exception as e:
            print(f"Error saving YAML config: {e}")
            return False
        print("Prometheus config updated successfully.")
        return True

    print("No 'localhost' job found in Prometheus config.")
    return False
8092

93+
def show_targets():
    """Return one {job_name, targets, scrape_interval} dict per configured scrape job."""
    config = load_yaml(prometheus_yml_path)
    return [
        {
            'job_name': job['job_name'],
            'targets': job.get('static_configs', [{}])[0].get('targets', []),
            'scrape_interval': job.get('scrape_interval', '15s'),
        }
        for job in config.get('scrape_configs', [])
    ]
107+
81108
def update_prometheus_container():
    """Run the update shell script for the Prometheus container.

    Executes the script at `update_prometheus_path` via bash, echoing its
    stdout (and stderr, if any). Failures are printed, never raised.
    """
    try:
        result = subprocess.run(
            ['bash', update_prometheus_path],
            check=True, text=True, capture_output=True,
        )
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        # FileNotFoundError: bash itself is unavailable on this host.
        print(f"An error occurred while updating Prometheus container: {e}")
        return

    print("Output:")
    print(result.stdout)

    if result.stderr:
        print("Errors:")
        print(result.stderr)

src/routes/prometheus.py

Lines changed: 48 additions & 11 deletions
Original file line number · Diff line number · Diff line change
@@ -2,6 +2,7 @@
22
from prometheus_client import generate_latest
33
import os
44
import yaml
5+
from collections import OrderedDict
56

67
from src.config import app, db
78
from src.models import ExternalMonitornig
@@ -73,14 +74,13 @@ def delete_file_path(id):
7374
@app.route('/configure_targets')
@admin_required
def configure_targets():
    """Refresh the Prometheus config, then render the targets overview page."""
    update_prometheus_config()
    return render_template('other/targets.html', targets_info=show_targets())
7980

8081
@app.route('/targets/restart_prometheus')
@admin_required
def restart_prometheus():
    """Restart the Prometheus container, flash a notice, and return to the targets page."""
    update_prometheus_container()
    flash('Prometheus container restarted successfully!', 'success')
    return redirect(url_for('configure_targets'))
@@ -89,26 +89,63 @@ def restart_prometheus():
8989
def add_target():
9090
job_name = request.form.get('job_name')
9191
new_target = request.form.get('new_target')
92+
username = request.form.get('username')
93+
password = request.form.get('password')
9294
scrape_interval = request.form.get('scrape_interval', '15s') + 's' # New scrape interval
9395
config = load_yaml(prometheus_yml_path)
9496

95-
# new target should be <ip>:<port> check if it is in the correct format
97+
# Validate target format
9698
if ':' not in new_target:
9799
flash('Invalid target format. It should be in the format <ip>:<port>.', 'danger')
98100
return redirect(url_for('configure_targets'))
99-
101+
102+
job_found = False
103+
100104
for scrape_config in config['scrape_configs']:
101105
if scrape_config['job_name'] == job_name:
106+
# Append new target
102107
scrape_config['static_configs'][0]['targets'].append(new_target)
108+
job_found = True
109+
110+
# Update scrape interval
111+
scrape_config['scrape_interval'] = scrape_interval
112+
113+
# Prepare the updated job dictionary to maintain order
114+
updated_job = OrderedDict()
115+
updated_job['job_name'] = scrape_config['job_name']
116+
updated_job['static_configs'] = scrape_config['static_configs']
117+
updated_job['scrape_interval'] = scrape_config['scrape_interval']
118+
119+
# Update basic_auth if provided
120+
if username and password:
121+
updated_job['basic_auth'] = {
122+
'username': username,
123+
'password': password
124+
}
125+
126+
# Replace the existing job with the updated one
127+
index = config['scrape_configs'].index(scrape_config)
128+
config['scrape_configs'][index] = updated_job
129+
103130
break
104-
else:
105-
new_job = {
106-
'job_name': job_name,
107-
'static_configs': [{'targets': [new_target]}],
108-
'scrape_interval': scrape_interval # Set the specific interval
109-
}
110-
config['scrape_configs'].append(new_job)
131+
132+
# if not job_found:
133+
# # Create new job entry
134+
# new_job = OrderedDict()
135+
# new_job['job_name'] = job_name
136+
# new_job['static_configs'] = [{'targets': [new_target]}]
137+
# new_job['scrape_interval'] = scrape_interval
111138

139+
# # Add basic_auth if provided
140+
# if username and password:
141+
# new_job['basic_auth'] = {
142+
# 'username': username,
143+
# 'password': password
144+
# }
145+
# # Append the new job to scrape_configs
146+
# config['scrape_configs'].append(new_job)
147+
148+
# Save the updated config
112149
save_yaml(config, prometheus_yml_path)
113150
flash('Target added successfully!', 'success')
114151
# update_prometheus_container()

src/scripts/prometheus.sh

Lines changed: 7 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -27,7 +27,7 @@ PROMETHEUS_DATA_DIR="/home/$USER_NAME/.database/prometheus"
2727
FLASK_APP_IP=$(hostname -I | cut -d' ' -f1)
2828
FLASK_APP_PORT="5050"
2929
SCRAPING_INTERVAL="10s"
30-
JOB_NAME='systemguard-metrics'
30+
monitor='systemguard-metrics'
3131

3232
# Logging function for better readability
3333
log() {
@@ -42,6 +42,12 @@ mkdir -p "$PROMETHEUS_DATA_DIR"
4242
# Create the prometheus.yml configuration file
4343
log "Generating prometheus.yml configuration file."
4444
cat > "$PROMETHEUS_CONFIG_FILE" <<EOL
45+
global:
46+
external_labels:
47+
system: $monitor
48+
environment: 'production'
49+
user: '$USER_NAME'
50+
4551
scrape_configs:
4652
- job_name: localhost
4753
scrape_interval: $SCRAPING_INTERVAL

0 commit comments

Comments
 (0)