Skip to content

Commit 7641ab6

Browse files
feat: add support for adding and removing Prometheus scrape targets
1 parent 0c7fd05 commit 7641ab6

13 files changed

Lines changed: 337 additions & 161 deletions

File tree

requirements.txt

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,4 +29,7 @@ speedtest-cli==2.1.3
2929
prometheus_client==0.20.0
3030

3131
# influxdb-client, optional for writing metrics to InfluxDB
32-
# influxdb-client==1.46.0
32+
# influxdb-client==1.46.0
33+
34+
# pyyaml for parsing YAML configuration files
35+
pyyaml==6.0.2

src/background_task/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import os
22
from src.background_task.monitor_website import start_website_monitoring
33
from src.background_task.log_system_info import monitor_settings
4-
from src.background_task.prometheus_helper import fetch_file_metrics_task
4+
from src.background_task.external_monitoring import fetch_file_metrics_task
55
from src.logger import logger
66

77

File renamed without changes.

src/influxdb_config.py

Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,3 +20,103 @@
2020
# except Exception as e:
2121
# logger.error(f"Failed to connect to InfluxDB: {e}")
2222
# raiseclient = InfluxDBClient(url=url, token=INFLUXDB_TOKEN, org=org)
23+
24+
25+
26+
# @app.route('/api/v2/influxdb/graphs_data', methods=['GET'])
27+
# @login_required
28+
# def graph_data_api_v2():
29+
# try:
30+
# current_time = datetime.now()
31+
# # Get the time filter from query parameters
32+
# time_filter = request.args.get('filter', default='1 day')
33+
34+
# # Determine the start time based on the filter
35+
# time_deltas = {
36+
# '5 minutes': '-5m',
37+
# '15 minutes': '-15m',
38+
# '30 minutes': '-30m',
39+
# '1 hour': '-1h',
40+
# '3 hours': '-3h',
41+
# '6 hours': '-6h',
42+
# '12 hours': '-12h',
43+
# '1 day': '-1d',
44+
# '2 days': '-2d',
45+
# '3 days': '-3d',
46+
# '1 week': '-1w',
47+
# '1 month': '-30d',
48+
# '3 months': '-90d',
49+
# }
50+
51+
# # Get the start time for the query
52+
# time_range = time_deltas.get(time_filter, '-1d')
53+
54+
# # Build the InfluxDB query
55+
# flux_query = f"""
56+
# from(bucket: "{bucket}")
57+
# |> range(start: {time_range})
58+
# |> filter(fn: (r) => r._measurement == "system_info")
59+
# |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
60+
# """
61+
62+
# # Execute the query
63+
# tables = query_api.query(flux_query)
64+
65+
# # Initialize lists for the data
66+
# time_data = []
67+
# cpu_data = []
68+
# memory_data = []
69+
# battery_data = []
70+
# network_sent_data = []
71+
# network_received_data = []
72+
# dashboard_memory_usage = []
73+
# cpu_frequency = []
74+
# current_temp = []
75+
76+
# # Parse the results
77+
# for table in tables:
78+
# for record in table.records:
79+
# time_data.append(record.values.get("_time", None))
80+
# # Extract each field by key (handle missing fields gracefully)
81+
# cpu_data.append(record.values.get("cpu_percent", None))
82+
# memory_data.append(record.values.get("memory_percent", None))
83+
# battery_data.append(record.values.get("battery_percent", None))
84+
# network_sent_data.append(record.values.get("network_sent", None))
85+
# network_received_data.append(record.values.get("network_received", None))
86+
# dashboard_memory_usage.append(record.values.get("dashboard_memory_usage", None))
87+
# cpu_frequency.append(record.values.get("cpu_frequency", None))
88+
# current_temp.append(record.values.get("current_temp", None))
89+
90+
# # Return the data as JSON
91+
# response = jsonify({
92+
# "time": time_data,
93+
# "cpu": cpu_data,
94+
# "memory": memory_data,
95+
# "battery": battery_data,
96+
# "network_sent": network_sent_data,
97+
# "network_received": network_received_data,
98+
# "dashboard_memory_usage": dashboard_memory_usage,
99+
# "cpu_frequency": cpu_frequency,
100+
# "current_temp": current_temp,
101+
# "current_time": current_time
102+
# })
103+
104+
# # Clean up large data structures
105+
# del tables
106+
# del time_data
107+
# del cpu_data
108+
# del memory_data
109+
# del battery_data
110+
# del network_sent_data
111+
# del network_received_data
112+
# del dashboard_memory_usage
113+
# del cpu_frequency
114+
# del current_temp
115+
116+
# gc.collect()
117+
118+
# return response, 200
119+
120+
# except Exception as e:
121+
# # Handle and log the error for debugging purposes
122+
# return jsonify({'error': 'An error occurred while fetching the graph data', 'details': str(e)}), 500

src/models/page_toggle_settings.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import datetime
21
from src.config import db
32

43
class PageToggleSettings(db.Model):
@@ -26,4 +25,4 @@ class PageToggleSettings(db.Model):
2625
is_disk_info_enabled = db.Column(db.Boolean, default=True)
2726
is_network_info_enabled = db.Column(db.Boolean, default=True)
2827
is_process_info_enabled = db.Column(db.Boolean, default=True)
29-
is_dashboard_network_enabled = db.Column(db.Boolean, default=False)
28+
is_dashboard_network_enabled = db.Column(db.Boolean, default=True)

src/routes/api.py

Lines changed: 2 additions & 101 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
api_bp = blueprints.Blueprint("api", __name__)
1515

1616
PROMETHEUS_URL = 'http://localhost:9090' # Change if using a different URL or port
17-
QUERY_API_URL = f'{PROMETHEUS_URL}/api/v1/query'
17+
QUERY_API_URL = f'{PROMETHEUS_URL}/api/v1/query_range'
1818
TARGETS_API_URL = f'{PROMETHEUS_URL}/api/v1/targets'
1919

2020
PROMETHEUS_METRICS = {
@@ -28,7 +28,7 @@
2828
'current_temp': 'cpu_temperature',
2929
}
3030

31-
@app.route("/api/system-info", methods=["GET"])
31+
@app.route("/api/v1/system-info", methods=["GET"])
3232
@login_required
3333
def system_api():
3434
try:
@@ -142,104 +142,6 @@ def graph_data_api():
142142
# Handle and log the error for debugging purposes
143143
return jsonify({'error': 'An error occurred while fetching the graph data', 'details': str(e)}), 500
144144

145-
# @app.route('/api/v2/influxdb/graphs_data', methods=['GET'])
146-
# @login_required
147-
# def graph_data_api_v2():
148-
# try:
149-
# current_time = datetime.now()
150-
# # Get the time filter from query parameters
151-
# time_filter = request.args.get('filter', default='1 day')
152-
153-
# # Determine the start time based on the filter
154-
# time_deltas = {
155-
# '5 minutes': '-5m',
156-
# '15 minutes': '-15m',
157-
# '30 minutes': '-30m',
158-
# '1 hour': '-1h',
159-
# '3 hours': '-3h',
160-
# '6 hours': '-6h',
161-
# '12 hours': '-12h',
162-
# '1 day': '-1d',
163-
# '2 days': '-2d',
164-
# '3 days': '-3d',
165-
# '1 week': '-1w',
166-
# '1 month': '-30d',
167-
# '3 months': '-90d',
168-
# }
169-
170-
# # Get the start time for the query
171-
# time_range = time_deltas.get(time_filter, '-1d')
172-
173-
# # Build the InfluxDB query
174-
# flux_query = f"""
175-
# from(bucket: "{bucket}")
176-
# |> range(start: {time_range})
177-
# |> filter(fn: (r) => r._measurement == "system_info")
178-
# |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
179-
# """
180-
181-
# # Execute the query
182-
# tables = query_api.query(flux_query)
183-
184-
# # Initialize lists for the data
185-
# time_data = []
186-
# cpu_data = []
187-
# memory_data = []
188-
# battery_data = []
189-
# network_sent_data = []
190-
# network_received_data = []
191-
# dashboard_memory_usage = []
192-
# cpu_frequency = []
193-
# current_temp = []
194-
195-
# # Parse the results
196-
# for table in tables:
197-
# for record in table.records:
198-
# time_data.append(record.values.get("_time", None))
199-
# # Extract each field by key (handle missing fields gracefully)
200-
# cpu_data.append(record.values.get("cpu_percent", None))
201-
# memory_data.append(record.values.get("memory_percent", None))
202-
# battery_data.append(record.values.get("battery_percent", None))
203-
# network_sent_data.append(record.values.get("network_sent", None))
204-
# network_received_data.append(record.values.get("network_received", None))
205-
# dashboard_memory_usage.append(record.values.get("dashboard_memory_usage", None))
206-
# cpu_frequency.append(record.values.get("cpu_frequency", None))
207-
# current_temp.append(record.values.get("current_temp", None))
208-
209-
# # Return the data as JSON
210-
# response = jsonify({
211-
# "time": time_data,
212-
# "cpu": cpu_data,
213-
# "memory": memory_data,
214-
# "battery": battery_data,
215-
# "network_sent": network_sent_data,
216-
# "network_received": network_received_data,
217-
# "dashboard_memory_usage": dashboard_memory_usage,
218-
# "cpu_frequency": cpu_frequency,
219-
# "current_temp": current_temp,
220-
# "current_time": current_time
221-
# })
222-
223-
# # Clean up large data structures
224-
# del tables
225-
# del time_data
226-
# del cpu_data
227-
# del memory_data
228-
# del battery_data
229-
# del network_sent_data
230-
# del network_received_data
231-
# del dashboard_memory_usage
232-
# del cpu_frequency
233-
# del current_temp
234-
235-
# gc.collect()
236-
237-
# return response, 200
238-
239-
# except Exception as e:
240-
# # Handle and log the error for debugging purposes
241-
# return jsonify({'error': 'An error occurred while fetching the graph data', 'details': str(e)}), 500
242-
243145
@app.route('/api/v1/prometheus/graphs_data', methods=['GET'])
244146
@login_required
245147
def graph_data_api_v3():
@@ -509,4 +411,3 @@ def get_os_info_api():
509411
return jsonify(os_info), 200
510412
except Exception as e:
511413
return jsonify({"error": "An error occurred while fetching the OS information", "details": str(e)}), 500
512-

src/routes/helper/__init__.py

Whitespace-only changes.
Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
import os
2+
import yaml
3+
from src.utils import ROOT_DIR
4+
5+
prometheus_yml_path = os.path.join(ROOT_DIR, 'prometheus_config/prometheus.yml')
6+
7+
def is_valid_file(file_path: str) -> bool:
    """Check that a config file consists of colon-separated key-value lines.

    Blank lines and YAML-style comment lines (starting with ``#``) are
    ignored — the original check rejected any line without a ``:``, which
    wrongly flagged commented Prometheus config files as invalid.

    Args:
        file_path: Path of the file to validate.

    Returns:
        True if every meaningful line contains a ``:``, False otherwise.
    """
    with open(file_path, 'r') as file:
        for line in file:
            stripped = line.strip()
            # Skip blanks and comments — they carry no key-value pair.
            if not stripped or stripped.startswith('#'):
                continue
            if ':' not in stripped:
                return False
    return True
16+
17+
def load_yaml(file_path):
    """Read *file_path* and return its parsed YAML content.

    ``yaml.safe_load`` is used so that arbitrary Python objects embedded
    in the file are never instantiated.
    """
    with open(file_path, 'r') as stream:
        parsed = yaml.safe_load(stream)
    return parsed
21+
22+
def save_yaml(data, file_path):
    """Serialize *data* as YAML and write it to *file_path*.

    ``default_flow_style=False`` keeps the output in block (multi-line)
    form, matching hand-edited Prometheus configuration files.
    """
    with open(file_path, 'w') as stream:
        yaml.dump(data, stream, default_flow_style=False)
26+
27+
def show_targets():
    """Return a summary of every scrape job in the Prometheus config.

    Reads the prometheus.yml pointed to by ``prometheus_yml_path`` and
    builds one entry per job with its name, target list, and scrape
    interval (Prometheus's default of ``15s`` when unspecified).

    Returns:
        list[dict]: entries with keys ``'job_name'``, ``'targets'`` and
        ``'scrape_interval'``.
    """
    config = load_yaml(prometheus_yml_path) or {}
    targets_info = []
    for scrape_config in config.get('scrape_configs', []):
        # A job may lack static_configs (e.g. it uses service discovery);
        # report an empty target list instead of raising KeyError.
        static_configs = scrape_config.get('static_configs') or [{}]
        targets_info.append({
            'job_name': scrape_config.get('job_name', ''),
            'targets': static_configs[0].get('targets', []),
            'scrape_interval': scrape_config.get('scrape_interval', '15s'),
        })
    return targets_info

0 commit comments

Comments
 (0)