Skip to content

Commit 16cd51c

Browse files
feat: influx support added
1 parent acb3843 commit 16cd51c

8 files changed

Lines changed: 258 additions & 37 deletions

File tree

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -164,3 +164,5 @@ cython_debug/
164164

165165
predefine_user.json
166166
src/assets/predefine_user.json
167+
168+
influxdb_data

app.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,9 @@
66
# background thread to monitor system settings changes
77
# print("FLASK_ENV: ", os.getenv('FLASK_ENV'))
88
# # if os.getenv('FLASK_ENV') == 'production':
9-
# start_website_monitoring() # Starts pinging active websites
10-
# fetch_file_metrics_task()
11-
# monitor_settings() # Starts monitoring for system logging changes
9+
start_website_monitoring() # Starts pinging active websites
10+
fetch_file_metrics_task()
11+
monitor_settings() # Starts monitoring for system logging changes
1212

1313
if __name__ == "__main__":
1414
app.run(host="0.0.0.0", port=5000, debug=True)

install_influx.sh

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
#!/bin/bash
2+
3+
# Define variables
4+
INFLUXDB_VERSION="latest"
5+
CONTAINER_NAME="influxdb"
6+
NETWORK_NAME="influx_network"
7+
DATA_DIR="./influxdb_data"
8+
INFLUXDB_USER="admin" # Change this to your desired username
9+
INFLUXDB_PASSWORD="admin_password" # Change this to your desired password
10+
INFLUXDB_ORG="systemguard" # Change this to your desired organization name
11+
INFLUXDB_BUCKET="system_metrics" # Change this to your desired initial bucket (database) name
12+
INFLUXDB_TOKEN="GixPn4ZmYuPyzPtj2XivfALmtHUaatBTK85ZjSe78P8NdbM_J9426tTPoqnbQDCok_NgBreOaqzdmoGxJ3UfsA=="
13+
14+
# Export the INFLUXDB_TOKEN to be used later in your app
15+
export INFLUXDB_TOKEN=$INFLUXDB_TOKEN
16+
17+
# Create a data directory for InfluxDB
18+
rm -rf $DATA_DIR
19+
mkdir -p $DATA_DIR
20+
21+
# Stop and remove existing InfluxDB container if it exists
22+
if [ "$(docker ps -aq -f name=$CONTAINER_NAME)" ]; then
23+
echo "Stopping existing InfluxDB container..."
24+
docker stop $CONTAINER_NAME
25+
echo "Removing existing InfluxDB container..."
26+
docker rm -f $CONTAINER_NAME
27+
fi
28+
29+
# Kill any process using port 8086
30+
if sudo lsof -i :8086; then
31+
echo "Killing process using port 8086..."
32+
sudo fuser -k 8086/tcp
33+
fi
34+
35+
# Create a Docker network
36+
echo "Creating Docker network..."
37+
docker network create $NETWORK_NAME || true # Avoid error if the network already exists
38+
39+
# Pull the InfluxDB Docker image
40+
echo "Pulling InfluxDB Docker image..."
41+
docker pull influxdb:$INFLUXDB_VERSION
42+
43+
# Run the InfluxDB container with authentication and initial setup to skip onboarding
44+
echo "Running InfluxDB container..."
45+
docker run -d \
46+
--name $CONTAINER_NAME \
47+
--network $NETWORK_NAME \
48+
-p 8086:8086 \
49+
-v $PWD/$DATA_DIR:/var/lib/influxdb2 \
50+
-e DOCKER_INFLUXDB_INIT_MODE=setup \
51+
-e DOCKER_INFLUXDB_INIT_USERNAME=$INFLUXDB_USER \
52+
-e DOCKER_INFLUXDB_INIT_PASSWORD=$INFLUXDB_PASSWORD \
53+
-e DOCKER_INFLUXDB_INIT_ORG=$INFLUXDB_ORG \
54+
-e DOCKER_INFLUXDB_INIT_BUCKET=$INFLUXDB_BUCKET \
55+
-e DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=$INFLUXDB_TOKEN \
56+
influxdb:$INFLUXDB_VERSION
57+
58+
# Output completion message
59+
echo "InfluxDB setup completed! Access it at http://localhost:8086 with your credentials."

src/background_task/log_system_info.py

Lines changed: 35 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@
77
from src.models import GeneralSettings, SystemInformation
88
from sqlalchemy.exc import SQLAlchemyError
99
from prometheus_client import Counter, Gauge
10+
import os, time
11+
from influxdb_client import Point, WritePrecision
1012

1113
from src.logger import logger
1214
from src.config import influx_client, bucket, write_api
@@ -78,9 +80,11 @@ def log_system_info_to_db():
7880

7981
# Update Prometheus metrics
8082
update_prometheus_metrics(system_info)
83+
# Store system information in InfluxDB
84+
store_system_info_in_influxdb(system_info)
8185

8286
# Store system information in the database
83-
store_system_info_in_db(system_info)
87+
# store_system_info_in_db(system_info)
8488
logger.info("System information logged to database.")
8589

8690
except SQLAlchemyError as db_err:
@@ -124,6 +128,36 @@ def store_system_info_in_db(system_info):
124128
db.session.add(system_log)
125129
db.session.commit()
126130

131+
def store_system_info_in_influxdb(system_info):
132+
"""
133+
Stores the collected system information into the InfluxDB with proper error handling.
134+
"""
135+
try:
136+
# Create a data point for system information
137+
point = (
138+
Point("system_info")
139+
.tag("host", os.environ.get("HOSTNAME", "unknown"))
140+
.field("cpu_percent", system_info["cpu_percent"])
141+
.field("memory_percent", system_info["memory_percent"])
142+
.field("battery_percent", system_info["battery_percent"])
143+
.field("network_sent", system_info["network_sent"])
144+
.field("network_received", system_info["network_received"])
145+
.field("dashboard_memory_usage", system_info["dashboard_memory_usage"])
146+
.field("cpu_frequency", system_info["cpu_frequency"])
147+
.field("current_temp", system_info["current_temp"])
148+
.time(int(time.time() * 1_000_000_000), WritePrecision.NS) # Nanosecond precision
149+
)
150+
151+
# Write the data point to InfluxDB
152+
write_api.write(bucket=bucket, record=point)
153+
logger.info("Successfully wrote system information to InfluxDB")
154+
155+
except ValueError as ve:
156+
logger.error(f"Value error while storing system info: {ve}")
157+
except Exception as e:
158+
logger.error(f"An unexpected error occurred: {e}", exc_info=True)
159+
160+
127161

128162
def monitor_settings():
129163
"""

src/config.py

Lines changed: 26 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,10 @@
22
from flask import Flask, render_template
33
from flask_sqlalchemy import SQLAlchemy
44
from flask_migrate import Migrate
5+
import os, time
6+
from influxdb_client import InfluxDBClient, Point, WritePrecision
7+
from influxdb_client.client.write_api import SYNCHRONOUS
8+
59

610
from src.logger import logger
711
from src.helper import get_system_node_name, get_ip_address
@@ -26,15 +30,35 @@
2630
os.makedirs(DB_DIR, exist_ok=True)
2731

2832
# Configure the SQLite database
29-
# app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:///{DB_DIR}/systemguard.db"
30-
app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:///systemguard.db"
33+
app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:///{DB_DIR}/systemguard.db"
34+
# app.config['SQLALCHEMY_DATABASE_URI'] = f"sqlite:///systemguard.db"
3135
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
3236
app.config['SECRET_KEY'] = 'secret'
3337

3438
# Initialize the database
3539
db = SQLAlchemy(app)
3640
migrate = Migrate(app, db)
3741

42+
# influx db configuration
43+
44+
INFLUXDB_TOKEN = os.environ.get("INFLUXDB_TOKEN", "")
45+
print("Token: ", INFLUXDB_TOKEN)
46+
if not INFLUXDB_TOKEN:
47+
raise ValueError("Please set the INFLUXDB_TOKEN environment variable.")
48+
org = "systemguard"
49+
url = "http://localhost:8086"
50+
bucket="_monitoring"
51+
try:
52+
influx_client = InfluxDBClient(url=url, token=INFLUXDB_TOKEN, org=org)
53+
bucket = "_monitoring"
54+
write_api = influx_client.write_api(write_options=SYNCHRONOUS)
55+
query_api = influx_client.query_api()
56+
logger.info("Connected to InfluxDB successfully")
57+
58+
except Exception as e:
59+
logger.error(f"Failed to connect to InfluxDB: {e}")
60+
raise
61+
3862
# Define global variables for templates
3963
app.jinja_env.globals.update(
4064
title=APP_NAME,

src/models/__init__.py

Lines changed: 31 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,39 @@
1818

1919
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
2020

21+
# Context processor for injecting settings into templates
22+
@app.context_processor
23+
def inject_settings():
24+
if current_user.is_anonymous:
25+
user_dashboard_settings = UserDashboardSettings(user_id=0)
26+
card_settings = None
27+
page_toggles_settings = None
28+
general_settings = None
29+
return dict(
30+
user_dashboard_settings=user_dashboard_settings,
31+
card_settings=card_settings,
32+
page_toggles_settings=page_toggles_settings,
33+
general_settings=general_settings,
34+
)
35+
general_settings = GeneralSettings.query.first()
36+
card_settings = UserCardSettings.query.filter_by(user_id=current_user.id).first()
37+
user_dashboard_settings = UserDashboardSettings.query.filter_by(
38+
user_id=current_user.id
39+
).first() # Retrieve user-specific user_dashboard_settings from DB
40+
page_toggles_settings = PageToggleSettings.query.filter_by(
41+
user_id=current_user.id
42+
).first()
43+
all_settings = dict(
44+
user_dashboard_settings=user_dashboard_settings,
45+
general_settings=general_settings,
46+
card_settings=card_settings,
47+
page_toggles_settings=page_toggles_settings,
48+
)
49+
return all_settings
50+
2151
with app.app_context():
2252
# Check if tables already exist
23-
if not db.inspect(db.engine).has_table('user_profile'): # Use an important table to check existence
53+
if not db.inspect(db.engine).has_table('users'): # Use an important table to check existence
2454
logger.info("Creating tables")
2555
db.create_all()
2656

@@ -70,32 +100,3 @@
70100
else:
71101
logger.info("Tables already exist. Skipping creation.")
72102

73-
# Context processor for injecting settings into templates
74-
@app.context_processor
75-
def inject_settings():
76-
if current_user.is_anonymous:
77-
user_dashboard_settings = UserDashboardSettings(user_id=0)
78-
card_settings = None
79-
page_toggles_settings = None
80-
general_settings = None
81-
return dict(
82-
user_dashboard_settings=user_dashboard_settings,
83-
card_settings=card_settings,
84-
page_toggles_settings=page_toggles_settings,
85-
general_settings=general_settings,
86-
)
87-
general_settings = GeneralSettings.query.first()
88-
card_settings = UserCardSettings.query.filter_by(user_id=current_user.id).first()
89-
user_dashboard_settings = UserDashboardSettings.query.filter_by(
90-
user_id=current_user.id
91-
).first() # Retrieve user-specific user_dashboard_settings from DB
92-
page_toggles_settings = PageToggleSettings.query.filter_by(
93-
user_id=current_user.id
94-
).first()
95-
all_settings = dict(
96-
user_dashboard_settings=user_dashboard_settings,
97-
general_settings=general_settings,
98-
card_settings=card_settings,
99-
page_toggles_settings=page_toggles_settings,
100-
)
101-
return all_settings

src/routes/api.py

Lines changed: 101 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,10 @@
44
from src.models import SystemInformation, UserDashboardSettings
55
from src.utils import _get_system_info, get_os_release_info, get_os_info, get_cached_value
66
from datetime import datetime, timedelta
7+
from flask import request, jsonify
78
import gc
89

10+
from src.config import query_api, bucket
911
api_bp = blueprints.Blueprint("api", __name__)
1012

1113
@app.route("/api/system-info", methods=["GET"])
@@ -109,6 +111,105 @@ def graph_data_api():
109111
# Handle and log the error for debugging purposes
110112
return jsonify({'error': 'An error occurred while fetching the graph data', 'details': str(e)}), 500
111113

114+
@app.route('/api/v2/graphs_data', methods=['GET'])
115+
@login_required
116+
def graph_data_api_v2():
117+
try:
118+
current_time = datetime.now()
119+
# Get the time filter from query parameters
120+
time_filter = request.args.get('filter', default='1 day')
121+
122+
# Determine the start time based on the filter
123+
time_deltas = {
124+
'5 minutes': '-5m',
125+
'15 minutes': '-15m',
126+
'30 minutes': '-30m',
127+
'1 hour': '-1h',
128+
'3 hours': '-3h',
129+
'6 hours': '-6h',
130+
'12 hours': '-12h',
131+
'1 day': '-1d',
132+
'2 days': '-2d',
133+
'3 days': '-3d',
134+
'1 week': '-1w',
135+
'1 month': '-30d',
136+
'3 months': '-90d',
137+
}
138+
139+
# Get the start time for the query
140+
time_range = time_deltas.get(time_filter, '-1d')
141+
142+
# Build the InfluxDB query
143+
flux_query = f"""
144+
from(bucket: "{bucket}")
145+
|> range(start: {time_range})
146+
|> filter(fn: (r) => r._measurement == "system_info")
147+
|> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
148+
"""
149+
150+
# Execute the query
151+
tables = query_api.query(flux_query)
152+
153+
# Initialize lists for the data
154+
time_data = []
155+
cpu_data = []
156+
memory_data = []
157+
battery_data = []
158+
network_sent_data = []
159+
network_received_data = []
160+
dashboard_memory_usage = []
161+
cpu_frequency = []
162+
current_temp = []
163+
164+
# Parse the results
165+
for table in tables:
166+
for record in table.records:
167+
time_data.append(record.values.get("_time", None))
168+
# Extract each field by key (handle missing fields gracefully)
169+
cpu_data.append(record.values.get("cpu_percent", None))
170+
memory_data.append(record.values.get("memory_percent", None))
171+
battery_data.append(record.values.get("battery_percent", None))
172+
network_sent_data.append(record.values.get("network_sent", None))
173+
network_received_data.append(record.values.get("network_received", None))
174+
dashboard_memory_usage.append(record.values.get("dashboard_memory_usage", None))
175+
cpu_frequency.append(record.values.get("cpu_frequency", None))
176+
current_temp.append(record.values.get("current_temp", None))
177+
178+
# Return the data as JSON
179+
response = jsonify({
180+
"time": time_data,
181+
"cpu": cpu_data,
182+
"memory": memory_data,
183+
"battery": battery_data,
184+
"network_sent": network_sent_data,
185+
"network_received": network_received_data,
186+
"dashboard_memory_usage": dashboard_memory_usage,
187+
"cpu_frequency": cpu_frequency,
188+
"current_temp": current_temp,
189+
"current_time": current_time
190+
})
191+
192+
print(time_data)
193+
# Clean up large data structures
194+
del tables
195+
del time_data
196+
del cpu_data
197+
del memory_data
198+
del battery_data
199+
del network_sent_data
200+
del network_received_data
201+
del dashboard_memory_usage
202+
del cpu_frequency
203+
del current_temp
204+
205+
gc.collect()
206+
207+
return response, 200
208+
209+
except Exception as e:
210+
# Handle and log the error for debugging purposes
211+
return jsonify({'error': 'An error occurred while fetching the graph data', 'details': str(e)}), 500
212+
112213
@app.route('/api/v1/refresh-interval', methods=['GET', 'POST'])
113214
@login_required
114215
def manage_refresh_interval():

src/static/js/graphs.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ function fetchDataAndRenderCharts() {
1212
console.log('Stored Filter Value:', storedFilterValue);
1313

1414
// Fetch data with the selected time filter
15-
fetch(`/api/v1/graphs_data?filter=${storedFilterValue}`)
15+
fetch(`/api/v2/graphs_data?filter=${storedFilterValue}`)
1616
.then(response => response.json())
1717
.then(data => {
1818
const cpuData = data.cpu;

0 commit comments

Comments
 (0)