11import os
22import yaml
33import subprocess
4+ from collections import OrderedDict
45from src .utils import ROOT_DIR
56
# Absolute path to the Prometheus configuration file under the project root.
prometheus_yml_path = os.path.join(ROOT_DIR, 'prometheus_config/prometheus.yml')
@@ -16,82 +17,103 @@ def is_valid_file(file_path: str) -> bool:
1617 return False
1718 return True
1819
20+
class OrderedDumper(yaml.SafeDumper):
    """Custom YAML dumper that preserves order of keys."""


def dict_representer(dumper, data):
    # Emit the OrderedDict as a plain YAML mapping, in insertion order.
    items = data.items()
    return dumper.represent_dict(items)


# Register the representer so OrderedDump​er knows how to serialize OrderedDict.
OrderedDumper.add_representer(OrderedDict, dict_representer)
29+
def load_yaml(file_path):
    """Parse the YAML file at *file_path* and return its contents.

    Uses the safe loader; dict key order is preserved (Python dicts keep
    insertion order).
    """
    with open(file_path, 'r') as fh:
        return yaml.safe_load(fh)
2334
def save_yaml(data, file_path):
    """Write *data* back to *file_path* as block-style YAML.

    Uses OrderedDumper so OrderedDict keys are emitted in insertion order.
    """
    with open(file_path, 'w') as fh:
        yaml.dump(data, fh, Dumper=OrderedDumper, default_flow_style=False)
4339
def update_prometheus_config():
    """Update the 'localhost' job's first target with this machine's IP address.

    Reads the Prometheus YAML config, rewrites the first target of the
    'localhost' scrape job to '<ip>:5050', and writes the file back with
    key order preserved.

    Returns:
        bool: True if the config was updated and saved, False otherwise.
    """
    print("Updating Prometheus config...")

    # Get the machine's primary IPv4 address via `hostname -I`.
    # NOTE: also guard against `hostname` being absent (FileNotFoundError)
    # and against empty output (no network), which would raise IndexError.
    try:
        stdout = subprocess.run(
            ['hostname', '-I'], capture_output=True, text=True, check=True
        ).stdout
        addresses = stdout.split()
        if not addresses:
            print("Error getting IP address: 'hostname -I' reported no address")
            return False
        ipv4_address = addresses[0]
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        print(f"Error getting IP address: {e}")
        return False

    # Load the existing config
    try:
        config = load_yaml(prometheus_yml_path)
    except Exception as e:
        print(f"Error loading YAML config: {e}")
        return False

    # Fetch the 'localhost' job
    localhost_job = next(
        (job for job in config.get('scrape_configs', [])
         if job.get('job_name') == 'localhost'),
        None,
    )

    if not localhost_job:
        print("No 'localhost' job found in Prometheus config.")
        return False

    # Update the IP address for the 'localhost' job target
    localhost_job['static_configs'][0]['targets'][0] = f'{ipv4_address}:5050'

    # Rebuild the job as an OrderedDict so the well-known keys come first,
    # then copy every remaining key so no settings (e.g. basic_auth,
    # metrics_path) are silently dropped.
    updated_job = OrderedDict()
    updated_job['job_name'] = localhost_job['job_name']
    updated_job['scrape_interval'] = localhost_job.get('scrape_interval', '10s')
    updated_job['static_configs'] = localhost_job['static_configs']
    for key, value in localhost_job.items():
        if key not in updated_job:
            updated_job[key] = value

    # Replace the old job entry with the rebuilt one
    for index, job in enumerate(config['scrape_configs']):
        if job.get('job_name') == 'localhost':
            config['scrape_configs'][index] = updated_job
            break

    # Save the updated config
    try:
        save_yaml(config, prometheus_yml_path)
    except Exception as e:
        print(f"Error saving YAML config: {e}")
        return False

    print("Prometheus config updated successfully.")
    return True
8092
def show_targets():
    """Return a summary dict (job name, targets, scrape interval) per job."""
    config = load_yaml(prometheus_yml_path)
    summary = []
    for job in config.get('scrape_configs', []):
        static_configs = job.get('static_configs', [{}])
        summary.append({
            'job_name': job['job_name'],
            'targets': static_configs[0].get('targets', []),
            'scrape_interval': job.get('scrape_interval', '15s'),
        })
    return summary
107+
def update_prometheus_container():
    """Run the shell script that restarts/refreshes the Prometheus container."""
    try:
        completed = subprocess.run(
            ['bash', update_prometheus_path],
            check=True,
            text=True,
            capture_output=True,
        )
    except subprocess.CalledProcessError as exc:
        print(f"An error occurred while updating Prometheus container: {exc}")
        return

    # Surface whatever the script printed so the operator can inspect it.
    print("Output:")
    print(completed.stdout)
    if completed.stderr:
        print("Errors:")
        print(completed.stderr)