publish my redfish_exporter

This commit is contained in:
2025-11-13 10:29:29 +01:00
parent 581df6617b
commit 5e68842356
8 changed files with 525 additions and 0 deletions

View File

@@ -0,0 +1,91 @@
from prometheus_client import Gauge, start_http_server, Summary
import requests
import urllib3
import random
import time
from concurrent.futures import ThreadPoolExecutor
# Silence InsecureRequestWarning spam caused by verify=False requests below.
urllib3.disable_warnings()
# Redfish BMC credentials — placeholders; presumably replaced/injected at deploy time (TODO confirm).
username = "<user>"
password = "<password>"
# Create a metric to track time spent and requests made.
REQUEST_TIME = Summary("request_processing_seconds", "Time spent processing request")
# Timed via the REQUEST_TIME summary so sleep durations show up in /metrics.
@REQUEST_TIME.time()
def process_request(t):
    """Simulate work by sleeping for ``t`` seconds (observed by REQUEST_TIME)."""
    time.sleep(t)
# Define Prometheus metrics.
# All three gauges are labelled by BMC hostname and PSU serial number so one
# exporter process can serve many hosts / power supplies.
voltage_gauge = Gauge(
    "redfish_psu_line_input_voltage_volts",
    "Line Input Voltage per PSU",
    ["host", "psu_serial"],
)
watts_gauge = Gauge(
    "redfish_psu_power_input_watts", "Power Input Watts per PSU", ["host", "psu_serial"]
)
# Amps are not reported by the BMC directly; they are derived as W / V in
# get_power_data().
amps_gauge = Gauge(
    "redfish_psu_input_amps", "Current draw in Amps per PSU", ["host", "psu_serial"]
)
def get_power_data(fqdn):
    """Scrape the Redfish Chassis Power endpoint of *fqdn* and update PSU gauges.

    For every entry in the chassis ``PowerSupplies`` array, publishes line
    input voltage, input watts, and a derived amps value (W / V).  Network
    and decode errors are logged and swallowed so a single unreachable BMC
    does not break the scrape loop.
    """
    # NOTE(review): chassis id "1" is hard-coded; some vendors expose other
    # ids (e.g. "Self") — confirm per platform.
    url = f"https://{fqdn}/redfish/v1/Chassis/1/Power"
    try:
        # verify=False: BMCs typically ship self-signed certificates; the
        # resulting warnings are disabled at import time.
        response = requests.get(
            url, auth=(username, password), verify=False, timeout=10
        )
        response.raise_for_status()
        data = response.json()
    except (requests.RequestException, ValueError) as e:
        # Narrowed from a bare `except Exception`: only transport/HTTP errors
        # and a non-JSON body (ValueError from .json()) are expected here, and
        # gauge updates below no longer have their own bugs silently eaten.
        print(f"Error querying {url}: {e}")
        return

    for psu in data.get("PowerSupplies", []):
        line_input_v = psu.get("LineInputVoltage")
        watts_input = psu.get("PowerInputWatts")
        # Fall back to a fixed label so a missing serial doesn't export "None".
        serial = psu.get("SerialNumber") or "unknown"

        # Derive amps = W / V.  Voltage must be non-zero (guards the division);
        # watts is checked with `is not None` so a legitimate 0 W reading is
        # still exported (the previous truthiness check dropped it).
        if line_input_v and watts_input is not None:
            amps = round(watts_input / line_input_v, 2)
        else:
            amps = None

        # Push metrics, skipping fields the BMC did not report.
        if line_input_v is not None:
            voltage_gauge.labels(host=fqdn, psu_serial=serial).set(line_input_v)
        if watts_input is not None:
            watts_gauge.labels(host=fqdn, psu_serial=serial).set(watts_input)
        if amps is not None:
            amps_gauge.labels(host=fqdn, psu_serial=serial).set(amps)
if __name__ == "__main__":
    # Expose the Prometheus /metrics endpoint on :8000.
    start_http_server(8000)

    # BMC targets to scrape; extend this list to add more hosts.
    hosts = [
        "srv1-119.mgmt.sgg1.ch.abainfra.net",
    ]

    # One worker per host so all BMCs are queried in parallel each cycle.
    executor = ThreadPoolExecutor(max_workers=len(hosts))
    while True:
        futures = [executor.submit(get_power_data, fqdn) for fqdn in hosts]
        # Block until the whole batch has finished before sleeping.
        # get_power_data swallows its own errors, so .result() should not raise.
        for future in futures:
            future.result()
        # Jittered scrape interval: random 0-1 s sleep, recorded by the
        # REQUEST_TIME summary via process_request.  (Dead commented-out
        # variants of the sleep strategy removed.)
        process_request(random.random())