First version
CHANGELOG.md (new file, 13 lines)
@@ -0,0 +1,13 @@

# Changelog

All notable changes to this project will be documented in this file.

Added     : for new features.
Changed   : for changes in existing functionality.
Deprecated: for soon-to-be removed features.
Removed   : for now removed features.
Fixed     : for any bug fixes.
Security  : in case of vulnerabilities.

## [1.0.0] - 2022-10-28

First working version.
README.md (modified, 57 lines changed)
@@ -1,3 +1,56 @@

-# victron_mqqt_exporter
+# Prometheus exporter for Victron MQTT devices

-A purposely build Prometheus exporter for Victron Energy Cerbo GX MQTT devices
+A purpose-built Prometheus exporter for Victron Energy Cerbo GX MQTT devices.

(C) 2022 M. Konstapel https://meezenest.nl/mees

Based on the general-purpose Prometheus exporter for MQTT by Frederic Hemberger
(https://github.com/fhemberger/mqtt_exporter).

Subscribes to one or more MQTT topics and lets you configure Prometheus metrics based on pattern matching.

## Features

- Converts the JSON strings published on Victron MQTT device topics into values readable by Prometheus (see the sketch after this list).
- Sends a keepalive signal to the Victron Energy Cerbo GX MQTT device every 30 seconds.
- Supported metrics:
  - standard metrics
    - Gauge, Counter, Histogram, Summary
  - additional
    - **Counter (Absolute):**
      - Same as Counter, but works with the absolute numbers received over MQTT, which is far more common than publishing the difference on each update.
      - e.g. a network counter or a rain sensor
    - **Enum:**
      - A less common metric type; details can be found in the [OpenMetrics docs](https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#stateset) and the [Python client code](https://github.com/prometheus/client_python/blob/9a24236695c9ad47f9dc537a922a6d1333d8d093/prometheus_client/metrics.py#L640-L698).
      - Tracks a state from a known set of strings describing that state, e.g. `on/off` or `high/medium/low`.
      - Common sources would be a light switch or a door lock.
- Comprehensive rewriting for topic, value/payload and labels
  - similar to Prometheus label rewrites
  - regex allows almost every conversion
  - e.g. to
    - remove units or other strings from the payload
    - convert the topic hierarchy into labels
    - normalize labels
  - check the example configs in `./exampleconf` and the configs in `./test/test_data/`
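The JSON-to-value conversion in the first feature bullet boils down to unwrapping the single `value` field of each message. A minimal sketch of that idea, assuming a Cerbo GX payload shape like `{"value": 26.8}` (the payload shape and helper name are illustrative, not taken from this repository):

```python
import json

def victron_payload_to_float(payload: bytes):
    """Unwrap a Victron-style JSON payload into a float Prometheus can store.

    Assumes the device publishes objects like b'{"value": 26.8}'; returns
    None when the payload carries no usable number (e.g. {"value": null}).
    """
    try:
        data = json.loads(payload.decode('utf-8'))
    except (ValueError, UnicodeDecodeError):
        return None
    value = data.get('value') if isinstance(data, dict) else data
    try:
        return float(value)
    except (TypeError, ValueError):
        return None

# Example: a battery state-of-charge message
print(victron_payload_to_float(b'{"value": 85.0}'))  # -> 85.0
```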
## Usage

- Create a folder to hold the config (default: `conf/`).
- Add metric config(s) in YAML format to the folder. Files are combined and read as a single config. (See `exampleconf/metric_example.yaml` for details.)
- Install dependencies with `pip3 install -r requirements-frozen.txt`.
- Run `./mqtt_exporter.py`.

## Python dependencies

- paho-mqtt
- prometheus-client
- PyYAML
- yamlreader

## TODO

- Add persistence of metrics on restart
- Forget/age out metrics that no longer receive updates
conf/conf.yaml (new file, 26 lines)
@@ -0,0 +1,26 @@

# Config file for MQTT prometheus exporter

# Logging
#logging:
#  logfile: ''              # Optional default '' (stdout)
#  level: 'info'            # Optional default 'info'

# MQTT All values default to paho.mqtt.client defaults
mqtt:
  host: '192.168.88.98'     # Optional default 'localhost'
#  port: 1883               # Optional default '1883'
#  keepalive: 60            # Optional
#  auth:                    # Optional If included, username_pw_set() is called with user/password
#    username: 'user'       # Required (when auth is present)
#    password: 'pass'       # Optional
#  tls:                     # Optional If included, tls_set() is called with the following:
#    ca_certs:              # Optional
#    certfile:              # Optional
#    keyfile:               # Optional
#    cert_reqs:             # Optional
#    tls_version:           # Optional
#    ciphers:               # Optional

# Prometheus
#prometheus:
#  exporter_port:           # Optional default 9344
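As the comments above state, an optional `auth` block is handed to paho-mqtt's `username_pw_set()` and an optional `tls` block to `tls_set()`. A minimal sketch of how such a config maps onto a paho client (the values are placeholders, not part of this repository):

```python
import paho.mqtt.client as mqtt

cfg = {
    'host': '192.168.88.98',
    'port': 1883,
    'keepalive': 60,
    'auth': {'username': 'user', 'password': 'pass'},
    # 'tls': {'ca_certs': '/etc/ssl/certs/ca-certificates.crt'},
}

client = mqtt.Client()
if 'auth' in cfg:
    # username is required, password is optional (defaults to None)
    client.username_pw_set(cfg['auth']['username'], cfg['auth'].get('password'))
if 'tls' in cfg:
    # the config keys match tls_set()'s keyword arguments
    client.tls_set(**cfg['tls'])
client.connect(cfg['host'], cfg['port'], cfg['keepalive'])
```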
conf/metric_example.yaml (new file, 21 lines)
@@ -0,0 +1,21 @@

# Example metric definition
metrics:
  - name: 'battery_soc'     # Required (unique; if duplicated, only the last entry is kept)
    help: 'state of charge in percentage'  # Required
    type: 'gauge'           # Required ('gauge', 'counter', 'summary' or 'histogram')
    topic: 'N/+/battery/512/Soc'

  - name: 'grid_energy_forward'
    help: 'imported energy in kWh'
    type: 'gauge'
    topic: 'N/+/grid/30/Ac/Energy/Forward'

  - name: 'grid_energy_reverse'
    help: 'exported energy in kWh'
    type: 'gauge'
    topic: 'N/+/grid/30/Ac/Energy/Reverse'

  - name: 'grid_power'
    help: 'grid power in Watts'
    type: 'gauge'
    topic: 'N/+/grid/30/Ac/Power'
exampleconf/conf.yaml (new file, 26 lines)
@@ -0,0 +1,26 @@

# Config file for MQTT prometheus exporter

# Logging
#logging:
#  logfile: ''              # Optional default '' (stdout)
#  level: 'info'            # Optional default 'info'

# MQTT All values default to paho.mqtt.client defaults
#mqtt:
#  host: 'mqtt.example.com' # Optional default 'localhost'
#  port: 1883               # Optional default '1883'
#  keepalive: 60            # Optional
#  auth:                    # Optional If included, username_pw_set() is called with user/password
#    username: 'user'       # Required (when auth is present)
#    password: 'pass'       # Optional
#  tls:                     # Optional If included, tls_set() is called with the following:
#    ca_certs:              # Optional
#    certfile:              # Optional
#    keyfile:               # Optional
#    cert_reqs:             # Optional
#    tls_version:           # Optional
#    ciphers:               # Optional

# Prometheus
#prometheus:
#  exporter_port:           # Optional default 9344
exampleconf/histogram.yaml (new file, 23 lines)
@@ -0,0 +1,23 @@

# histogram metric with buckets <= 0.5, 5, 10, +inf
- name: 'network_ping_ms'
  help: 'ping response in ms'
  type: 'histogram'
  topic: 'network/+/+/ping'
  parameters:
    buckets:
      - 0.5
      - 5
      - 10
  label_configs:
    - source_labels: ['__msg_topic__']
      target_label: '__topic__'
    - source_labels: ["__msg_topic__"]
      regex: "network/([^/]+).*"
      target_label: "network"
      replacement: '\1'
      action: "replace"
    - source_labels: ["__msg_topic__"]
      regex: "network/[^/]+/([^/]+).*"
      target_label: "server"
      replacement: '\1'
      action: "replace"
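In prometheus_client terms, the metric this config describes corresponds roughly to the following sketch (the label set is assumed from the label_configs above, plus the `topic` label the exporter adds):

```python
from prometheus_client import Histogram

# Buckets <= 0.5, 5, 10; prometheus_client adds the +Inf bucket itself.
ping_histogram = Histogram(
    'network_ping_ms',
    'ping response in ms',
    labelnames=['topic', 'network', 'server'],
    buckets=(0.5, 5, 10),
)

# One observation per MQTT message on network/<network>/<server>/ping
ping_histogram.labels(
    topic='network/vlan11/srv01.local/ping',
    network='vlan11',
    server='srv01.local',
).observe(2.0)
```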
exampleconf/metric_example.yaml (new file, 27 lines)
@@ -0,0 +1,27 @@

# Example metric definition
metrics:
  - name: 'mqtt_example'    # Required (unique; if duplicated, only the last entry is kept)
    help: 'MQTT example gauge'  # Required
    type: 'gauge'           # Required ('gauge', 'counter', 'summary' or 'histogram')
    #parameters:            # Optional parameters for certain metrics
    #  buckets:             # Optional (Passed as 'buckets' argument to Histogram)
    #    - .1
    #    - 1.0
    #    - 10.0
    #  states:              # Optional (Passed as 'states' argument to Enum)
    #    - on
    #    - off
    topic: 'example/topic/+'  # Required

    # Inspired by 'https://prometheus.io/docs/operating/configuration/#<relabel_config>'
    # "__msg_topic__" and "__value__" are populated with the message topic and value; "__topic__" is 'topic' from the config.
    # Supported actions are: 'replace', 'keep' and 'drop'.
    # All labels starting with "__" will be removed, and "__topic__" and "__value__" are copied into "topic" and "value"
    # after all label configs have been applied.
    label_configs:          # Optional
      - source_labels: ['__msg_topic__']  # Required (when label_configs is present)
        separator: '/'      # Optional default ';'
        regex: '(.*)'       # Optional default '(.*)'
        target_label: '__topic__'  # Required (when label_configs is present and 'action' = 'replace')
        replacement: '\1'   # Optional default '\1'
        action: 'replace'   # Optional default 'replace'
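The relabelling comments above follow Prometheus relabel_config semantics: join the `source_labels` values with `separator`, match `regex` against the result, and expand `replacement` into `target_label`. A minimal sketch of the `replace` action under those rules, as an illustration of the semantics rather than the exporter's actual implementation:

```python
import re

def apply_replace(labels, source_labels, target_label,
                  separator=';', regex='(.*)', replacement='\\1'):
    """Apply a relabel-style 'replace' action to a label dict in place."""
    value = separator.join(labels.get(name, '') for name in source_labels)
    match = re.match(regex, value)
    if match:  # no match -> target label is left untouched
        labels[target_label] = match.expand(replacement)
    return labels

labels = {'__msg_topic__': 'example/topic/sensor1', '__value__': '42'}
apply_replace(labels, ['__msg_topic__'], '__topic__')
print(labels['__topic__'])  # example/topic/sensor1
```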
exampleconf/mosquitto_sys_metrics.yaml (new file, 61 lines)
@@ -0,0 +1,61 @@

# Config file for Mosquitto broker system metrics

# Metric definitions
metrics:
  - name: 'mqtt_broker'
    help: 'System events from broker'
    type: 'gauge'
    topic: '$SYS/broker/#'
    label_configs:
      - source_labels: ['__msg_topic__']
        target_label: '__topic__'
      - source_labels: ['__value__']
        regex: '^(\d+([,.]\d*)?)$|^([,.]\d+)$'
        action: 'keep'

  - name: 'mqtt_broker_version'
    help: 'Mosquitto version (static)'
    type: 'gauge'
    topic: '$SYS/broker/version'
    label_configs:
      - source_labels: ['__msg_topic__']
        target_label: '__topic__'
      - source_labels: ['__value__']
        regex: '^\D+((?:\d+[\.]?)+)$'
        target_label: 'version'
        replacement: '\1'
        action: 'replace'
      - source_labels: ['__value__']
        replacement: '1'
        target_label: '__value__'
        action: 'replace'

  - name: 'mqtt_broker_changeset'
    help: 'Mosquitto build changeset (static)'
    type: 'gauge'
    topic: '$SYS/broker/changeset'
    label_configs:
      - source_labels: ['__msg_topic__']
        target_label: '__topic__'
      - source_labels: ['__value__']
        target_label: 'changeset'
        action: 'replace'
      - source_labels: ['__value__']
        replacement: '1'
        target_label: '__value__'
        action: 'replace'

  - name: 'mqtt_broker_timestamp'
    help: 'Mosquitto build timestamp (static)'
    type: 'gauge'
    topic: '$SYS/broker/timestamp'
    label_configs:
      - source_labels: ['__msg_topic__']
        target_label: '__topic__'
      - source_labels: ['__value__']
        target_label: 'timestamp'
        action: 'replace'
      - source_labels: ['__value__']
        replacement: '1'
        target_label: '__value__'
        action: 'replace'
exampleconf/switchstate.yaml (new file, 23 lines)
@@ -0,0 +1,23 @@

# metric for a switch with the states on and off.
# states are case sensitive and must match exactly;
# use label_configs to rewrite other values, see below.
metrics:
  - name: "fhem_light_state"
    help: "Light state on/off"
    type: "enum"
    topic: "fhem/+/+/light"
    parameters:
      states:
        - 'on'
        - 'off'
    label_configs:
      - source_labels: ['__value__']   # replace uppercase ON and 0 with on
        regex: "(ON|0)"
        target_label: '__value__'
        replacement: 'on'
        action: "replace"
      - source_labels: ['__value__']   # replace uppercase OFF and 1 with off
        regex: "(OFF|1)"
        target_label: '__value__'
        replacement: 'off'
        action: "replace"
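For reference, the enum metric defined above corresponds roughly to the following prometheus_client usage (a sketch; the label set is an assumption, the exporter adds at least the `topic` label):

```python
from prometheus_client import Enum

light_state = Enum(
    'fhem_light_state',
    'Light state on/off',
    labelnames=['topic'],
    states=['on', 'off'],
)

# After the rewrites above, payloads ON/0 become 'on' and OFF/1 become 'off'
light_state.labels(topic='fhem/+/+/light').state('on')
```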
requirements-dev.txt (new file, 5 lines)
@@ -0,0 +1,5 @@

paho-mqtt
prometheus-client
PyYAML
yamlreader
pytest
requirements-frozen.txt (new file, 5 lines)
@@ -0,0 +1,5 @@

paho-mqtt==1.5.1
prometheus-client==0.11.0
PyYAML==5.4.1
six==1.16.0
yamlreader==3.0.4
requirements.txt (new file, 4 lines)
@@ -0,0 +1,4 @@

paho-mqtt
prometheus-client
PyYAML
yamlreader
tests/__init__.py (new empty file)
tests/readme.md (new file, 74 lines)
@@ -0,0 +1,74 @@

# Tests with pytest

Make sure pytest is installed: `pip install -r requirements-dev.txt`.
Run `pytest -s -o log_cli=true -o log_cli_level="DEBUG"` from the repository root.

## test_mqtt_explorer.py:test_update_metrics

This test loads test MQTT data from a file, feeds it into mqtt_exporter and checks whether the expected results are recorded in the Prometheus client.

### directory structure

Test data is loaded from the following directory structure:

```
./tests/
    ./test_data/
        ./test1/
            conf.yaml
            mqtt_msg.csv
        ./test2/
            conf.yaml
            mqtt_msg.csv
        ./test_xyz/
            conf.yaml
            mqtt_msg.csv
    ./tmp_data
        [metric_test...##.txt]
```

In `test_data` each subfolder (e.g. `test1`, `test_bla`) contains a separate set of test data. There is no naming convention for folders; they can be descriptive, like `test_for_issue1234`. Avoid special characters and whitespace.

Files:
- `conf.yaml`: a config like the config for mqtt_exporter itself, but only the `metrics` part and a new optional attribute `timescale` are read from it.
- `mqtt_msg.csv`: fake MQTT data; the format is described below.

`tmp_data` contains the Prometheus scrape output after each processed MQTT message. This folder is cleaned before each test run.

### mqtt_msg.csv file format

`mqtt_msg.csv` is a CSV file with `;` as delimiter and `'` as quotation character.
The following columns are expected:

```
in_topic;in_payload;out_name;out_labels;out_value;delay;assert
```

- `in_topic`: topic as received from the MQTT server
- `in_payload`: payload from the MQTT server as a string (will be converted to a byte array)
- `out_name`: metric name without any suffix like `total`, `sum`, `bucket`, ...
- `out_labels`: expected labels as a JSON string, including the topic.
- `out_value`: expected value. For simple metrics like gauges it is a number; for other metrics it is a JSON string with expected values per suffix, e.g. `{"_count": 10, "_sum": 85.55, "_bucket": 10}`
- `delay`: seconds of delay until the next MQTT message is processed. The `timescale` config attribute speeds up or slows down the delay: a timescale of 0 means no delay, a timescale of 1 means real time. Default timescale = 0.
- `assert`: `True/False`. Specifies whether the assertion should pass. In most cases this should be `True`.

Metric type `Histogram` needs special handling here, as it logs a `$(metric_name)_bucket` metric for each bucket with a reserved label `le`, meaning _less or equal_. Specify `le` for one bucket and set the expected count for that bucket in the `_bucket` attribute of the `out_value` JSON. See the examples in `test1`.
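A short sketch of how a row in this format can be read back, matching the delimiter and quote character stated above (this mirrors what the test module does with `csv.DictReader`):

```python
import csv
import json

with open('tests/test_data/test1/mqtt_msg.csv', newline='') as f:
    reader = csv.DictReader(f, delimiter=';', quotechar="'")
    for row in reader:
        labels = json.loads(row['out_labels'])   # labels are a JSON string
        payload = row['in_payload'].encode()     # fed to the exporter as bytes
        print(row['in_topic'], payload, row['out_name'], labels,
              row['out_value'], row['delay'], row['assert'])
```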
For sample data see the existing tests above.

### Gather test data from a live environment

If the logging level is set to `debug`, the log will contain lines that should already be formatted correctly to be placed in a `mqtt_msg.csv`.

They look like this:

```
2021-08-08 22:24:36,996 DEBUG: TEST_DATA: fhem/Terrasse/TermPearl02/humidity; 21.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True
2021-08-08 22:24:30,601 DEBUG: TEST_DATA: fhem/Terrasse/TerrasseWeiss/humidity; 20.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True
2021-08-08 22:24:27,097 DEBUG: TEST_DATA: fhem/Garten/TermFetanten01/temperature; 16.7; fhem_temperature_celsius; {"location": "Terrasse", "topic": "fhem/Terrasse/TerrasseWeiss/temperature"}; 16.0; 0; True
2021-08-08 22:23:58,831 DEBUG: TEST_DATA: fhem/paz/TermPearl01/humidity; 17.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True
```

Tips:
- Remove the leading `.* DEBUG: TEST_DATA: ` part.
- Make sure the `mqtt_msg.csv` contains the headers given above as its first line.
- The captured data won't fit if the `payload/__value__` has been replaced by a label_config. Please set `in_payload` to the correct value manually. An example of this exception is the `- name: 'mqtt_broker_version'` metric from the example configurations.
- Put the data in a new subfolder in the `test_data` dir. Also copy the config file from the live environment to this folder (you should remove the `mqtt` part from it, and make sure the recorded data doesn't contain sensitive data).
- Create a PR and share the test data, as this will allow all developers to verify code changes.
tests/test_data/test1/conf.yaml (new file, 76 lines)
@@ -0,0 +1,76 @@
|
|||||||
|
|
||||||
|
# Logging
|
||||||
|
logging:
|
||||||
|
# logfile: 'conf/mqttexperter.log' # Optional default '' (stdout)
|
||||||
|
level: 'debug' # Optional default 'info'
|
||||||
|
|
||||||
|
timescale: 0
|
||||||
|
|
||||||
|
# Metric definitions
|
||||||
|
metrics:
|
||||||
|
- name: 'ftp_transferred_bytes'
|
||||||
|
help: 'data transferred in bytes per file'
|
||||||
|
type: 'summary'
|
||||||
|
topic: 'ftp/+/transferred'
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "ftp/([^/]+).*"
|
||||||
|
target_label: "file"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- name: 'network_ping_ms'
|
||||||
|
help: 'ping response in ms'
|
||||||
|
type: 'histogram'
|
||||||
|
topic: 'network/+/+/ping'
|
||||||
|
buckets: '0.5,5,10'
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "network/([^/]+).*"
|
||||||
|
target_label: "network"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "network/[^/]+/([^/]+).*"
|
||||||
|
target_label: "server"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- name: "fhem_temperature_celsius"
|
||||||
|
help: "443 Mhz Sensors, Temperature in C"
|
||||||
|
type: "gauge"
|
||||||
|
topic: "fhem/+/+/temperature"
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "fhem/([^/]+).*"
|
||||||
|
target_label: "location"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- name: "fhem_humidity_percent"
|
||||||
|
help: "443 Mhz Sensors, Humidity in %"
|
||||||
|
type: "gauge"
|
||||||
|
topic: "fhem/+/+/humidity"
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "fhem/([^/]+).*"
|
||||||
|
target_label: "location"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- name: "fhem_rain_mm"
|
||||||
|
help: "443 Mhz Sensors, rain in mm/m2"
|
||||||
|
type: "counter"
|
||||||
|
topic: "fhem/+/+/rain_total"
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "fhem/([^/]+).*"
|
||||||
|
target_label: "location"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
tests/test_data/test1/mqtt_msg.csv (new file, 26 lines)
@@ -0,0 +1,26 @@
|
|||||||
|
in_topic;in_payload;out_name;out_labels;out_value;delay;assert
|
||||||
|
fhem/Terrasse/TermPearl02/temperature;18;fhem_temperature_celsius;{"location": "Terrasse","topic": "fhem/Terrasse/TermPearl02/temperature"};18;1;True
|
||||||
|
fhem/Terrasse/TermPearl02/humidity;21;fhem_humidity_percent;{"location": "Terrasse","topic": "fhem/Terrasse/TermPearl02/humidity"};21;2;True
|
||||||
|
fhem/Garten/TermFetanten01/humidity;79;fhem_humidity_percent;{"location": "Garten","topic": "fhem/Garten/TermFetanten01/humidity"};79;2;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;134.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};134.8;4;True
|
||||||
|
fhem/Terrasse/TermPearl02/temperature;22;fhem_temperature_celsius;{"location": "Terrasse","topic": "fhem/Terrasse/TermPearl02/temperature"};22;5;True
|
||||||
|
fhem/Terrasse/TermPearl02/humidity;24.3;fhem_humidity_percent;{"location": "Terrasse","topic": "fhem/Terrasse/TermPearl02/humidity"};24.3;1;True
|
||||||
|
fhem/Garten/TermFetanten01/humidity;79;fhem_humidity_percent;{"location": "Garten","topic": "fhem/Garten/TermFetanten01/humidity"};79;2;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;11.1;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};145.9;3;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;10;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};155.9;5;True
|
||||||
|
network/vlan11/srv01.local/ping;2;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 1, "_sum": 2, "_bucket": 1};2;True
|
||||||
|
network/vlan11/srv01.local/ping;4;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 2, "_sum": 6, "_bucket": 2};6;True
|
||||||
|
network/vlan11/srv01.local/ping;7;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "10.0"};{"_count": 3, "_sum": 13, "_bucket": 3};1;True
|
||||||
|
network/vlan11/srv01.local/ping;0.4;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "0.5"};{"_count": 4, "_sum": 13.4, "_bucket": 1};4;True
|
||||||
|
network/vlan11/srv01.local/ping;20;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 5, "_sum": 33.4, "_bucket": 5};5;True
|
||||||
|
network/vlan11/srv01.local/ping;11.1;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 6, "_sum": 44.5, "_bucket": 6};2;True
|
||||||
|
network/vlan11/srv01.local/ping;5;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 7, "_sum": 49.5, "_bucket": 4};4;True
|
||||||
|
network/vlan11/srv01.local/ping;6;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "10.0"};{"_count": 8, "_sum": 55.5, "_bucket": 6};1;True
|
||||||
|
network/vlan11/srv01.local/ping;0.05;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "0.5"};{"_count": 9, "_sum": 55.55, "_bucket": 2};4;True
|
||||||
|
network/vlan11/srv01.local/ping;30;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 10, "_sum": 85.55, "_bucket": 10};5;True
|
||||||
|
ftp/update.bin/transferred;123;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 1, "_sum": 123};1;True
|
||||||
|
ftp/update.bin/transferred;234;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 2, "_sum": 357};1;True
|
||||||
|
ftp/update.bin/transferred;34;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 3, "_sum": 391};1;True
|
||||||
|
ftp/update.bin/transferred;45;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 4, "_sum": 436};1;True
|
||||||
|
ftp/update.bin/transferred;89;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 5, "_sum": 525};1;True
|
||||||
|
ftp/update.bin/transferred;11111;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 6, "_sum": 11636};1;True
|
tests/test_data/test2/conf.yaml (new file, 99 lines)
@@ -0,0 +1,99 @@
|
|||||||
|
# Config file for MQTT prometheus exporter
|
||||||
|
|
||||||
|
|
||||||
|
# Metric definitions
|
||||||
|
metrics:
|
||||||
|
# - name: 'mqtt_broker'
|
||||||
|
# help: 'System events from broker'
|
||||||
|
# type: 'gauge'
|
||||||
|
# topic: '$SYS/broker/#'
|
||||||
|
# label_configs:
|
||||||
|
# - source_labels: ['__msg_topic__']
|
||||||
|
# target_label: '__topic__'
|
||||||
|
# - source_labels: ['__value__']
|
||||||
|
# regex: '^(\d+([,.]\d*)?)$|^([,.]\d+)$'
|
||||||
|
# action: 'keep'
|
||||||
|
|
||||||
|
- name: 'mqtt_broker_version'
|
||||||
|
help: 'Mosquitto version (static)'
|
||||||
|
type: 'gauge'
|
||||||
|
topic: '$SYS/broker/version'
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ['__value__']
|
||||||
|
regex: '^\D+((?:\d+[\.]?)+)$'
|
||||||
|
target_label: 'version'
|
||||||
|
replacement: '\1'
|
||||||
|
action: 'replace'
|
||||||
|
- source_labels: ['__value__']
|
||||||
|
replacement: '1'
|
||||||
|
target_label: '__value__'
|
||||||
|
action: 'replace'
|
||||||
|
|
||||||
|
- name: 'mqtt_broker_changeset'
|
||||||
|
help: 'Mosquitto build changeset (static)'
|
||||||
|
type: 'gauge'
|
||||||
|
topic: '$SYS/broker/changeset'
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ['__value__']
|
||||||
|
target_label: 'changeset'
|
||||||
|
action: 'replace'
|
||||||
|
- source_labels: ['__value__']
|
||||||
|
replacement: '1'
|
||||||
|
target_label: '__value__'
|
||||||
|
action: 'replace'
|
||||||
|
|
||||||
|
- name: 'mqtt_broker_timestamp'
|
||||||
|
help: 'Mosquitto build timestamp (static)'
|
||||||
|
type: 'gauge'
|
||||||
|
topic: '$SYS/broker/timestamp'
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ['__value__']
|
||||||
|
target_label: 'timestamp'
|
||||||
|
action: 'replace'
|
||||||
|
- source_labels: ['__value__']
|
||||||
|
replacement: '1'
|
||||||
|
target_label: '__value__'
|
||||||
|
action: 'replace'
|
||||||
|
|
||||||
|
- name: "fhem_temperature_celsius"
|
||||||
|
help: "443 Mhz Sensors, Temperature in C"
|
||||||
|
type: "gauge"
|
||||||
|
topic: "fhem/+/+/temperature"
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "fhem/([^/]+).*"
|
||||||
|
target_label: "location"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- name: "fhem_humidity_percent"
|
||||||
|
help: "443 Mhz Sensors, Humidity in %"
|
||||||
|
type: "gauge"
|
||||||
|
topic: "fhem/+/+/humidity"
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "fhem/([^/]+).*"
|
||||||
|
target_label: "location"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- name: "fhem_rain_mm"
|
||||||
|
help: "443 Mhz Sensors, rain in mm/m2"
|
||||||
|
type: "counter"
|
||||||
|
topic: "fhem/+/+/rain_total"
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "fhem/([^/]+).*"
|
||||||
|
target_label: "location"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
tests/test_data/test2/mqtt_msg.csv (new file, 15 lines)
@@ -0,0 +1,15 @@
|
|||||||
|
in_topic;in_payload;out_name;out_labels;out_value;delay;assert
|
||||||
|
$SYS/broker/version;' 2.0.11'; mqtt_broker_version; {"topic": "$SYS/broker/version", "version": "2.0.11"}; 1.0; 0; True
|
||||||
|
fhem/Terrasse/TerrasseWeiss/humidity; 20.0; fhem_humidity_percent; {"location": "Terrasse", "topic": "fhem/Terrasse/TerrasseWeiss/humidity"}; 20.0; 0; True
|
||||||
|
fhem/Terrasse/TermPearl02/temperature; 17.5; fhem_temperature_celsius; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/temperature"}; 17.5; 0; True
|
||||||
|
fhem/Terrasse/TermPearl02/humidity; 20.0; fhem_humidity_percent; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/humidity"}; 20.0; 0; True
|
||||||
|
fhem/Terrasse/TermPearl02/humidity; 21.0; fhem_humidity_percent; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/humidity"}; 21.0; 0; True
|
||||||
|
fhem/Terrasse/TermPearl02/temperature; 17.6; fhem_temperature_celsius; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/temperature"}; 17.6; 0; True
|
||||||
|
fhem/Garten/TermFetanten01/humidity; 66.0; fhem_humidity_percent; {"location": "Garten", "topic": "fhem/Garten/TermFetanten01/humidity"}; 66.0; 0; True
|
||||||
|
fhem/Terrasse/TermPearl02/temperature; 17.5; fhem_temperature_celsius; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/temperature"}; 17.5; 0; True
|
||||||
|
$SYS/broker/version;' 2.0.11'; mqtt_broker_version; {"topic": "$SYS/broker/version", "version": "2.0.11"}; 1.0; 0; True
|
||||||
|
fhem/Garten/rainmeter01/rain_total; 106.426; fhem_rain_mm; {"location": "Garten", "topic": "fhem/Garten/rainmeter01/rain_total"}; {"_total": 106.426, "_created": 1628459492.695393}; 0; True
|
||||||
|
fhem/Garten/TermFetanten01/humidity; 65.0; fhem_humidity_percent; {"location": "Garten", "topic": "fhem/Garten/TermFetanten01/humidity"}; 65.0; 0; True
|
||||||
|
fhem/paz/TermPearl01/temperature; 24.3; fhem_temperature_celsius; {"location": "paz", "topic": "fhem/paz/TermPearl01/temperature"}; 24.3; 0; True
|
||||||
|
fhem/paz/TermPearl01/humidity; 17.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True
|
||||||
|
fhem/Terrasse/TerrasseWeiss/humidity; 20.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True
|
tests/test_data/test_counter_absolute/conf.yaml (new file, 22 lines)
@@ -0,0 +1,22 @@
|
|||||||
|
|
||||||
|
# Logging
|
||||||
|
logging:
|
||||||
|
# logfile: 'conf/mqttexperter.log' # Optional default '' (stdout)
|
||||||
|
level: 'debug' # Optional default 'info'
|
||||||
|
|
||||||
|
timescale: 0
|
||||||
|
|
||||||
|
# Metric definitions
|
||||||
|
metrics:
|
||||||
|
- name: "fhem_rain_mm"
|
||||||
|
help: "443 Mhz Sensors, rain in mm/m2"
|
||||||
|
type: "counter_absolute"
|
||||||
|
topic: "fhem/+/+/rain_total"
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "fhem/([^/]+).*"
|
||||||
|
target_label: "location"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
tests/test_data/test_counter_absolute/mqtt_msg.csv (new file, 13 lines)
@@ -0,0 +1,13 @@
|
|||||||
|
in_topic;in_payload;out_name;out_labels;out_value;delay;assert
|
||||||
|
fhem/Garten/rainmeter01/rain_total;4.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};4.8;4;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;11.1;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};11.1;3;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;110;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};110;5;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;134.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};134.8;4;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;211.1;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};211.1;3;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;155.9;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};155.9;5;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;2134.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};2134.8;4;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;11.1;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};11.1;3;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;155.9;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};155.9;5;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;23134.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};23134.8;4;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;1233123.123123123123;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};1233123.123123123123;3;True
|
||||||
|
fhem/Garten/rainmeter01/rain_total;1233123.123123123124;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};1233123.123123123124;5;True
|
tests/test_data/test_enum/conf.yaml (new file, 58 lines)
@@ -0,0 +1,58 @@
|
|||||||
|
|
||||||
|
# Logging
|
||||||
|
logging:
|
||||||
|
# logfile: 'conf/mqttexperter.log' # Optional default '' (stdout)
|
||||||
|
level: 'debug' # Optional default 'info'
|
||||||
|
|
||||||
|
timescale: 0
|
||||||
|
|
||||||
|
# Metric definitions
|
||||||
|
metrics:
|
||||||
|
- name: "fhem_light_state"
|
||||||
|
help: "Light state on/off"
|
||||||
|
type: "enum"
|
||||||
|
topic: "fhem/+/+/light"
|
||||||
|
parameters:
|
||||||
|
states:
|
||||||
|
- 'on'
|
||||||
|
- 'off'
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__value__']
|
||||||
|
regex: "(ON|0)"
|
||||||
|
target_label: '__value__'
|
||||||
|
replacement: 'on'
|
||||||
|
action: "replace"
|
||||||
|
- source_labels: ['__value__']
|
||||||
|
regex: "(OFF|1)"
|
||||||
|
target_label: '__value__'
|
||||||
|
replacement: 'off'
|
||||||
|
action: "replace"
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "fhem/([^/]+).*"
|
||||||
|
target_label: "location"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- name: 'network_ping_ms'
|
||||||
|
help: 'ping response in ms'
|
||||||
|
type: 'histogram'
|
||||||
|
topic: 'network/+/+/ping'
|
||||||
|
parameters:
|
||||||
|
buckets:
|
||||||
|
- 0.5
|
||||||
|
- 5
|
||||||
|
- 10
|
||||||
|
label_configs:
|
||||||
|
- source_labels: ['__msg_topic__']
|
||||||
|
target_label: '__topic__'
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "network/([^/]+).*"
|
||||||
|
target_label: "network"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
||||||
|
- source_labels: ["__msg_topic__"]
|
||||||
|
regex: "network/[^/]+/([^/]+).*"
|
||||||
|
target_label: "server"
|
||||||
|
replacement: '\1'
|
||||||
|
action: "replace"
|
tests/test_data/test_enum/mqtt_msg.csv (new file, 23 lines)
@@ -0,0 +1,23 @@
|
|||||||
|
in_topic;in_payload;out_name;out_labels;out_value;delay;assert
|
||||||
|
fhem/room01/desk/light01;on;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;4;True
|
||||||
|
fhem/room01/desk/light01;on;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;3;True
|
||||||
|
fhem/room01/desk/light01;off;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;5;True
|
||||||
|
fhem/room01/desk/light01;on;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;4;True
|
||||||
|
fhem/room01/desk/light01;off;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;3;True
|
||||||
|
fhem/room01/desk/light01;ON;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;5;True
|
||||||
|
fhem/room01/desk/light01;OFF;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;4;True
|
||||||
|
fhem/room01/desk/light01;off;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;3;True
|
||||||
|
fhem/room01/desk/light01;1;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;5;True
|
||||||
|
fhem/room01/desk/light01;0;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;4;True
|
||||||
|
fhem/room01/desk/light01;on;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;3;True
|
||||||
|
fhem/room01/desk/light01;off;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;5;True
|
||||||
|
network/vlan11/srv01.local/ping;2;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 1, "_sum": 2, "_bucket": 1};2;True
|
||||||
|
network/vlan11/srv01.local/ping;4;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 2, "_sum": 6, "_bucket": 2};6;True
|
||||||
|
network/vlan11/srv01.local/ping;7;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "10.0"};{"_count": 3, "_sum": 13, "_bucket": 3};1;True
|
||||||
|
network/vlan11/srv01.local/ping;0.4;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "0.5"};{"_count": 4, "_sum": 13.4, "_bucket": 1};4;True
|
||||||
|
network/vlan11/srv01.local/ping;20;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 5, "_sum": 33.4, "_bucket": 5};5;True
|
||||||
|
network/vlan11/srv01.local/ping;11.1;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 6, "_sum": 44.5, "_bucket": 6};2;True
|
||||||
|
network/vlan11/srv01.local/ping;5;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 7, "_sum": 49.5, "_bucket": 4};4;True
|
||||||
|
network/vlan11/srv01.local/ping;6;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "10.0"};{"_count": 8, "_sum": 55.5, "_bucket": 6};1;True
|
||||||
|
network/vlan11/srv01.local/ping;0.05;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "0.5"};{"_count": 9, "_sum": 55.55, "_bucket": 2};4;True
|
||||||
|
network/vlan11/srv01.local/ping;30;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 10, "_sum": 85.55, "_bucket": 10};5;True
|
tests/test_mqtt_exporter.py (new file, 178 lines)
@@ -0,0 +1,178 @@
|
|||||||
|
"""
|
||||||
|
pytest tests for mqtt_exporter
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import csv
|
||||||
|
import distutils.util
|
||||||
|
import json
|
||||||
|
from json.decoder import JSONDecodeError
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import prometheus_client as prometheus
|
||||||
|
import prometheus_client.registry
|
||||||
|
import mqtt_exporter
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.DEBUG)
|
||||||
|
|
||||||
|
TMP_DIR=os.path.join(
|
||||||
|
os.path.dirname(__file__),
|
||||||
|
'tmp_data'
|
||||||
|
)
|
||||||
|
DATA_DIR=os.path.join(
|
||||||
|
os.path.dirname(__file__),
|
||||||
|
'test_data'
|
||||||
|
)
|
||||||
|
|
||||||
|
def setup_module(module): #pylint: disable=unused-argument
|
||||||
|
""" setup any state specific to the execution of the given module."""
|
||||||
|
delete_temp_test_files()
|
||||||
|
|
||||||
|
|
||||||
|
def delete_temp_test_files():
|
||||||
|
# delete TEMP files
|
||||||
|
for file in os.listdir(TMP_DIR):
|
||||||
|
if file == '.gitkeep':
|
||||||
|
continue
|
||||||
|
os.remove(os.path.join(TMP_DIR, file))
|
||||||
|
|
||||||
|
|
||||||
|
class MqttCVS:
|
||||||
|
in_topic = "in_topic"
|
||||||
|
in_payload = "in_payload"
|
||||||
|
out_name = "out_name"
|
||||||
|
out_labels = "out_labels"
|
||||||
|
out_value = "out_value"
|
||||||
|
delay = "delay"
|
||||||
|
expected_assert = "assert"
|
||||||
|
|
||||||
|
def _get_mqtt_data(file_name):
|
||||||
|
"""
|
||||||
|
Reads mqtt fake data and expected results from file
|
||||||
|
"""
|
||||||
|
mqtt_data = []
|
||||||
|
with open(file_name, newline='') as mqtt_data_csv:
|
||||||
|
csv_reader = csv.DictReader(mqtt_data_csv, quotechar="'", delimiter=';')
|
||||||
|
for row in csv_reader:
|
||||||
|
row[MqttCVS.in_topic] = row[MqttCVS.in_topic].strip()
|
||||||
|
row[MqttCVS.out_name] = row[MqttCVS.out_name].strip()
|
||||||
|
# convert payload to bytes, as in an MQTT message
|
||||||
|
row[MqttCVS.in_payload] = row[MqttCVS.in_payload].encode('UTF-8')
|
||||||
|
# parse labels, to a python object.
|
||||||
|
try:
|
||||||
|
row[MqttCVS.out_labels] = json.loads(row.get(MqttCVS.out_labels, '{}'))
|
||||||
|
except json.decoder.JSONDecodeError as jde:
|
||||||
|
logging.error(f"json.decoder.JSONDecodeError while decoding {row.get(MqttCVS.out_labels, '{}')}")
|
||||||
|
raise jde
|
||||||
|
# Value could be JSON, a float or anything else.
|
||||||
|
try:
|
||||||
|
row[MqttCVS.out_value] = float(row.get(MqttCVS.out_value))
|
||||||
|
except ValueError:
|
||||||
|
try:
|
||||||
|
row[MqttCVS.out_value] = json.loads(row.get(MqttCVS.out_value))
|
||||||
|
except (JSONDecodeError, TypeError):
|
||||||
|
pass # leave as it is
|
||||||
|
# set delay to 0 if not a number
|
||||||
|
try:
|
||||||
|
row[MqttCVS.delay] = float(row.get(MqttCVS.delay, 0))
|
||||||
|
except ValueError:
|
||||||
|
row[MqttCVS.delay] = 0
|
||||||
|
# convert string to bool for expected assertion.
|
||||||
|
row[MqttCVS.expected_assert] = bool(
|
||||||
|
distutils.util.strtobool(row.get(MqttCVS.expected_assert, "True").strip()))
|
||||||
|
mqtt_data.append(row)
|
||||||
|
return mqtt_data
|
||||||
|
|
||||||
|
|
||||||
|
def _get_test_data():
|
||||||
|
"""
|
||||||
|
Reads test data from DATA_DIR sub directories.
|
||||||
|
Each subdirectory is expected to contain a `conf.yaml` file with a metrics config (like in the config file)
|
||||||
|
and a CSV file `mqtt_msg.csv` with fake mqtt data ";" delimited:
|
||||||
|
`in_topic;in_payload;out_name;out_labels;out_value;delay;assert`
|
||||||
|
where
|
||||||
|
out_labels: JSON string with all expected labels
|
||||||
|
delay: delay until the next line is processed
|
||||||
|
assert: expected assert result, True if out_value matches prometheus metric
|
||||||
|
"""
|
||||||
|
test_data_sets = []
|
||||||
|
test_data_dirs = [f.path for f in os.scandir(DATA_DIR) if f.is_dir()]
|
||||||
|
test_names = [ os.path.basename(os.path.normpath(name)) for name in test_data_dirs]
|
||||||
|
for test_data_dir in test_data_dirs:
|
||||||
|
conf_file = os.path.join(test_data_dir, 'conf.yaml')
|
||||||
|
mqtt_data_file = os.path.join(test_data_dir, 'mqtt_msg.csv')
|
||||||
|
if not os.path.isfile(conf_file) or not os.path.isfile(mqtt_data_file):
|
||||||
|
logging.error(f"Test data dir {test_data_dir} doesn't contain required files, skipping")
|
||||||
|
continue
|
||||||
|
config_yaml = mqtt_exporter._read_config(conf_file)
|
||||||
|
config_yaml = mqtt_exporter._parse_config_and_add_defaults(config_yaml)
|
||||||
|
test_data_sets.append((
|
||||||
|
config_yaml['metrics'],
|
||||||
|
_get_mqtt_data(mqtt_data_file),
|
||||||
|
config_yaml.get('timescale', 0),
|
||||||
|
))
|
||||||
|
return test_names, test_data_sets
|
||||||
|
|
||||||
|
def _get_suffixes_by_metric_name(metrics, metric_name):
|
||||||
|
metric_type = None
|
||||||
|
for _, outer_metric in metrics.items():
|
||||||
|
for metric in outer_metric:
|
||||||
|
if metric['name'] == metric_name:
|
||||||
|
metric_type = metric['type']
|
||||||
|
break
|
||||||
|
|
||||||
|
for suffix in mqtt_exporter.SUFFIXES_PER_TYPE[metric_type]:
|
||||||
|
if len(suffix) == 0:
|
||||||
|
yield suffix
|
||||||
|
else:
|
||||||
|
yield f"_{suffix}"
|
||||||
|
|
||||||
|
|
||||||
|
class FakeMSG():
|
||||||
|
"""Simulate an MQTT message."""
|
||||||
|
def __init__(self, topic, payload) -> None:
|
||||||
|
self.topic = topic
|
||||||
|
self.payload = payload
|
||||||
|
|
||||||
|
|
||||||
|
param_test_data_dirs, param_test_data_sets = _get_test_data()
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("metrics,mqtt_data_set,timescale", param_test_data_sets, ids=param_test_data_dirs)
|
||||||
|
def test_update_metrics(caplog, request, metrics, mqtt_data_set, timescale):
|
||||||
|
"""
|
||||||
|
reads a label_config and some mqtt data and asserts if they are in the metrics
|
||||||
|
"""
|
||||||
|
logging.info(f"Start test_update_metrics with ID {request.node.callspec.id}")
|
||||||
|
|
||||||
|
# reset prometheus registry between tests
|
||||||
|
collectors = list(prometheus.REGISTRY._collector_to_names.keys())
|
||||||
|
for collector in collectors:
|
||||||
|
prometheus.REGISTRY.unregister(collector)
|
||||||
|
|
||||||
|
i = 1
|
||||||
|
for mqtt_data in mqtt_data_set:
|
||||||
|
msg = FakeMSG(mqtt_data[MqttCVS.in_topic], mqtt_data[MqttCVS.in_payload])
|
||||||
|
mqtt_exporter._on_message(None, metrics, msg)
|
||||||
|
prometheus.REGISTRY.collect()
|
||||||
|
prometheus.write_to_textfile(os.path.join(TMP_DIR, f"metric_{request.node.callspec.id}_{i:02}.txt"), prometheus.REGISTRY)
|
||||||
|
# depending on metric type one or more metrics with different suffixes are added.
|
||||||
|
for suffix in _get_suffixes_by_metric_name(metrics, mqtt_data[MqttCVS.out_name]):
|
||||||
|
# histograms with buckets need special handling: remove the bucket label 'le'
|
||||||
|
labels = mqtt_data[MqttCVS.out_labels].copy()
|
||||||
|
if not suffix == "_bucket" and labels.get('le'):
|
||||||
|
labels.pop('le')
|
||||||
|
|
||||||
|
expected_result = mqtt_data[MqttCVS.out_value]
|
||||||
|
expected_result = expected_result if not isinstance(expected_result, dict) else expected_result[suffix]
|
||||||
|
logging.info(f"Assert {mqtt_data[MqttCVS.out_name]}{suffix} from testdata record {i}")
|
||||||
|
assert ( prometheus.REGISTRY.get_sample_value(
|
||||||
|
f"{mqtt_data[MqttCVS.out_name]}{suffix}",
|
||||||
|
labels
|
||||||
|
) == expected_result ) == mqtt_data[MqttCVS.expected_assert]
|
||||||
|
time.sleep(mqtt_data[MqttCVS.delay] * timescale)
|
||||||
|
i += 1
|
||||||
|
for record in caplog.records:
|
||||||
|
assert record.levelno < logging.ERROR
|
tests/test_prometheus_additions.py (new file, 72 lines)
@@ -0,0 +1,72 @@
|
|||||||
|
"""
|
||||||
|
Pytest for prometheus client enhancements
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import pytest
|
||||||
|
from utils.prometheus_additions import CounterAbsolute
|
||||||
|
import prometheus_client as prometheus
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.DEBUG)
|
||||||
|
|
||||||
|
TMP_DIR=os.path.join(
|
||||||
|
os.path.dirname(__file__),
|
||||||
|
'tmp_data'
|
||||||
|
)
|
||||||
|
DATA_DIR=os.path.join(
|
||||||
|
os.path.dirname(__file__),
|
||||||
|
'test_data'
|
||||||
|
)
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def get_registry():
|
||||||
|
yield prometheus.REGISTRY
|
||||||
|
# reset prometheus registry between tests
|
||||||
|
collectors = list(prometheus.REGISTRY._collector_to_names.keys())
|
||||||
|
for collector in collectors:
|
||||||
|
prometheus.REGISTRY.unregister(collector)
|
||||||
|
|
||||||
|
old_creation_time = 0.0
|
||||||
|
|
||||||
|
class TestCounterWithReset:
|
||||||
|
a_counter_absolute = CounterAbsolute('Absolute_Counter', 'Test metric' )
|
||||||
|
old_creation_time = 0.0
|
||||||
|
param_test_data_sets = [
|
||||||
|
(10, False),
|
||||||
|
(10, True),
|
||||||
|
(11, True),
|
||||||
|
(110, True),
|
||||||
|
(110, True),
|
||||||
|
(210, True),
|
||||||
|
(310.7, True),
|
||||||
|
(110, False),
|
||||||
|
(210, True),
|
||||||
|
(310.7, True),
|
||||||
|
]
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("value, same_creation_time", param_test_data_sets)
|
||||||
|
def test_counter_absolute(self, request, get_registry, value, same_creation_time):
|
||||||
|
global old_creation_time
|
||||||
|
self.a_counter_absolute.set(value)
|
||||||
|
creation_time = self.a_counter_absolute._created
|
||||||
|
logging.info(f"Creation time: {creation_time:e}")
|
||||||
|
registry = get_registry
|
||||||
|
registry.collect()
|
||||||
|
prometheus.write_to_textfile(os.path.join(TMP_DIR, f"absolute_counter_{request.node.callspec.id}_{value:05}.txt"), prometheus.REGISTRY)
|
||||||
|
|
||||||
|
assert self.a_counter_absolute._value.get() == value
|
||||||
|
assert (creation_time == old_creation_time ) == same_creation_time
|
||||||
|
old_creation_time = creation_time
|
||||||
|
time.sleep(0.005)
|
||||||
|
|
||||||
|
|
||||||
|
class TestCounterRestForbidden:
|
||||||
|
a_counter_absolute = CounterAbsolute('Strict_Absolute_Counter', "This Counters doesn't allow reset")
|
||||||
|
|
||||||
|
def test_counter_reset(self):
|
||||||
|
val_first = 0.3324234
|
||||||
|
val_second = 0.3324233
|
||||||
|
self.a_counter_absolute.set(val_first, fail_on_decrease=True)
|
||||||
|
with pytest.raises(ValueError, match=rf"Counter must increase {val_second} lower {val_first}"):
|
||||||
|
self.a_counter_absolute.set(val_second, fail_on_decrease=True)
|
tests/tmp_data/.gitkeep (new empty file)
utils/__init__.py (new empty file)
utils/__pycache__/__init__.cpython-39.pyc (new binary file, not shown)
utils/__pycache__/prometheus_additions.cpython-39.pyc (new binary file, not shown)
utils/prometheus_additions.py (new file, 40 lines)
@@ -0,0 +1,40 @@

"""
Additions and enhancements to the prometheus_client package
"""

import prometheus_client as prometheus


class CounterAbsolute(prometheus.Counter):
    """
    CounterAbsolute allows the Counter to be set to an absolute value like a Gauge, but the data is
    handled properly if the counter resets or overflows.
    CounterAbsolute is typically used if values need to be proxied from another source, e.g.
    a network counter, SNMP or MQTT data, which return increasing but absolute numbers
    instead of a diff.

    As a Counter must not decrease, setting CounterAbsolute to a lower value is handled as follows:
    a counter overflow or a reset is assumed, the creation timestamp gets reset and internally
    a new Value object is created.

    An example for a CounterAbsolute:
        from utils.prometheus_additions import CounterAbsolute
        c = CounterAbsolute('my_failures_total', 'Description of counter')
        c.set(1123.63213)  # Set to an absolute value. If lower than the last value, the Counter gets reset.
    """
    _type = 'counter'

    def set(self, value, fail_on_decrease=False):
        """Set the counter to the given absolute amount."""
        self._raise_if_not_observable()
        if value < 0:
            raise ValueError('Counters can be a positive number only.')
        if value >= self._value.get():
            self._value.set(float(value))
        else:
            if fail_on_decrease:
                raise ValueError(f"Counter must increase {value} lower {self._value.get()}")
            else:
                self._metric_init()
                self._value.set(float(value))
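A short usage sketch of the class above, showing the reset behaviour (values picked for illustration only):

```python
from prometheus_client import CollectorRegistry
from utils.prometheus_additions import CounterAbsolute

registry = CollectorRegistry()
rain = CounterAbsolute('rain_total_mm', 'Absolute rain counter', registry=registry)

rain.set(134.8)   # counter now at 134.8
rain.set(155.9)   # higher value: normal increase
rain.set(11.1)    # lower value: treated as a counter reset, creation timestamp is renewed

# With fail_on_decrease=True a decrease raises ValueError instead of resetting
try:
    rain.set(5.0, fail_on_decrease=True)
except ValueError as err:
    print(err)
```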
version.py (new file, 4 lines)
@@ -0,0 +1,4 @@

"""
Version of the whole project.
"""
__version__ = "1.0.0"
victron_mqtt_exporter.py (new executable file, 649 lines)
@@ -0,0 +1,649 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
'''
|
||||||
|
A purpose-built Prometheus exporter for Victron Energy Cerbo GX MQTT devices
|
||||||
|
|
||||||
|
(C) 2022 M. Konstapel https://meezenest.nl/mees
|
||||||
|
|
||||||
|
Based on the General purpose Prometheus exporter for MQTT from Frederic Hemberger
|
||||||
|
(https://github.com/fhemberger/mqtt_exporter)
|
||||||
|
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2018 Bendik Wang Andreassen
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
'''
|
||||||
|
|
||||||
|
from copy import deepcopy
import json
from collections import defaultdict
import logging
import argparse
import os
import re
import operator
import time
import signal
import sys
import paho.mqtt.client as mqtt
from paho.mqtt.properties import Properties
from paho.mqtt.packettypes import PacketTypes
import yaml
import prometheus_client as prometheus
from yamlreader import yaml_load
import utils.prometheus_additions
import version

VERSION = version.__version__

SUFFIXES_PER_TYPE = {
    "gauge": [],
    "counter": ['total'],
    "counter_absolute": ['total'],
    "summary": ['sum', 'count'],
    "histogram": ['sum', 'count', 'bucket'],
    "enum": [],
}


def _read_config(config_path):
    """Read config file from given location, and parse properties"""

    if config_path is not None:
        if os.path.isfile(config_path):
            logging.info(f'Config file found at: {config_path}')
            try:
                with open(config_path, 'r') as f:
                    return yaml.safe_load(f.read())
            except yaml.YAMLError:
                logging.exception('Failed to parse configuration file:')

        elif os.path.isdir(config_path):
            logging.info(
                f'Config directory found at: {config_path}')
            try:
                return yaml_load(config_path)
            except yaml.YAMLError:
                logging.exception('Failed to parse configuration directory:')

    return {}


def _parse_config_and_add_defaults(config_from_file):
    """Parse content of configfile and add default values where needed"""

    config = deepcopy(config_from_file)
    logging.debug(f'_parse_config Config from file: {str(config_from_file)}')

    # Logging values ('logging' is optional in config)
    if 'logging' in config_from_file:
        config['logging'] = _add_config_and_defaults(
            config_from_file['logging'], {'logfile': '', 'level': 'info'})
    else:
        config['logging'] = _add_config_and_defaults(
            None, {'logfile': '', 'level': 'info'})

    # MQTT values
    if 'mqtt' in config_from_file:
        config['mqtt'] = _add_config_and_defaults(
            config_from_file['mqtt'], {'host': 'localhost', 'port': 1883})
    else:
        config['mqtt'] = _add_config_and_defaults(
            None, {'host': 'localhost', 'port': 1883})

    if 'auth' in config['mqtt']:
        config['mqtt']['auth'] = _add_config_and_defaults(
            config['mqtt']['auth'], {})
        _validate_required_fields(config['mqtt']['auth'], 'auth', ['username'])

    if 'tls' in config['mqtt']:
        config['mqtt']['tls'] = _add_config_and_defaults(
            config['mqtt']['tls'], {})

    # Prometheus values
    if 'prometheus' in config:
        config['prometheus'] = _add_config_and_defaults(
            config_from_file['prometheus'], {'exporter_port': 9344})
    else:
        config['prometheus'] = _add_config_and_defaults(
            None, {'exporter_port': 9344})

    metrics = {}
    if 'metrics' not in config_from_file:
        logging.critical('No metrics defined in config. Aborting.')
        sys.exit(1)
    for metric in config_from_file['metrics']:
        parse_and_validate_metric_config(metric, metrics)

    config['metrics'] = _group_by_topic(list(metrics.values()))
    return config


def parse_and_validate_metric_config(metric, metrics):
    m = _add_config_and_defaults(metric, {})
    _validate_required_fields(m, None, ['name', 'help', 'type', 'topic'])
    if 'label_configs' in m and m['label_configs']:
        label_configs = []
        for lc in m['label_configs']:
            if lc:
                lc = _add_config_and_defaults(lc, {'separator': ';', 'regex': '^(.*)$', 'replacement': '\\1',
                                                   'action': 'replace'})
                if lc['action'] == 'replace':
                    _validate_required_fields(lc, None,
                                              ['source_labels', 'target_label', 'separator', 'regex', 'replacement',
                                               'action'])
                else:
                    _validate_required_fields(lc, None,
                                              ['source_labels', 'separator', 'regex', 'replacement',
                                               'action'])
                label_configs.append(lc)
        m['label_configs'] = label_configs
    # legacy config handling: move 'buckets' into the 'parameters' dict
    if m.get('buckets'):
        m.setdefault('parameters', {})['buckets'] = m['buckets']
    metrics[m['name']] = m


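# Illustrative sketch (not part of this commit): the kind of metric entry
# parse_and_validate_metric_config() expects after the YAML config is loaded, written as the
# equivalent Python dict. Names and topic are made up; run this as a standalone snippet once
# the module is fully imported, since the helper functions it needs are defined further down.
#
#     example_metrics = {}
#     parse_and_validate_metric_config({
#         'name': 'victron_battery_voltage',
#         'help': 'Battery voltage reported by the Cerbo GX',
#         'type': 'gauge',
#         'topic': 'N/+/battery/512/Dc/0/Voltage',
#     }, example_metrics)
#     # example_metrics is now keyed by the metric name 'victron_battery_voltage'
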
def _validate_required_fields(config, parent, required_fields):
    """Fail if any of the required_fields is missing from config"""
    for field in required_fields:
        if field not in config or config[field] is None:
            if parent is None:
                error = f"'{field}' is a required field in configfile"
            else:
                error = f"'{field}' is a required parameter for field {parent} in configfile"
            raise TypeError(error)


def _add_config_and_defaults(config, defaults):
    """Return dict with values from config, if present, or values from defaults"""
    if config is not None:
        defaults.update(config)
    return defaults.copy()


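# Illustrative sketch (not part of this commit): values from the config file win, everything
# else falls back to the defaults, e.g. for the 'mqtt' section above. 'venus.local' is a
# made-up example host.
#
#     merged = _add_config_and_defaults({'host': 'venus.local'}, {'host': 'localhost', 'port': 1883})
#     # merged == {'host': 'venus.local', 'port': 1883}
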
def _strip_config(config, allowed_keys):
    return {k: v for k, v in config.items() if k in allowed_keys and v}


def _group_by_topic(metrics):
    """Group metrics by topic"""
    t = defaultdict(list)
    for metric in metrics:
        t[metric['topic']].append(metric)
    return t


def _topic_matches(topic1, topic2):
    """Check if wildcard-topics match"""
    if topic1 == topic2:
        return True

    # If topic1 != topic2 and no wildcard is present in topic1, no need for regex
    if '+' not in topic1 and '#' not in topic1:
        return False

    logging.debug(
        f'_topic_matches: Topic1: {topic1}, Topic2: {topic2}')
    topic1 = re.escape(topic1)
    regex = topic1.replace('/\\#', '.*$').replace('\\+', '[^/]+')
    match = re.match(regex, topic2)

    logging.debug(f'_topic_matches: Match: {match is not None}')
    return match is not None


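# Illustrative sketch (not part of this commit): MQTT wildcards as translated above; '+'
# matches exactly one topic level and a trailing '/#' matches everything below it. The topics
# are made-up examples; the portal id matches the one used in the keepalive further down.
#
#     assert _topic_matches('N/+/system/0/Serial', 'N/c0619ab1cab9/system/0/Serial')
#     assert _topic_matches('N/c0619ab1cab9/#', 'N/c0619ab1cab9/battery/512/Dc/0/Voltage')
#     assert not _topic_matches('N/+/system/0/Serial', 'N/a/b/system/0/Serial')
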
# noinspection SpellCheckingInspection
def _log_setup(logging_config):
    """Setup application logging"""

    logfile = logging_config['logfile']
    log_level = logging_config['level']

    numeric_level = logging.getLevelName(log_level.upper())
    if not isinstance(numeric_level, int):
        raise TypeError(f'Invalid log level: {log_level}')

    if logfile != '':
        logging.info(f"Logging redirected to: {logfile}")
        # Need to replace the current handler on the root logger:
        file_handler = logging.FileHandler(logfile, 'a')
        formatter = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
        file_handler.setFormatter(formatter)

        log = logging.getLogger()  # root logger
        for handler in log.handlers:  # remove all old handlers
            log.removeHandler(handler)
        log.addHandler(file_handler)

    else:
        logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s')

    logging.getLogger().setLevel(numeric_level)
    logging.info(f'log_level set to: {log_level}')


# noinspection PyUnusedLocal
def _on_connect(client, userdata, flags, rc):  # pylint: disable=unused-argument,invalid-name
    """The callback for when the client receives a CONNACK response from the server."""
    logging.info(f'Connected to broker, result code {str(rc)}')

    for topic in userdata.keys():
        client.subscribe(topic)
        logging.info(f'Subscribing to topic: {topic}')


def _label_config_match(label_config, labels):
    """Action 'keep' and 'drop' in label_config: Matches joined 'source_labels' to 'regex'"""
    source = label_config['separator'].join(
        [labels[x] for x in label_config['source_labels']])
    logging.debug(f'_label_config_match source: {source}')
    match = re.match(label_config['regex'], source)

    if label_config['action'] == 'keep':
        logging.debug(
            f"_label_config_match Action: {label_config['action']}, Keep msg: {match is not None}")
        return match is not None
    if label_config['action'] == 'drop':
        logging.debug(
            f"_label_config_match Action: {label_config['action']}, Drop msg: {match is not None}")
        return match is None
    else:
        logging.debug(
            f"_label_config_match Action: {label_config['action']} is not supported, metric is dropped")
        return False


def _apply_label_config(labels, label_configs):
    """Create/change labels based on label_config in config file."""

    for label_config in label_configs:
        if label_config['action'] == 'replace':
            _label_config_rename(label_config, labels)
        else:
            if not _label_config_match(label_config, labels):
                return False
    return True


def _label_config_rename(label_config, labels):
    """Action 'replace' in label_config: Add/change value for label 'target_label'"""
    source = label_config['separator'].join(
        [labels[x] for x in label_config['source_labels']])

    if re.match(re.compile(label_config['regex']), source):
        logging.debug(f'_label_config_rename source: {source}')
        result = re.sub(label_config['regex'],
                        label_config['replacement'], source)
        logging.debug(f'_label_config_rename result: {result}')
        labels[label_config['target_label']] = result


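# Illustrative sketch (not part of this commit): a 'replace' label_config that pulls the
# portal id out of the incoming message topic into a new 'device' label. All values are made
# up; in the real exporter the label_configs come from the YAML metric config.
#
#     sketch_labels = {'__msg_topic__': 'N/c0619ab1cab9/battery/512/Dc/0/Voltage'}
#     _label_config_rename({'source_labels': ['__msg_topic__'],
#                           'target_label': 'device',
#                           'separator': ';',
#                           'regex': 'N/([^/]+)/.*',
#                           'replacement': '\\1',
#                           'action': 'replace'}, sketch_labels)
#     # sketch_labels['device'] is now 'c0619ab1cab9'
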
def convert_victron_value(victron_value):
    """Convert a value from the Victron Cerbo GX, which arrives as a JSON string of the form {"value": 0}, to the actual value."""
    victron_value_dict = json.loads(victron_value)
    return victron_value_dict['value']


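# Illustrative sketch (not part of this commit): the Cerbo GX publishes JSON payloads and
# this helper unwraps them to the bare value that Prometheus can use.
#
#     convert_victron_value('{"value": 12.8}')   # -> 12.8
#     convert_victron_value('{"value": null}')   # -> None
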
def finalize_labels(labels):
    """Keep '__value__' and '__topic__' but remove all other labels starting with '__'"""
    # labels['value'] = labels['__value__']  # original behaviour, replaced by the Victron JSON conversion below
    labels['value'] = convert_victron_value(labels['__value__'])
    labels['topic'] = labels['__topic__']

    return {k: v for k, v in labels.items() if not k.startswith('__')}


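# Illustrative sketch (not part of this commit): internal '__'-labels are dropped, the JSON
# payload is unwrapped into 'value' and the configured topic is kept as 'topic'. Topics are
# made-up examples.
#
#     finalize_labels({'__topic__': 'N/+/vebus/257/Ac/Out/L1/V',
#                      '__msg_topic__': 'N/c0619ab1cab9/vebus/257/Ac/Out/L1/V',
#                      '__value__': '{"value": 230.1}'})
#     # -> {'value': 230.1, 'topic': 'N/+/vebus/257/Ac/Out/L1/V'}
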
def _update_metrics(metrics, msg):
    """For each metric on this topic, apply label renaming if present, and export to prometheus"""
    for metric in metrics:
        labels = {'__topic__': metric['topic'],
                  '__msg_topic__': msg.topic, '__value__': str(msg.payload, 'utf-8')}

        if 'label_configs' in metric:
            # If action 'keep' in label_configs fails, or 'drop' succeeds, the metric is not updated
            if not _apply_label_config(labels, metric['label_configs']):
                continue

        # try to convert to float, but leave as is if conversion not possible
        try:
            labels['__value__'] = float(labels['__value__'].replace(',', '.'))
        except ValueError:
            logging.debug(f"Conversion of {labels['__value__']} to float not possible, continue with value as is.")

        logging.debug('_update_metrics all labels:')
        logging.debug(labels)

        labels = finalize_labels(labels)

        derived_metric = metric.setdefault('derived_metric',
                                           # Add derived metric for when the message was last received (timestamp in milliseconds)
                                           {
                                               'name': f"{metric['name']}_last_received",
                                               'help': f"Last received message for '{metric['name']}'",
                                               'type': 'gauge'
                                           }
                                           )
        derived_labels = {'topic': metric['topic'],
                          'value': int(round(time.time() * 1000))}

        _export_to_prometheus(metric['name'], metric, labels)

        _export_to_prometheus(
            derived_metric['name'], derived_metric, derived_labels)


# noinspection PyUnusedLocal
def _on_message(client, userdata, msg):
    """The callback for when a PUBLISH message is received from the server."""
    logging.debug(
        f'_on_message Msg received on topic: {msg.topic}, Value: {str(msg.payload)}')

    for topic in userdata.keys():
        if _topic_matches(topic, msg.topic):
            _update_metrics(userdata[topic], msg)


def victron_mqtt_keep_alive(mqtt_client):
    """Send keepalive to the Cerbo GX so it keeps publishing its topics."""
    properties = Properties(PacketTypes.PUBLISH)
    # An empty publish on the device's R/<portal id>/system/0/Serial topic acts as the keepalive request.
    mqtt_client.publish('R/c0619ab1cab9/system/0/Serial', '', 2, properties=properties)


def _mqtt_init(mqtt_config, metrics):
    """Setup mqtt connection"""
    mqtt_client = mqtt.Client(userdata=metrics)
    mqtt_client.on_connect = _on_connect
    mqtt_client.on_message = _on_message

    if 'auth' in mqtt_config:
        auth = _strip_config(mqtt_config['auth'], ['username', 'password'])
        mqtt_client.username_pw_set(**auth)

    if 'tls' in mqtt_config:
        tls_config = _strip_config(mqtt_config['tls'], [
            'ca_certs', 'certfile', 'keyfile', 'cert_reqs', 'tls_version'])
        mqtt_client.tls_set(**tls_config)

    try:
        mqtt_client.connect(**_strip_config(mqtt_config,
                                            ['host', 'port', 'keepalive']))
    except ConnectionRefusedError as err:
        logging.critical(
            f"Error connecting to {mqtt_config['host']}:{mqtt_config['port']}: {err.strerror}")
        sys.exit(1)

    return mqtt_client


def _export_to_prometheus(name, metric, labels):
    """Export metric and labels to prometheus."""
    metric_wrappers = {'gauge': GaugeWrapper,
                       'counter': CounterWrapper,
                       'counter_absolute': CounterAbsoluteWrapper,
                       'summary': SummaryWrapper,
                       'histogram': HistogramWrapper,
                       'enum': EnumWrapper,
                       }
    valid_types = metric_wrappers.keys()
    if metric['type'] not in valid_types:
        logging.error(
            f"Metric type: {metric['type']}, is not a valid metric type. Must be one of: {valid_types} - ignoring"
        )
        return

    value = labels['value']
    del labels['value']

    sorted_labels = _get_sorted_tuple_list(labels)
    label_names, label_values = list(zip(*sorted_labels))

    prometheus_metric = None
    if not metric.get('prometheus_metric') or not metric['prometheus_metric'].get('parent'):
        # parent metric not seen before, create metric
        additional_parameters = metric.get('parameters', {})

        metric_wrapper = metric_wrappers[metric['type']]
        prometheus_metric = metric_wrapper(
            metric['name'], metric['help'], label_names, **additional_parameters)
        metric['prometheus_metric'] = {}
        metric['prometheus_metric']['parent'] = prometheus_metric
    else:
        prometheus_metric = metric['prometheus_metric']['parent']
    try:
        prometheus_metric.update(label_values, value)
    except ValueError as ve:
        logging.error(f"Value {value} is not compatible with metric {metric['name']} of type {metric['type']}")
        logging.exception('ve:')

    logging.debug(
        f"_export_to_prometheus metric ({metric['type']}): {name}{labels} updated with value: {value}")
    if logging.DEBUG >= logging.root.level:  # log test data only in debugging mode
        _log_test_data(metric, labels['topic'], value)


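# Illustrative sketch (not part of this commit): pushing a single sample through
# _export_to_prometheus() by hand. The metric dict mimics one entry from the YAML config;
# run it as a standalone snippet once the module is loaded, since the wrapper classes it
# uses are defined further down in this file.
#
#     sketch_metric = {'name': 'victron_battery_voltage', 'help': 'Battery voltage',
#                      'type': 'gauge', 'topic': 'N/+/battery/512/Dc/0/Voltage'}
#     _export_to_prometheus(sketch_metric['name'], sketch_metric,
#                           {'value': 12.8, 'topic': 'N/+/battery/512/Dc/0/Voltage'})
#     # On the first call a GaugeWrapper is created and cached under
#     # sketch_metric['prometheus_metric']['parent']; later calls reuse it and just set the value.
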
def _log_test_data(metric, topic, value):
    try:
        base_metric = metric['prometheus_metric']['parent'].metric.collect()
        samples = {}
        for child_metric in base_metric:
            if child_metric.name.endswith('_last_received'):
                # ignore derived metrics '*._last_received'
                continue
            first_sample = child_metric.samples[0]
            for first_sample in child_metric.samples:
                if first_sample.labels.get('topic', '') == topic:
                    samples[first_sample.name] = first_sample

            if len(samples) == 1:
                logging.debug(
                    f"TEST_DATA: {topic}; {value}; {child_metric.name}; {json.dumps(first_sample.labels)}; {first_sample.value}; 0; True")
            else:
                out_value = {}
                labels = first_sample.labels
                for sample_name, first_sample in samples.items():
                    suffix = sample_name[len(child_metric.name):]
                    out_value[suffix] = first_sample.value
                    if suffix == "_bucket":  # buckets have extra "le" label
                        labels = first_sample.labels
                logging.debug(
                    f"TEST_DATA: {topic}; {value}; {child_metric.name}; {json.dumps(labels)}; {json.dumps(out_value)}; 0; True")
    except:  # pylint: disable=bare-except
        logging.exception("Failed to log TEST_DATA. ignoring.")


class GaugeWrapper():
    """
    Wrapper to provide generic interface to Gauge metric
    """

    def __init__(self, name, help_text, label_names, *args, **kwargs) -> None:
        self.metric = prometheus.Gauge(
            name, help_text, list(label_names)
        )

    def update(self, label_values, value):
        child = self.metric.labels(*label_values)
        child.set(value)
        return child


class CounterWrapper():
    """
    Wrapper to provide generic interface to Counter metric
    """

    def __init__(self, name, help_text, label_names, *args, **kwargs) -> None:
        self.metric = prometheus.Counter(
            name, help_text, list(label_names)
        )

    def update(self, label_values, value):
        child = self.metric.labels(*label_values)
        child.inc(value)
        return child


class CounterAbsoluteWrapper():
    """
    Wrapper to provide generic interface to CounterAbsolute metric
    """

    def __init__(self, name, help_text, label_names, *args, **kwargs) -> None:
        self.metric = utils.prometheus_additions.CounterAbsolute(
            name, help_text, list(label_names)
        )

    def update(self, label_values, value):
        child = self.metric.labels(*label_values)
        child.set(value)
        return child


class SummaryWrapper():
    """
    Wrapper to provide generic interface to Summary metric
    """

    def __init__(self, name, help_text, label_names, *args, **kwargs) -> None:
        self.metric = prometheus.Summary(
            name, help_text, list(label_names)
        )

    def update(self, label_values, value):
        child = self.metric.labels(*label_values)
        child.observe(value)
        return child


class HistogramWrapper():
    """
    Wrapper to provide generic interface to Histogram metric
    """

    def __init__(self, name, help_text, label_names, *args, **kwargs) -> None:
        params = {}
        if kwargs.get('buckets'):
            if isinstance(kwargs['buckets'], str):
                params['buckets'] = kwargs['buckets'].split(',')
            else:
                params['buckets'] = kwargs['buckets']

        self.metric = prometheus.Histogram(
            name, help_text, list(label_names), **params
        )

    def update(self, label_values, value):
        child = self.metric.labels(*label_values)
        child.observe(value)
        return child


class EnumWrapper():
    def __init__(self, name, help_text, label_names, *args, **kwargs) -> None:
        params = {}
        if kwargs.get('states'):
            params['states'] = kwargs['states']

        self.metric = prometheus.Enum(
            name, help_text, list(label_names), **params
        )

    def update(self, label_values, value):
        child = self.metric.labels(*label_values)
        child.state(value)
        return child


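# Illustrative sketch (not part of this commit): an Enum metric as created by EnumWrapper,
# assuming a metric config with parameters: {'states': ['on', 'off']}. Values are made up.
#
#     relay = EnumWrapper('victron_relay_state', 'State of the Cerbo GX relay', ['topic'],
#                         states=['on', 'off'])
#     relay.update(['N/c0619ab1cab9/relay/0/State'], 'on')   # sets the enum to state 'on'
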
def add_static_metric(timestamp):
    g = prometheus.Gauge('mqtt_exporter_timestamp', 'Startup time of exporter in millis since EPOC (static)',
                         ['exporter_version'])
    g.labels(VERSION).set(timestamp)


def _get_sorted_tuple_list(source):
    """Return a sorted list of tuples"""
    filtered_source = source.copy()
    sorted_tuple_list = sorted(
        list(filtered_source.items()), key=operator.itemgetter(0))
    return sorted_tuple_list


def _signal_handler(sig, frame):
    # pylint: disable=E1101
    logging.info('Received {0}'.format(signal.Signals(sig).name))
    sys.exit(0)


def main():
    add_static_metric(int(time.time() * 1000))
    # Setup argument parsing
    parser = argparse.ArgumentParser(
        description='Simple program to export formatted MQTT messages to Prometheus')
    parser.add_argument('-c', '--config', action='store', dest='config', default='conf',
                        help='Set config location (file or directory), default: \'conf\'')
    options = parser.parse_args()

    # Initial logging to console
    _log_setup({'logfile': '', 'level': 'info'})
    signal.signal(signal.SIGTERM, _signal_handler)
    signal.signal(signal.SIGINT, _signal_handler)

    # Read config file from disk
    from_file = _read_config(options.config)
    config = _parse_config_and_add_defaults(from_file)

    # Set up logging
    _log_setup(config['logging'])

    # Start prometheus exporter
    logging.info(
        f"Starting Prometheus exporter on port: {str(config['prometheus']['exporter_port'])}")
    try:
        prometheus.start_http_server(config['prometheus']['exporter_port'])
    except OSError as err:
        logging.critical(
            f"Error starting Prometheus exporter: {err.strerror}")
        sys.exit(1)

    # Set up mqtt client and loop forever
    mqtt_client = _mqtt_init(config['mqtt'], config['metrics'])

    # loop_forever is a blocking call
    # mqtt_client.loop_forever()
    # loop_start runs the network loop in a background thread: call it once, outside the main loop!
    mqtt_client.loop_start()

    # Wake up Victron Energy Cerbo GX
    victron_mqtt_keep_alive(mqtt_client)

    # Get system time for keepalive delay loop
    starttime = time.time()

    while True:

        # Send a keepalive every 30 seconds to keep the Victron Energy Cerbo GX awake
        if time.time() - starttime > 30:
            # logging.info("Keepalive")
            victron_mqtt_keep_alive(mqtt_client)
            starttime = time.time()

        # Sleep briefly so the keepalive loop does not busy-wait at full CPU
        time.sleep(1)

    # Never reached: the keepalive loop above runs forever
    mqtt_client.loop_stop()


if __name__ == '__main__':
    main()