Commit a168b48f authored by Gezer

Logger was added, logging was partially implemented.
The timestamp still needs to be adjusted.
parent 2741fb4e
Showing with 67 additions and 41 deletions
src/mqtt_influx_backend/jsonhandler.py:

 import json
+import os

-file_name = "src/mqtt_influx_backend/mac_to_room.json"

-def load_json():
+def load_json(file_name: str) -> dict:
+    """
+    Loads a JSON file if it exists
+    and returns it as a dictionary
+    key : value
+    """
+    if not os.path.exists(file_name):
+        return {}
     with open(file_name) as f:
         mac_room_mapping = json.load(f)
     return mac_room_mapping

-def write_json(mac_room_mapping: dict):
+def write_json(mac_room_mapping: dict, file_name: str):
+    """
+    Takes a dictionary and writes its
+    contents into a JSON file
+    """
     with open(file_name, "w") as f:
         f.seek(0)
         json.dump(mac_room_mapping, f, indent=4)
...
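For context, a minimal usage sketch of the reworked helpers now that the file path is a parameter rather than a module-level constant; the path is the one MQTTClientHandler passes in below, and the snippet itself is illustrative, not part of the commit:

    # Illustrative usage of the reworked jsonhandler API; not part of the commit.
    from src.mqtt_influx_backend import jsonhandler

    MAPPING_FILE = "src/mqtt_influx_backend/mac_to_room.json"  # same path MQTTClientHandler uses

    mapping = jsonhandler.load_json(MAPPING_FILE)    # returns {} if the file does not exist yet
    mapping.setdefault("AA:BB:CC:DD:EE:FF", "")      # register a MAC that has no room assigned yet
    jsonhandler.write_json(mapping, MAPPING_FILE)    # persists the mapping with indent=4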
src/mqtt_influx_backend/loggingFactory.py:

+import logging
+import os
+from logging.handlers import RotatingFileHandler
+
+LOG_DIR = "logs"
+LOG_FILE = "app.log"
+LOG_PATH = os.path.join(LOG_DIR, LOG_FILE)
+
+
+class LoggerFactory:
+    # logger.info("Connected with result code %s", str(rc))
+    # logger.warning("New MAC address found: %s", mac)
+    # logger.error("Failed writing to InfluxDb: %s", e)
+
+    @staticmethod
+    def get_logger(name: str, level=logging.DEBUG) -> logging.Logger:
+        if not os.path.exists(LOG_DIR):
+            os.makedirs(LOG_DIR)
+
+        logger = logging.getLogger(name)
+        if logger.hasHandlers():
+            return logger  # avoids duplicate handlers
+
+        logger.setLevel(level)
+        formatter = logging.Formatter(
+            '[%(asctime)s] %(levelname)s in %(name)s: %(message)s',
+            datefmt='%Y-%m-%d %H:%M:%S'
+        )
+        file_handler = RotatingFileHandler(LOG_PATH, maxBytes=5_000_000, backupCount=5)
+        file_handler.setFormatter(formatter)
+        logger.addHandler(file_handler)
+        return logger
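For reference, a minimal sketch of how the new factory is consumed; the messages mirror the commented examples above, and the snippet is illustrative rather than part of the commit:

    # Illustrative only: obtain a module-level logger from the new factory.
    from src.mqtt_influx_backend.loggingFactory import LoggerFactory

    logger = LoggerFactory.get_logger(__name__)      # creates logs/app.log on first use
    logger.info("Connected with result code %s", 0)
    logger.warning("New MAC address found: %s", "AA:BB:CC:DD:EE:FF")

Because get_logger returns early when hasHandlers() is true, repeated calls reuse the existing RotatingFileHandler instead of attaching a second one; note that hasHandlers() also checks ancestor loggers, so a handler already attached to the root logger would make the factory skip its file handler.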
src/mqtt_influx_backend/mQTTClientHandler.py:

 import json
+from src.mqtt_influx_backend.loggingFactory import LoggerFactory
 from datetime import datetime
 import paho.mqtt.client as mqtt
 from src.mqtt_influx_backend import jsonhandler
@@ -7,6 +8,7 @@ from src.mqtt_influx_backend import influxDBWriter

 class MQTTClientHandler:
+    MAPPING_FILE_NAME = "src/mqtt_influx_backend/mac_to_room.json"
     MEASUREMENT_NAME = "sensor_data"
     TAG_ROOM = "room"
     TAG_MAC = "mac"
@@ -16,8 +18,9 @@ class MQTTClientHandler:
     # Constructor
     def __init__(self, broker_url: str, topic: str, influx_writer: influxDBWriter):
-        # logger should be assigned here
-        self.mac_to_room = jsonhandler.load_json()
+        self.logger = LoggerFactory.get_logger(__name__)
+        # key: mac : value : room
+        self.mac_to_room = jsonhandler.load_json(self.MAPPING_FILE_NAME)
         self.broker_url = broker_url
         self.topic = topic
         self.influx_writer = influx_writer
@@ -27,9 +30,9 @@ class MQTTClientHandler:
         self.client.on_message = self.on_message

     def on_connect(self, client, userdata, flags, rc):
-        # log
-        print("Connected with result code " + str(rc))
+        self.logger.info("Connected with result code " + str(rc))
         client.subscribe(self.topic)
+        self.logger.info("Subscribed to " + self.topic)

     # possibly refactor and split the tasks into separate methods
     def on_message(self, client, userdata, msg):
@@ -40,24 +43,23 @@
         """
         msg = json.loads(msg.payload)
         metadate = msg["metadata"]
-        # key: mac : value : room
         # check here whether the MAC address already has a room;
         # if not, add it to mac_to_room with an empty value
         # "aa:bb:cc:dd:ee:ff" : ""
         mac = metadate["mac-address"]
         if mac not in self.mac_to_room:
-            # log
-            print(f"New MAC address found: {mac}. Mapping will be extended.")
+            self.logger.warning(f"New MAC address found: {mac}. Mapping will be extended.")
             self.mac_to_room[mac] = ""  # empty placeholder
-            jsonhandler.write_json(self.mac_to_room)
-            self.mac_to_room = jsonhandler.load_json()
+            jsonhandler.write_json(self.mac_to_room, self.MAPPING_FILE_NAME)
+            self.mac_to_room = jsonhandler.load_json(self.MAPPING_FILE_NAME)
             return

         self.write_to_influxDB(msg, metadate)

-    def write_to_influxDB(self, msg, metadate):
+    def write_to_influxDB(self, msg: dict, metadate: dict):
         try:
             self.influx_writer.write_point(
                 measurement=self.MEASUREMENT_NAME,
@@ -70,16 +72,11 @@
                     self.FIELD_TEMP: msg["temp"],
                     self.FIELD_HUMIDITY: msg["rh"],
                 },
-                timestamp=metadate["time"],
+                timestamp=metadate["time"],  # fix
             )
-            print(
-                "Wrote to InfluxDB:", msg
-            )  # must be removed later
+            print("Wrote to InfluxDB:", msg)  # must be removed later
         except Exception as e:
-            # log
-            print("Failed writing to InfluxDb: ", e)
+            self.logger.error(f"Failed writing to InfluxDb: {e}")

     def start(self):
         self.client.connect(self.broker_url)
...
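The timestamp handling is still open (timestamp=metadate["time"] is marked # fix, as noted in the commit message). Below is a hedged sketch of the payload shape on_message appears to expect and one possible way to normalize the time field before it reaches influx_writer.write_point(); the concrete values and the ISO 8601 assumption are illustrative, not confirmed by the commit:

    # Payload shape inferred from on_message / write_to_influxDB; all values are made up.
    import json
    from datetime import datetime, timezone

    payload = json.dumps({
        "temp": 21.5,                            # consumed as FIELD_TEMP
        "rh": 48.2,                              # consumed as FIELD_HUMIDITY
        "metadata": {
            "mac-address": "AA:BB:CC:DD:EE:FF",  # looked up in mac_to_room
            "time": "2024-05-17T12:34:56Z",      # currently passed straight through as timestamp=...
        },
    })

    msg = json.loads(payload)
    metadate = msg["metadata"]

    # One possible fix for the open timestamp TODO: turn the (assumed) ISO 8601 string
    # into a timezone-aware datetime before passing it to write_point().
    ts = datetime.fromisoformat(metadate["time"].replace("Z", "+00:00")).astimezone(timezone.utc)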
src/mqtt_influx_backend/mac_to_room.json:

@@ -2,5 +2,6 @@
     "AA:BB:CC:DD:EE:FF": "Wohnzimmer",
     "11:22:33:44:55:66": "K\u00fcche",
     "77:88:99:AA:BB:CC": "Schlafzimmer",
-    "DE:AD:BE:EF:12:34": ""
+    "DE:AD:BE:EF:12:34": "",
+    "DK:AD:BE:EF:12:34": ""
 }
\ No newline at end of file
Main module (entry point):

@@ -4,26 +4,10 @@ import logging
 from src.mqtt_influx_backend.mQTTClientHandler import MQTTClientHandler
 from src.mqtt_influx_backend.influxDBWriter import InfluxDBWriter

-def get_logger(name: str = "default_logger") -> logging.Logger:
-    logger = logging.getLogger(name)
-    if (
-        not logger.handlers
-    ):  # prevents duplicate handlers on repeated initialization
-        logger.setLevel(logging.DEBUG)
-        handler = logging.StreamHandler()
-        formatter = logging.Formatter(
-            "[%(asctime)s] [%(levelname)s] [%(name)s] %(message)s"
-        )
-        handler.setFormatter(formatter)
-        logger.addHandler(handler)
-    return logger

 load_dotenv()

 def main():
     influx_writer = InfluxDBWriter(
         url=os.getenv("INFLUXDB_URL"),
         token=os.getenv("INFLUXDB_TOKEN"),
...